| column | type | lengths |
|---|---|---|
| commit | string | 40–40 |
| old_file | string | 4–118 |
| new_file | string | 4–118 |
| old_contents | string | 0–2.94k |
| new_contents | string | 1–4.43k |
| subject | string | 15–444 |
| message | string | 16–3.45k |
| lang | string (1 class) | |
| license | string (13 classes) | |
| repos | string | 5–43.2k |
| prompt | string | 17–4.58k |
| response | string | 1–4.43k |
| prompt_tagged | string | 58–4.62k |
| response_tagged | string | 1–4.43k |
| text | string | 132–7.29k |
| text_tagged | string | 173–7.33k |

As the rows below show, the last six columns are derived from the base fields: prompt combines old_contents with the commit message, response is new_contents, prompt_tagged and response_tagged wrap the same data in `<commit_before>`, `<commit_msg>`, and `<commit_after>` markers, and text / text_tagged concatenate the corresponding prompt and response.
5ad9bcd6da4c3f4d8333397169f6af76f8946330
|
django_afip/migrations/0012_taxpayer_profile_one_to_one.py
|
django_afip/migrations/0012_taxpayer_profile_one_to_one.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-02-03 02:56
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('afip', '0011_receipt_entry_vat'),
    ]

    operations = [
        migrations.AlterField(
            model_name='taxpayerprofile',
            name='taxpayer',
            field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='profile', to='afip.TaxPayer', verbose_name='taxpayer'),
        ),
    ]
|
Add missing migration for 4503f10
|
Add missing migration for 4503f10
|
Python
|
isc
|
hobarrera/django-afip,hobarrera/django-afip
|
|
d6e9fd27633882274cd272c707097a2646b2f129
|
tt/tests/unit/tables/test_static_methods_truth_table.py
|
tt/tests/unit/tables/test_static_methods_truth_table.py
|
from ._helpers import TruthTableTestCase

from ....tables import TruthTable


class TestStaticMethodsTruthTable(TruthTableTestCase):

    def test_generate_symbols_0(self):
        """Test generating 0 symbols."""
        self.assertEqual(
            TruthTable.generate_symbols(0),
            [])

    def test_generate_symbols_lt_26(self):
        """Test generating less than 26 symbols."""
        self.assertEqual(
            TruthTable.generate_symbols(5),
            ['A', 'B', 'C', 'D', 'E'])

    def test_generate_symbols_eq_26(self):
        """Test generating exactly 26 symbols."""
        self.assertEqual(
            TruthTable.generate_symbols(26),
            ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M',
             'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z'])

    def test_generate_symbols_gt_26(self):
        """Test generating more than 26 symbols (the first boundary)."""
        self.assertEqual(
            TruthTable.generate_symbols(27),
            ['AA', 'AB', 'AC', 'AD', 'AE', 'AF', 'AG', 'AH', 'AI', 'AJ', 'AK',
             'AL', 'AM', 'AN', 'AO', 'AP', 'AQ', 'AR', 'AS', 'AT', 'AU', 'AV',
             'AW', 'AX', 'AY', 'AZ', 'BA'])

    def test_input_combos_empty(self):
        """Test getting an empty set of input_combos."""
        self.assertEqual(
            list(TruthTable.input_combos(0)),
            [()])

    def test_input_combos_one_repeat(self):
        """Test getting input combos for 1 repeat."""
        self.assertEqual(
            list(TruthTable.input_combos(1)),
            [(False,), (True,)])

    def test_input_combos_multiple_repeats(self):
        """Test getting input combos for more than one repeat."""
        self.assertEqual(
            list(TruthTable.input_combos(2)),
            [(False, False,),
             (False, True,),
             (True, False,),
             (True, True,)])
|
Add tests for TruthTable class static methods
|
Add tests for TruthTable class static methods
|
Python
|
mit
|
welchbj/tt,welchbj/tt,welchbj/tt
|
|
46b1fe384f6d7f282c70f10dbb7911c7a2cb53e7
|
plugins/CoD4_MW.py
|
plugins/CoD4_MW.py
|
import os

from lib.base_plugin import BasePlugin
from lib.paths import SteamGamesPath


class CoD4MWPlugin(BasePlugin):
    Name = "Call of Duty 4: Modern Warfare"
    support_os = ["Windows"]

    def backup(self, _):
        _.add_folder('Profiles', os.path.join(SteamGamesPath, 'Call of Duty 4'), 'players')

    def restore(self, _):
        _.restore_folder('Profiles', os.path.join(SteamGamesPath, 'Call of Duty 4'), 'players')

    def detect(self):
        if os.path.isdir(os.path.join(SteamGamesPath, 'Call of Duty 4')):
            return True
        return False
|
Call of Duty 4: Modern Warfare plugin
|
Call of Duty 4: Modern Warfare plugin
|
Python
|
mit
|
Pr0Ger/SGSB
|
|
51fd06209a46ce8162cf59b53667b07c9d6cfe52
|
test/frmwrk_1/test_frmwrk_1.py
|
test/frmwrk_1/test_frmwrk_1.py
|
import os
import pathlib
import subprocess


def setup_module():
    rm_paths = [
        '.*testsuite.sv',
        '.testrunner.gold',
        '.testrunner.gold.tmp',
        '.testrunner.sv',
        '.svunit_top.sv',
        '.testsuite.gold',
        '.testsuite.gold.tmp',
        'test_unit_test.sv',
        'test_unit_test.gold',
    ]
    for rm_path in rm_paths:
        for p in pathlib.Path('.').glob(rm_path):
            p.unlink()


def test_dummy():
    create_unit_test('test.sv')
    golden_class_unit_test('test', 'test0')
    verify_file('test_unit_test.gold', 'test_unit_test.sv')


def create_unit_test(name):
    subprocess.check_call(['create_unit_test.pl', name])


def golden_class_unit_test(FILE, MYNAME):
    template = open('{}/test/templates/class_unit_test.gold'.format(os.environ['SVUNIT_INSTALL']))
    with open('{}_unit_test.gold'.format(FILE), 'w') as output:
        for line in template:
            output.write(line.replace('FILE', FILE).replace('MYNAME', MYNAME))


def verify_file(file0, file1):
    result = subprocess.run(['diff', '-wbB', file0, file1], stdout=subprocess.PIPE)
    assert result.returncode in [0, 1]
    if result.returncode == 1:
        assert result.stdout == b''
|
Create pytest version of frmwrk_1 test
|
Create pytest version of frmwrk_1 test
|
Python
|
apache-2.0
|
svunit/svunit,svunit/svunit,svunit/svunit,nosnhojn/svunit-code,nosnhojn/svunit-code,nosnhojn/svunit-code
|
|
e3012a79401d47d02fe6a2245fe0f65c1e3fce06
|
tests/acceptance/test_build.py
|
tests/acceptance/test_build.py
|
#!/usr/bin/python
# Copyright 2016 Mender Software AS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import subprocess

# Make sure common is imported after fabric, because we override some functions.
from common import *


class TestBuild:
    def test_default_server_certificate(self):
        """Test that the md5sum we have on record matches the server certificate.

        This makes sure the warning about this certificate is correct."""
        output = subprocess.check_output(["md5sum", "../../meta-mender-core/recipes-mender/mender/files/server.crt"])

        # Crude check, just make sure it occurs in the build file.
        subprocess.check_call("fgrep %s ../../meta-mender-core/recipes-mender/mender/mender_*.bb >/dev/null 2>&1"
                              % output.split()[0], shell=True)
|
Add test to make sure our server certificate check stays up to date.
|
Add test to make sure our server certificate check stays up to date.
Signed-off-by: Kristian Amlie <505e66ae45028a0596c853559221f0b72c1cee21@mender.io>
|
Python
|
apache-2.0
|
bboozzoo/meta-mender,bboozzoo/meta-mender,bboozzoo/meta-mender,bboozzoo/meta-mender
|
|
4acbf2c0675eeb5491616ba854ee2cc93812813f
|
abusehelper/contrib/experts/iscexpert.py
|
abusehelper/contrib/experts/iscexpert.py
|
import idiokit
from abusehelper.core import bot, events, utils
from combiner import Expert

import socket
import json

ISC_IP_API_URL = "http://isc.sans.edu/api/ip"


def is_ipv4(ip):
    try:
        socket.inet_aton(ip)
    except (ValueError, socket.error):
        return False
    return True


class IscExpert(Expert):
    ip_key = bot.Param("key which has IP address as value " +
                       "(default: %default)", default="ip")

    @idiokit.stream
    def get_isc_info(self, event, key, eid):
        for ip in event.values(key, filter=is_ipv4):
            url = "{0}/{1}?json".format(ISC_IP_API_URL, ip)
            try:
                info, fileobj = yield utils.fetch_url(url)
            except utils.FetchUrlFailed, fuf:
                self.log.error("Fetch failed: %r", fuf)
                continue

            data = json.load(fileobj)
            ip_data = data.get("ip")
            if ip_data:
                augmentation = events.Event()
                for key, value in ip_data.iteritems():
                    key = unicode(key).strip()
                    value = unicode(value).strip()
                    augmentation.add("dshield " + key, value)
                yield idiokit.send(eid, augmentation)

    @idiokit.stream
    def augment(self):
        while True:
            eid, event = yield idiokit.next()
            yield self.get_isc_info(event, self.ip_key, eid)


if __name__ == "__main__":
    IscExpert.from_command_line().execute()
|
Add Internet Storm Center IP API expert.
|
Add Internet Storm Center IP API expert.
|
Python
|
mit
|
abusesa/abusehelper
|
|
db150068c0985bd2b87bebdc3909f0ec7bd08e56
|
py/keyboard-row.py
|
py/keyboard-row.py
|
class Solution(object):
    def findWords(self, words):
        """
        :type words: List[str]
        :rtype: List[str]
        """
        rows = [
            set('qwertyuiop'),
            set('asdfghjkl'),
            set('zxcvbnm'),
        ]
        return filter(lambda w: not all([len(set(w.lower()) - r) for r in rows]), words)
|
Add py solution for 500. Keyboard Row
|
Add py solution for 500. Keyboard Row
500. Keyboard Row: https://leetcode.com/problems/keyboard-row/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
|
8db34671110ab0161704a2d1f52b050502dea5d0
|
qnd/config_test.py
|
qnd/config_test.py
|
import sys
import unittest

import tensorflow as tf

from .config import *


class TestConfig(unittest.TestCase):
    def test_def_config(self):
        config = def_config()
        self.assertTrue(isinstance(config(), tf.contrib.learn.ClusterConfig))


def append_argv():
    sys.argv += [
        "--ps_hosts", "localhost:4242",
        "--worker_hosts", "localhost:5353",
        "--task_type", "ps",
        "--task_index", "0"]


if __name__ == "__main__":
    append_argv()
    unittest.main(argv=["test"])
|
Add test of config module
|
Add test of config module
|
Python
|
unlicense
|
raviqqe/tensorflow-qnd,raviqqe/tensorflow-qnd
|
|
986d0e103588cd2754c887ebb6a2db3769508553
|
st2actions/tests/unit/test_actionchain_params_rendering.py
|
st2actions/tests/unit/test_actionchain_params_rendering.py
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2

from st2actions.runners import actionchainrunner as acr
from st2common.exceptions.action import ParameterRenderingFailedException
from st2common.models.system.actionchain import Node


class ActionChainRunnerResolveParamsTests(unittest2.TestCase):

    def test_render_params_action_context(self):
        runner = acr.get_runner()
        chain_context = {
            'parent': {
                'execution_id': 'some_awesome_exec_id',
                'user': 'dad'
            },
            'user': 'son',
            'k1': 'v1'
        }
        task_params = {
            'exec_id': {'default': '{{action_context.parent.execution_id}}'},
            'k2': {},
            'foo': {'default': 1}
        }
        action_node = Node(name='test_action_context_params', ref='core.local', params=task_params)
        rendered_params = runner._resolve_params(action_node, {}, {}, {}, chain_context)
        self.assertEqual(rendered_params['exec_id']['default'], 'some_awesome_exec_id')

    def test_render_params_action_context_non_existent_member(self):
        runner = acr.get_runner()
        chain_context = {
            'parent': {
                'execution_id': 'some_awesome_exec_id',
                'user': 'dad'
            },
            'user': 'son',
            'k1': 'v1'
        }
        task_params = {
            'exec_id': {'default': '{{action_context.parent.yo_gimme_tha_key}}'},
            'k2': {},
            'foo': {'default': 1}
        }
        action_node = Node(name='test_action_context_params', ref='core.local', params=task_params)
        try:
            rendered_params = runner._resolve_params(action_node, {}, {}, {}, chain_context)
            self.fail('Should have thrown an instance of %s' % ParameterRenderingFailedException)
        except ParameterRenderingFailedException:
            pass
|
Add unit test for action_context params rendering in chain
|
Add unit test for action_context params rendering in chain
|
Python
|
apache-2.0
|
StackStorm/st2,pixelrebel/st2,peak6/st2,dennybaa/st2,tonybaloney/st2,armab/st2,lakshmi-kannan/st2,Plexxi/st2,punalpatel/st2,punalpatel/st2,dennybaa/st2,armab/st2,dennybaa/st2,punalpatel/st2,Plexxi/st2,Itxaka/st2,emedvedev/st2,StackStorm/st2,Plexxi/st2,lakshmi-kannan/st2,pixelrebel/st2,Itxaka/st2,peak6/st2,lakshmi-kannan/st2,StackStorm/st2,emedvedev/st2,pixelrebel/st2,nzlosh/st2,alfasin/st2,alfasin/st2,nzlosh/st2,StackStorm/st2,peak6/st2,Itxaka/st2,tonybaloney/st2,Plexxi/st2,nzlosh/st2,nzlosh/st2,alfasin/st2,emedvedev/st2,armab/st2,tonybaloney/st2
|
|
fb725482a68d0418c1bb563e4787e0389138f2ca
|
ecosystem/python/sdk/account_address.py
|
ecosystem/python/sdk/account_address.py
|
# Copyright (c) Aptos
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations

import hashlib
import io
import typing
import unittest

from bcs import Deserializer, Serializer

import ed25519


class AccountAddress:
    address: bytes
    LENGTH: int = 32

    def __init__(self, address: bytes):
        self.address = address

        if len(address) != AccountAddress.LENGTH:
            raise Exception("Expected address of length 32")

    def __eq__(self, other: AccountAddress) -> bool:
        return self.address == other.address

    def __str__(self):
        return self.hex()

    def hex(self) -> str:
        return f"0x{self.address.hex()}"

    def from_hex(address: str) -> AccountAddress:
        addr = address
        if address[0:2] == "0x":
            addr = address[2:]

        if len(addr) < AccountAddress.LENGTH * 2:
            pad = "0" * (AccountAddress.LENGTH * 2 - len(addr))
            addr = pad + addr

        return AccountAddress(bytes.fromhex(addr))

    def from_key(key: ed25519.PublicKey) -> AccountAddress:
        hasher = hashlib.sha3_256()
        hasher.update(key.key.encode() + b'\x00')
        return AccountAddress(hasher.digest())

    def deserialize(deserializer: Deserializer) -> AccountAddress:
        return AccountAddress(deserializer.fixed_bytes(AccountAddress.LENGTH))

    def serialize(self, serializer: Serializer):
        serializer.fixed_bytes(self.address)
|
Add support for account address
|
[python-sdk] Add support for account address
While there's not much to account addresses, having convenience to move
from hex, bytes, and public keys is useful. Much of this will need to be
moved to the authenticator library as it is built out.
|
Python
|
apache-2.0
|
aptos-labs/aptos-core,aptos-labs/aptos-core,aptos-labs/aptos-core,aptos-labs/aptos-core,aptos-labs/aptos-core,aptos-labs/aptos-core,aptos-labs/aptos-core
|
[python-sdk] Add support for account address
While there's not much to account addresses, having convenience to move
from hex, bytes, and public keys is useful. Much of this will need to be
moved to the authenticator library as it is built out.
|
# Copyright (c) Aptos
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations
import hashlib
import io
import typing
import unittest
from bcs import Deserializer, Serializer
import ed25519
class AccountAddress:
address: bytes
LENGTH: int = 32
def __init__(self, address: bytes):
self.address = address
if len(address) != AccountAddress.LENGTH:
raise Exception("Expected address of length 32")
def __eq__(self, other: AccountAddress) -> bool:
return self.address == other.address
def __str__(self):
return self.hex()
def hex(self) -> str:
return f"0x{self.address.hex()}"
def from_hex(address: str) -> AccountAddress:
addr = address
if address[0:2] == "0x":
addr = address[2:]
if len(addr) < AccountAddress.LENGTH * 2:
pad = "0" * (AccountAddress.LENGTH * 2 - len(addr))
addr = pad + addr
return AccountAddress(bytes.fromhex(addr))
def from_key(key: ed25519.PublicKey) -> AccountAddress:
hasher = hashlib.sha3_256()
hasher.update(key.key.encode() + b'\x00')
return AccountAddress(hasher.digest())
def deserialize(deserializer: Deserializer) -> AccountAddress:
return AccountAddress(deserializer.fixed_bytes(AccountAddress.LENGTH))
def serialize(self, serializer: Serializer):
serializer.fixed_bytes(self.address)
|
<commit_before><commit_msg>[python-sdk] Add support for account address
While there's not much to account addresses, having convenience to move
from hex, bytes, and public keys is useful. Much of this will need to be
moved to the authenticator library as it is built out.<commit_after>
|
# Copyright (c) Aptos
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations
import hashlib
import io
import typing
import unittest
from bcs import Deserializer, Serializer
import ed25519
class AccountAddress:
address: bytes
LENGTH: int = 32
def __init__(self, address: bytes):
self.address = address
if len(address) != AccountAddress.LENGTH:
raise Exception("Expected address of length 32")
def __eq__(self, other: AccountAddress) -> bool:
return self.address == other.address
def __str__(self):
return self.hex()
def hex(self) -> str:
return f"0x{self.address.hex()}"
def from_hex(address: str) -> AccountAddress:
addr = address
if address[0:2] == "0x":
addr = address[2:]
if len(addr) < AccountAddress.LENGTH * 2:
pad = "0" * (AccountAddress.LENGTH * 2 - len(addr))
addr = pad + addr
return AccountAddress(bytes.fromhex(addr))
def from_key(key: ed25519.PublicKey) -> AccountAddress:
hasher = hashlib.sha3_256()
hasher.update(key.key.encode() + b'\x00')
return AccountAddress(hasher.digest())
def deserialize(deserializer: Deserializer) -> AccountAddress:
return AccountAddress(deserializer.fixed_bytes(AccountAddress.LENGTH))
def serialize(self, serializer: Serializer):
serializer.fixed_bytes(self.address)
|
[python-sdk] Add support for account address
While there's not much to account addresses, having convenience to move
from hex, bytes, and public keys is useful. Much of this will need to be
moved to the authenticator library as it is built out.# Copyright (c) Aptos
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations
import hashlib
import io
import typing
import unittest
from bcs import Deserializer, Serializer
import ed25519
class AccountAddress:
address: bytes
LENGTH: int = 32
def __init__(self, address: bytes):
self.address = address
if len(address) != AccountAddress.LENGTH:
raise Exception("Expected address of length 32")
def __eq__(self, other: AccountAddress) -> bool:
return self.address == other.address
def __str__(self):
return self.hex()
def hex(self) -> str:
return f"0x{self.address.hex()}"
def from_hex(address: str) -> AccountAddress:
addr = address
if address[0:2] == "0x":
addr = address[2:]
if len(addr) < AccountAddress.LENGTH * 2:
pad = "0" * (AccountAddress.LENGTH * 2 - len(addr))
addr = pad + addr
return AccountAddress(bytes.fromhex(addr))
def from_key(key: ed25519.PublicKey) -> AccountAddress:
hasher = hashlib.sha3_256()
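        # Append the auth-key scheme byte (0x00 = single Ed25519) before hashing.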
hasher.update(key.key.encode() + b'\x00')
return AccountAddress(hasher.digest())
def deserialize(deserializer: Deserializer) -> AccountAddress:
return AccountAddress(deserializer.fixed_bytes(AccountAddress.LENGTH))
def serialize(self, serializer: Serializer):
serializer.fixed_bytes(self.address)
|
<commit_before><commit_msg>[python-sdk] Add support for account address
While there's not much to account addresses, having convenience to move
from hex, bytes, and public keys is useful. Much of this will need to be
moved to the authenticator library as it is built out.<commit_after># Copyright (c) Aptos
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations
import hashlib
import io
import typing
import unittest
from bcs import Deserializer, Serializer
import ed25519
class AccountAddress:
address: bytes
LENGTH: int = 32
def __init__(self, address: bytes):
self.address = address
if len(address) != AccountAddress.LENGTH:
raise Exception("Expected address of length 32")
def __eq__(self, other: AccountAddress) -> bool:
return self.address == other.address
def __str__(self):
return self.hex()
def hex(self) -> str:
return f"0x{self.address.hex()}"
def from_hex(address: str) -> AccountAddress:
addr = address
if address[0:2] == "0x":
addr = address[2:]
if len(addr) < AccountAddress.LENGTH * 2:
pad = "0" * (AccountAddress.LENGTH * 2 - len(addr))
addr = pad + addr
return AccountAddress(bytes.fromhex(addr))
def from_key(key: ed25519.PublicKey) -> AccountAddress:
hasher = hashlib.sha3_256()
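        # Append the auth-key scheme byte (0x00 = single Ed25519) before hashing.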
hasher.update(key.key.encode() + b'\x00')
return AccountAddress(hasher.digest())
def deserialize(deserializer: Deserializer) -> AccountAddress:
return AccountAddress(deserializer.fixed_bytes(AccountAddress.LENGTH))
def serialize(self, serializer: Serializer):
serializer.fixed_bytes(self.address)
|
|
e0b5807948b7994d036c4f4354714aba0c194b4d
|
scripts/tidy_ecr_repo.py
|
scripts/tidy_ecr_repo.py
|
#!/usr/bin/env python
# -*- encoding: utf-8
import datetime as dt
import getpass
import boto3
import click
def role_arn_to_session(**kwargs):
client = boto3.client("sts")
response = client.assume_role(**kwargs)
return boto3.Session(
aws_access_key_id=response["Credentials"]["AccessKeyId"],
aws_secret_access_key=response["Credentials"]["SecretAccessKey"],
aws_session_token=response["Credentials"]["SessionToken"]
)
def describe_images(ecr_client, repo_name):
paginator = ecr_client.get_paginator("describe_images")
for page in paginator.paginate(repositoryName=repo_name):
yield from page["imageDetails"]
@click.command()
@click.argument("repo_name")
@click.option("--account_id", default="760097843905")
@click.option("--older_than", default=500, type=int)
def main(repo_name, account_id, older_than):
sess = role_arn_to_session(
RoleArn="arn:aws:iam::%s:role/admin" % account_id,
RoleSessionName="%s--%s" % (getpass.getuser(), __file__)
)
ecr_client = sess.client("ecr")
images_to_delete = []
full_repo_name = "uk.ac.wellcome/%s" % repo_name
for image in describe_images(ecr_client, repo_name=full_repo_name):
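        # Queue any image older than the --older_than cutoff for deletion.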
when_pushed = dt.datetime.now(dt.timezone.utc) - image["imagePushedAt"]
        if when_pushed.days > older_than:
            images_to_delete.append({"imageDigest": image["imageDigest"]})
    if click.confirm("About to delete %d images" % len(images_to_delete)):
        ecr_client.batch_delete_image(
            repositoryName=full_repo_name,
            imageIds=images_to_delete
        )
if __name__ == "__main__":
main()
|
Add a script for deleting ECR images older than N days
|
Add a script for deleting ECR images older than N days
|
Python
|
mit
|
wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api
|
Add a script for deleting ECR images older than N days
|
#!/usr/bin/env python
# -*- encoding: utf-8
import datetime as dt
import getpass
import boto3
import click
def role_arn_to_session(**kwargs):
client = boto3.client("sts")
response = client.assume_role(**kwargs)
return boto3.Session(
aws_access_key_id=response["Credentials"]["AccessKeyId"],
aws_secret_access_key=response["Credentials"]["SecretAccessKey"],
aws_session_token=response["Credentials"]["SessionToken"]
)
def describe_images(ecr_client, repo_name):
paginator = ecr_client.get_paginator("describe_images")
for page in paginator.paginate(repositoryName=repo_name):
yield from page["imageDetails"]
@click.command()
@click.argument("repo_name")
@click.option("--account_id", default="760097843905")
@click.option("--older_than", default=500, type=int)
def main(repo_name, account_id, older_than):
sess = role_arn_to_session(
RoleArn="arn:aws:iam::%s:role/admin" % account_id,
RoleSessionName="%s--%s" % (getpass.getuser(), __file__)
)
ecr_client = sess.client("ecr")
images_to_delete = []
full_repo_name = "uk.ac.wellcome/%s" % repo_name
for image in describe_images(ecr_client, repo_name=full_repo_name):
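        # Queue any image older than the --older_than cutoff for deletion.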
when_pushed = dt.datetime.now(dt.timezone.utc) - image["imagePushedAt"]
        if when_pushed.days > older_than:
            images_to_delete.append({"imageDigest": image["imageDigest"]})
    if click.confirm("About to delete %d images" % len(images_to_delete)):
        ecr_client.batch_delete_image(
            repositoryName=full_repo_name,
            imageIds=images_to_delete
        )
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add a script for deleting ECR images older than N days<commit_after>
|
#!/usr/bin/env python
# -*- encoding: utf-8
import datetime as dt
import getpass
import boto3
import click
def role_arn_to_session(**kwargs):
client = boto3.client("sts")
response = client.assume_role(**kwargs)
return boto3.Session(
aws_access_key_id=response["Credentials"]["AccessKeyId"],
aws_secret_access_key=response["Credentials"]["SecretAccessKey"],
aws_session_token=response["Credentials"]["SessionToken"]
)
def describe_images(ecr_client, repo_name):
paginator = ecr_client.get_paginator("describe_images")
for page in paginator.paginate(repositoryName=repo_name):
yield from page["imageDetails"]
@click.command()
@click.argument("repo_name")
@click.option("--account_id", default="760097843905")
@click.option("--older_than", default=500, type=int)
def main(repo_name, account_id, older_than):
sess = role_arn_to_session(
RoleArn="arn:aws:iam::%s:role/admin" % account_id,
RoleSessionName="%s--%s" % (getpass.getuser(), __file__)
)
ecr_client = sess.client("ecr")
images_to_delete = []
full_repo_name = "uk.ac.wellcome/%s" % repo_name
for image in describe_images(ecr_client, repo_name=full_repo_name):
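        # Queue any image older than the --older_than cutoff for deletion.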
when_pushed = dt.datetime.now(dt.timezone.utc) - image["imagePushedAt"]
        if when_pushed.days > older_than:
            images_to_delete.append({"imageDigest": image["imageDigest"]})
    if click.confirm("About to delete %d images" % len(images_to_delete)):
        ecr_client.batch_delete_image(
            repositoryName=full_repo_name,
            imageIds=images_to_delete
        )
if __name__ == "__main__":
main()
|
Add a script for deleting ECR images older than N days#!/usr/bin/env python
# -*- encoding: utf-8
import datetime as dt
import getpass
import boto3
import click
def role_arn_to_session(**kwargs):
client = boto3.client("sts")
response = client.assume_role(**kwargs)
return boto3.Session(
aws_access_key_id=response["Credentials"]["AccessKeyId"],
aws_secret_access_key=response["Credentials"]["SecretAccessKey"],
aws_session_token=response["Credentials"]["SessionToken"]
)
def describe_images(ecr_client, repo_name):
paginator = ecr_client.get_paginator("describe_images")
for page in paginator.paginate(repositoryName=repo_name):
yield from page["imageDetails"]
@click.command()
@click.argument("repo_name")
@click.option("--account_id", default="760097843905")
@click.option("--older_than", default=500, type=int)
def main(repo_name, account_id, older_than):
sess = role_arn_to_session(
RoleArn="arn:aws:iam::%s:role/admin" % account_id,
RoleSessionName="%s--%s" % (getpass.getuser(), __file__)
)
ecr_client = sess.client("ecr")
images_to_delete = []
full_repo_name = "uk.ac.wellcome/%s" % repo_name
for image in describe_images(ecr_client, repo_name=full_repo_name):
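        # Queue any image older than the --older_than cutoff for deletion.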
when_pushed = dt.datetime.now(dt.timezone.utc) - image["imagePushedAt"]
        if when_pushed.days > older_than:
            images_to_delete.append({"imageDigest": image["imageDigest"]})
    if click.confirm("About to delete %d images" % len(images_to_delete)):
        ecr_client.batch_delete_image(
            repositoryName=full_repo_name,
            imageIds=images_to_delete
        )
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add a script for deleting ECR images older than N days<commit_after>#!/usr/bin/env python
# -*- encoding: utf-8
import datetime as dt
import getpass
import boto3
import click
def role_arn_to_session(**kwargs):
client = boto3.client("sts")
response = client.assume_role(**kwargs)
return boto3.Session(
aws_access_key_id=response["Credentials"]["AccessKeyId"],
aws_secret_access_key=response["Credentials"]["SecretAccessKey"],
aws_session_token=response["Credentials"]["SessionToken"]
)
def describe_images(ecr_client, repo_name):
paginator = ecr_client.get_paginator("describe_images")
for page in paginator.paginate(repositoryName=repo_name):
yield from page["imageDetails"]
@click.command()
@click.argument("repo_name")
@click.option("--account_id", default="760097843905")
@click.option("--older_than", default=500, type=int)
def main(repo_name, account_id, older_than):
sess = role_arn_to_session(
RoleArn="arn:aws:iam::%s:role/admin" % account_id,
RoleSessionName="%s--%s" % (getpass.getuser(), __file__)
)
ecr_client = sess.client("ecr")
images_to_delete = []
full_repo_name = "uk.ac.wellcome/%s" % repo_name
for image in describe_images(ecr_client, repo_name=full_repo_name):
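        # Queue any image older than the --older_than cutoff for deletion.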
when_pushed = dt.datetime.now(dt.timezone.utc) - image["imagePushedAt"]
        if when_pushed.days > older_than:
            images_to_delete.append({"imageDigest": image["imageDigest"]})
    if click.confirm("About to delete %d images" % len(images_to_delete)):
        ecr_client.batch_delete_image(
            repositoryName=full_repo_name,
            imageIds=images_to_delete
        )
if __name__ == "__main__":
main()
|
|
5503b6f20ed6bfb28c5d3285b36c7ec809ced55d
|
salt/modules/philips_hue.py
|
salt/modules/philips_hue.py
|
# -*- coding: utf-8 -*-
'''
Philips HUE lamps module for proxy.
'''
from __future__ import absolute_import
import sys
__virtualname__ = 'hue'
__proxyenabled__ = ['philips_hue']
def _proxy():
'''
Get proxy.
'''
return __opts__['proxymodule']
def __virtual__():
'''
Start the Philips HUE only for proxies.
'''
def _mkf(cmd_name, doc):
def _cmd(*args, **kw):
return _proxy()[_proxy().loaded_base_name + "." + cmd_name](*args, **kw)
        _cmd.__doc__ = doc
        return _cmd
import salt.proxy.philips_hue as hue
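    # Re-export every call_* function from the proxy module as a module-level command.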
for method in dir(hue):
if method.startswith('call_'):
setattr(sys.modules[__name__], method[5:], _mkf(method, getattr(hue, method).__doc__))
del hue
return _proxy() and __virtualname__ or False
|
Implement Philips HUE wrapper caller for Minion Proxy
|
Implement Philips HUE wrapper caller for Minion Proxy
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
Implement Philips HUE wrapper caller for Minion Proxy
|
# -*- coding: utf-8 -*-
'''
Philips HUE lamps module for proxy.
'''
from __future__ import absolute_import
import sys
__virtualname__ = 'hue'
__proxyenabled__ = ['philips_hue']
def _proxy():
'''
Get proxy.
'''
return __opts__['proxymodule']
def __virtual__():
'''
Start the Philips HUE only for proxies.
'''
def _mkf(cmd_name, doc):
def _cmd(*args, **kw):
return _proxy()[_proxy().loaded_base_name + "." + cmd_name](*args, **kw)
        _cmd.__doc__ = doc
        return _cmd
import salt.proxy.philips_hue as hue
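    # Re-export every call_* function from the proxy module as a module-level command.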
for method in dir(hue):
if method.startswith('call_'):
setattr(sys.modules[__name__], method[5:], _mkf(method, getattr(hue, method).__doc__))
del hue
return _proxy() and __virtualname__ or False
|
<commit_before><commit_msg>Implement Philips HUE wrapper caller for Minion Proxy<commit_after>
|
# -*- coding: utf-8 -*-
'''
Philips HUE lamps module for proxy.
'''
from __future__ import absolute_import
import sys
__virtualname__ = 'hue'
__proxyenabled__ = ['philips_hue']
def _proxy():
'''
Get proxy.
'''
return __opts__['proxymodule']
def __virtual__():
'''
Start the Philips HUE only for proxies.
'''
def _mkf(cmd_name, doc):
def _cmd(*args, **kw):
return _proxy()[_proxy().loaded_base_name + "." + cmd_name](*args, **kw)
        _cmd.__doc__ = doc
        return _cmd
import salt.proxy.philips_hue as hue
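    # Re-export every call_* function from the proxy module as a module-level command.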
for method in dir(hue):
if method.startswith('call_'):
setattr(sys.modules[__name__], method[5:], _mkf(method, getattr(hue, method).__doc__))
del hue
return _proxy() and __virtualname__ or False
|
Implement Philips HUE wrapper caller for Minion Proxy# -*- coding: utf-8 -*-
'''
Philips HUE lamps module for proxy.
'''
from __future__ import absolute_import
import sys
__virtualname__ = 'hue'
__proxyenabled__ = ['philips_hue']
def _proxy():
'''
Get proxy.
'''
return __opts__['proxymodule']
def __virtual__():
'''
Start the Philips HUE only for proxies.
'''
def _mkf(cmd_name, doc):
def _cmd(*args, **kw):
return _proxy()[_proxy().loaded_base_name + "." + cmd_name](*args, **kw)
        _cmd.__doc__ = doc
        return _cmd
import salt.proxy.philips_hue as hue
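    # Re-export every call_* function from the proxy module as a module-level command.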
for method in dir(hue):
if method.startswith('call_'):
setattr(sys.modules[__name__], method[5:], _mkf(method, getattr(hue, method).__doc__))
del hue
return _proxy() and __virtualname__ or False
|
<commit_before><commit_msg>Implement Philips HUE wrapper caller for Minion Proxy<commit_after># -*- coding: utf-8 -*-
'''
Philips HUE lamps module for proxy.
'''
from __future__ import absolute_import
import sys
__virtualname__ = 'hue'
__proxyenabled__ = ['philips_hue']
def _proxy():
'''
Get proxy.
'''
return __opts__['proxymodule']
def __virtual__():
'''
Start the Philips HUE only for proxies.
'''
def _mkf(cmd_name, doc):
def _cmd(*args, **kw):
return _proxy()[_proxy().loaded_base_name + "." + cmd_name](*args, **kw)
        _cmd.__doc__ = doc
        return _cmd
import salt.proxy.philips_hue as hue
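    # Re-export every call_* function from the proxy module as a module-level command.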
for method in dir(hue):
if method.startswith('call_'):
setattr(sys.modules[__name__], method[5:], _mkf(method, getattr(hue, method).__doc__))
del hue
return _proxy() and __virtualname__ or False
|
|
9e7363f3ad7521914eeb85d20f6dc3a6987400c4
|
examples/example_test_coverage_upload.py
|
examples/example_test_coverage_upload.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import glob
import datetime
from teamscale_client import TeamscaleClient
from teamscale_client.constants import CoverageFormats
TEAMSCALE_URL = "http://localhost:8080"
USERNAME = "admin"
PASSWORD = "admin"
PROJECT_NAME = "test"
if __name__ == '__main__':
client = TeamscaleClient(TEAMSCALE_URL, USERNAME, PASSWORD, PROJECT_NAME)
    files = glob.glob("/path/to/coverage/files/*.xml")
    client.upload_coverage_data(files, CoverageFormats.CTC, datetime.datetime.now(), "Upload coverage", "test-partition")
|
Add example for coverage upload
|
Add example for coverage upload
|
Python
|
apache-2.0
|
cqse/teamscale-client-python
|
Add example for coverage upload
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import glob
import datetime
from teamscale_client import TeamscaleClient
from teamscale_client.constants import CoverageFormats
TEAMSCALE_URL = "http://localhost:8080"
USERNAME = "admin"
PASSWORD = "admin"
PROJECT_NAME = "test"
if __name__ == '__main__':
client = TeamscaleClient(TEAMSCALE_URL, USERNAME, PASSWORD, PROJECT_NAME)
    files = glob.glob("/path/to/coverage/files/*.xml")
    client.upload_coverage_data(files, CoverageFormats.CTC, datetime.datetime.now(), "Upload coverage", "test-partition")
|
<commit_before><commit_msg>Add example for coverage upload<commit_after>
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import glob
import datetime
from teamscale_client import TeamscaleClient
from teamscale_client.constants import CoverageFormats
TEAMSCALE_URL = "http://localhost:8080"
USERNAME = "admin"
PASSWORD = "admin"
PROJECT_NAME = "test"
if __name__ == '__main__':
client = TeamscaleClient(TEAMSCALE_URL, USERNAME, PASSWORD, PROJECT_NAME)
    files = glob.glob("/path/to/coverage/files/*.xml")
    client.upload_coverage_data(files, CoverageFormats.CTC, datetime.datetime.now(), "Upload coverage", "test-partition")
|
Add example for coverage uploadfrom __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import glob
import datetime
from teamscale_client import TeamscaleClient
from teamscale_client.constants import CoverageFormats
TEAMSCALE_URL = "http://localhost:8080"
USERNAME = "admin"
PASSWORD = "admin"
PROJECT_NAME = "test"
if __name__ == '__main__':
client = TeamscaleClient(TEAMSCALE_URL, USERNAME, PASSWORD, PROJECT_NAME)
    files = glob.glob("/path/to/coverage/files/*.xml")
    client.upload_coverage_data(files, CoverageFormats.CTC, datetime.datetime.now(), "Upload coverage", "test-partition")
|
<commit_before><commit_msg>Add example for coverage upload<commit_after>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import glob
import datetime
from teamscale_client import TeamscaleClient
from teamscale_client.constants import CoverageFormats
TEAMSCALE_URL = "http://localhost:8080"
USERNAME = "admin"
PASSWORD = "admin"
PROJECT_NAME = "test"
if __name__ == '__main__':
client = TeamscaleClient(TEAMSCALE_URL, USERNAME, PASSWORD, PROJECT_NAME)
    files = glob.glob("/path/to/coverage/files/*.xml")
    client.upload_coverage_data(files, CoverageFormats.CTC, datetime.datetime.now(), "Upload coverage", "test-partition")
|
|
15f930276b8922d4fd58885cee282793e7dd7a96
|
scripts/dummytemperature.py
|
scripts/dummytemperature.py
|
#!/usr/bin/env python
from dbus.mainloop.glib import DBusGMainLoop
import gobject
import argparse
import logging
import sys
import os
# our own packages
sys.path.insert(1, os.path.join(os.path.dirname(__file__), '../ext/velib_python'))
from dbusdummyservice import DbusDummyService
from logger import setup_logging
# Argument parsing
parser = argparse.ArgumentParser(
description='dummy dbus service'
)
parser.add_argument("-n", "--name", help="the D-Bus service you want me to claim",
type=str, default="com.victronenergy.temperature.builtin_adc5_di0")
args = parser.parse_args()
print(__file__ + " is starting up, use -h argument to see optional arguments")
logger = setup_logging(debug=True)
# Have a mainloop, so we can send/receive asynchronous calls to and from dbus
DBusGMainLoop(set_as_default=True)
s = DbusDummyService(
servicename=args.name,
deviceinstance=0,
paths={
'/Status': {'initial': 0},
'/Temperature': {'initial': None},
'/TemperatureType': {'initial': 0},
'/Scale': {'initial': 1},
'/Offset': {'initial': 0},
},
productname='Generic Temperature sensor',
connection='ADC port 1')
logger.info('Connected to dbus, and switching over to gobject.MainLoop() (= event based)')
mainloop = gobject.MainLoop()
mainloop.run()
|
Add dummy temperature script (for testing).
|
Add dummy temperature script (for testing).
|
Python
|
mit
|
victronenergy/dbus-systemcalc-py
|
Add dummy temperature script (for testing).
|
#!/usr/bin/env python
from dbus.mainloop.glib import DBusGMainLoop
import gobject
import argparse
import logging
import sys
import os
# our own packages
sys.path.insert(1, os.path.join(os.path.dirname(__file__), '../ext/velib_python'))
from dbusdummyservice import DbusDummyService
from logger import setup_logging
# Argument parsing
parser = argparse.ArgumentParser(
description='dummy dbus service'
)
parser.add_argument("-n", "--name", help="the D-Bus service you want me to claim",
type=str, default="com.victronenergy.temperature.builtin_adc5_di0")
args = parser.parse_args()
print(__file__ + " is starting up, use -h argument to see optional arguments")
logger = setup_logging(debug=True)
# Have a mainloop, so we can send/receive asynchronous calls to and from dbus
DBusGMainLoop(set_as_default=True)
s = DbusDummyService(
servicename=args.name,
deviceinstance=0,
paths={
'/Status': {'initial': 0},
'/Temperature': {'initial': None},
'/TemperatureType': {'initial': 0},
'/Scale': {'initial': 1},
'/Offset': {'initial': 0},
},
productname='Generic Temperature sensor',
connection='ADC port 1')
logger.info('Connected to dbus, and switching over to gobject.MainLoop() (= event based)')
mainloop = gobject.MainLoop()
mainloop.run()
|
<commit_before><commit_msg>Add dummy temperature script (for testing).<commit_after>
|
#!/usr/bin/env python
from dbus.mainloop.glib import DBusGMainLoop
import gobject
import argparse
import logging
import sys
import os
# our own packages
sys.path.insert(1, os.path.join(os.path.dirname(__file__), '../ext/velib_python'))
from dbusdummyservice import DbusDummyService
from logger import setup_logging
# Argument parsing
parser = argparse.ArgumentParser(
description='dummy dbus service'
)
parser.add_argument("-n", "--name", help="the D-Bus service you want me to claim",
type=str, default="com.victronenergy.temperature.builtin_adc5_di0")
args = parser.parse_args()
print(__file__ + " is starting up, use -h argument to see optional arguments")
logger = setup_logging(debug=True)
# Have a mainloop, so we can send/receive asynchronous calls to and from dbus
DBusGMainLoop(set_as_default=True)
s = DbusDummyService(
servicename=args.name,
deviceinstance=0,
paths={
'/Status': {'initial': 0},
'/Temperature': {'initial': None},
'/TemperatureType': {'initial': 0},
'/Scale': {'initial': 1},
'/Offset': {'initial': 0},
},
productname='Generic Temperature sensor',
connection='ADC port 1')
logger.info('Connected to dbus, and switching over to gobject.MainLoop() (= event based)')
mainloop = gobject.MainLoop()
mainloop.run()
|
Add dummy temperature script (for testing).#!/usr/bin/env python
from dbus.mainloop.glib import DBusGMainLoop
import gobject
import argparse
import logging
import sys
import os
# our own packages
sys.path.insert(1, os.path.join(os.path.dirname(__file__), '../ext/velib_python'))
from dbusdummyservice import DbusDummyService
from logger import setup_logging
# Argument parsing
parser = argparse.ArgumentParser(
description='dummy dbus service'
)
parser.add_argument("-n", "--name", help="the D-Bus service you want me to claim",
type=str, default="com.victronenergy.temperature.builtin_adc5_di0")
args = parser.parse_args()
print(__file__ + " is starting up, use -h argument to see optional arguments")
logger = setup_logging(debug=True)
# Have a mainloop, so we can send/receive asynchronous calls to and from dbus
DBusGMainLoop(set_as_default=True)
s = DbusDummyService(
servicename=args.name,
deviceinstance=0,
paths={
'/Status': {'initial': 0},
'/Temperature': {'initial': None},
'/TemperatureType': {'initial': 0},
'/Scale': {'initial': 1},
'/Offset': {'initial': 0},
},
productname='Generic Temperature sensor',
connection='ADC port 1')
logger.info('Connected to dbus, and switching over to gobject.MainLoop() (= event based)')
mainloop = gobject.MainLoop()
mainloop.run()
|
<commit_before><commit_msg>Add dummy temperature script (for testing).<commit_after>#!/usr/bin/env python
from dbus.mainloop.glib import DBusGMainLoop
import gobject
import argparse
import logging
import sys
import os
# our own packages
sys.path.insert(1, os.path.join(os.path.dirname(__file__), '../ext/velib_python'))
from dbusdummyservice import DbusDummyService
from logger import setup_logging
# Argument parsing
parser = argparse.ArgumentParser(
description='dummy dbus service'
)
parser.add_argument("-n", "--name", help="the D-Bus service you want me to claim",
type=str, default="com.victronenergy.temperature.builtin_adc5_di0")
args = parser.parse_args()
print(__file__ + " is starting up, use -h argument to see optional arguments")
logger = setup_logging(debug=True)
# Have a mainloop, so we can send/receive asynchronous calls to and from dbus
DBusGMainLoop(set_as_default=True)
s = DbusDummyService(
servicename=args.name,
deviceinstance=0,
paths={
'/Status': {'initial': 0},
'/Temperature': {'initial': None},
'/TemperatureType': {'initial': 0},
'/Scale': {'initial': 1},
'/Offset': {'initial': 0},
},
productname='Generic Temperature sensor',
connection='ADC port 1')
logger.info('Connected to dbus, and switching over to gobject.MainLoop() (= event based)')
mainloop = gobject.MainLoop()
mainloop.run()
|
|
a00c9f1d7d25daa149305d211d1c653e7f0b72f3
|
scripts/download-jamendo.py
|
scripts/download-jamendo.py
|
#!/usr/bin/env python
# Jamendo database dumps can be fetched from: http://img.jamendo.com/data/dbdump_artistalbumtrack.xml.gz
import xml.etree.cElementTree as ElementTree
import sys, gzip, time, os.path, urllib
class DownloadJamendo:
def __init__(self, destination):
if not os.path.exists(destination):
os.mkdir(destination)
self.destination = destination
def parse(self, dump):
for event, elem in ElementTree.iterparse(dump):
if elem.tag == "artist":
self.proc_artist(elem)
def proc_artist(self, elem):
for artist_e in elem.getchildren():
if artist_e.tag == "Albums":
for album_e in artist_e.getchildren():
self.proc_album(album_e)
def proc_album(self, elem):
for album_e in elem.getchildren():
if album_e.tag == "Tracks":
for track_e in album_e.getchildren():
self.proc_track(track_e)
def proc_track(self, elem):
track_id = None
track_license = None
for track_e in elem.getchildren():
if track_e.tag == "id":
track_id = int(track_e.text)
if track_e.tag == "license":
track_license = track_e.text
if track_id and track_license:
if self.free_license(track_license):
trackurl = "http://api.jamendo.com/get2/stream/track/redirect/?id=%d&streamencoding=ogg2" % track_id
trackfile = os.path.join(self.destination, "%d.ogg" % track_id)
if os.path.exists(trackfile):
print "Already downloaded track %d" % track_id
else:
print "Downloading %s to %s" % (trackurl, trackfile)
urllib.urlretrieve(trackurl, trackfile)
def free_license(self, license):
return ("http://creativecommons.org/licenses/by-sa" in license or "http://creativecommons.org/licenses/by/" in license or "http://artlibre.org/licence.php/lal.html" in license)
if __name__ == "__main__":
if len(sys.argv) != 3:
print "Usage: download-jamendo.py <database dump> <destination>"
sys.exit(1)
if sys.argv[1][-2:] == "gz":
dump = gzip.open(sys.argv[1], "r")
else:
dump = open(sys.argv[1], "r")
downloader = DownloadJamendo(sys.argv[2])
downloader.parse(dump)
|
Add a script to download all the fully free tracks from Jamendo (as Ogg Vorbis)
|
Add a script to download all the fully free tracks from Jamendo (as Ogg Vorbis)
|
Python
|
agpl-3.0
|
foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm
|
Add a script to download all the fully free tracks from Jamendo (as Ogg Vorbis)
|
#!/usr/bin/env python
# Jamendo database dumps can be fetched from: http://img.jamendo.com/data/dbdump_artistalbumtrack.xml.gz
import xml.etree.cElementTree as ElementTree
import sys, gzip, time, os.path, urllib
class DownloadJamendo:
def __init__(self, destination):
if not os.path.exists(destination):
os.mkdir(destination)
self.destination = destination
def parse(self, dump):
for event, elem in ElementTree.iterparse(dump):
if elem.tag == "artist":
self.proc_artist(elem)
def proc_artist(self, elem):
for artist_e in elem.getchildren():
if artist_e.tag == "Albums":
for album_e in artist_e.getchildren():
self.proc_album(album_e)
def proc_album(self, elem):
for album_e in elem.getchildren():
if album_e.tag == "Tracks":
for track_e in album_e.getchildren():
self.proc_track(track_e)
def proc_track(self, elem):
track_id = None
track_license = None
for track_e in elem.getchildren():
if track_e.tag == "id":
track_id = int(track_e.text)
if track_e.tag == "license":
track_license = track_e.text
if track_id and track_license:
if self.free_license(track_license):
trackurl = "http://api.jamendo.com/get2/stream/track/redirect/?id=%d&streamencoding=ogg2" % track_id
trackfile = os.path.join(self.destination, "%d.ogg" % track_id)
if os.path.exists(trackfile):
print "Already downloaded track %d" % track_id
else:
print "Downloading %s to %s" % (trackurl, trackfile)
urllib.urlretrieve(trackurl, trackfile)
def free_license(self, license):
return ("http://creativecommons.org/licenses/by-sa" in license or "http://creativecommons.org/licenses/by/" in license or "http://artlibre.org/licence.php/lal.html" in license)
if __name__ == "__main__":
if len(sys.argv) != 3:
print "Usage: download-jamendo.py <database dump> <destination>"
sys.exit(1)
if sys.argv[1][-2:] == "gz":
dump = gzip.open(sys.argv[1], "r")
else:
dump = open(sys.argv[1], "r")
downloader = DownloadJamendo(sys.argv[2])
downloader.parse(dump)
|
<commit_before><commit_msg>Add a script to download all the fully free tracks from Jamendo (as Ogg Vorbis)<commit_after>
|
#!/usr/bin/env python
# Jamendo database dumps can be fetched from: http://img.jamendo.com/data/dbdump_artistalbumtrack.xml.gz
import xml.etree.cElementTree as ElementTree
import sys, gzip, time, os.path, urllib
class DownloadJamendo:
def __init__(self, destination):
if not os.path.exists(destination):
os.mkdir(destination)
self.destination = destination
def parse(self, dump):
for event, elem in ElementTree.iterparse(dump):
if elem.tag == "artist":
self.proc_artist(elem)
def proc_artist(self, elem):
for artist_e in elem.getchildren():
if artist_e.tag == "Albums":
for album_e in artist_e.getchildren():
self.proc_album(album_e)
def proc_album(self, elem):
for album_e in elem.getchildren():
if album_e.tag == "Tracks":
for track_e in album_e.getchildren():
self.proc_track(track_e)
def proc_track(self, elem):
track_id = None
track_license = None
for track_e in elem.getchildren():
if track_e.tag == "id":
track_id = int(track_e.text)
if track_e.tag == "license":
track_license = track_e.text
if track_id and track_license:
if self.free_license(track_license):
trackurl = "http://api.jamendo.com/get2/stream/track/redirect/?id=%d&streamencoding=ogg2" % track_id
trackfile = os.path.join(self.destination, "%d.ogg" % track_id)
if os.path.exists(trackfile):
print "Already downloaded track %d" % track_id
else:
print "Downloading %s to %s" % (trackurl, trackfile)
urllib.urlretrieve(trackurl, trackfile)
def free_license(self, license):
return ("http://creativecommons.org/licenses/by-sa" in license or "http://creativecommons.org/licenses/by/" in license or "http://artlibre.org/licence.php/lal.html" in license)
if __name__ == "__main__":
if len(sys.argv) != 3:
print "Usage: download-jamendo.py <database dump> <destination>"
sys.exit(1)
if sys.argv[1][-2:] == "gz":
dump = gzip.open(sys.argv[1], "r")
else:
dump = open(sys.argv[1], "r")
downloader = DownloadJamendo(sys.argv[2])
downloader.parse(dump)
|
Add a script to download all the fully free tracks from Jamendo (as Ogg Vorbis)#!/usr/bin/env python
# Jamendo database dumps can be fetched from: http://img.jamendo.com/data/dbdump_artistalbumtrack.xml.gz
import xml.etree.cElementTree as ElementTree
import sys, gzip, time, os.path, urllib
class DownloadJamendo:
def __init__(self, destination):
if not os.path.exists(destination):
os.mkdir(destination)
self.destination = destination
def parse(self, dump):
for event, elem in ElementTree.iterparse(dump):
if elem.tag == "artist":
self.proc_artist(elem)
def proc_artist(self, elem):
for artist_e in elem.getchildren():
if artist_e.tag == "Albums":
for album_e in artist_e.getchildren():
self.proc_album(album_e)
def proc_album(self, elem):
for album_e in elem.getchildren():
if album_e.tag == "Tracks":
for track_e in album_e.getchildren():
self.proc_track(track_e)
def proc_track(self, elem):
track_id = None
track_license = None
for track_e in elem.getchildren():
if track_e.tag == "id":
track_id = int(track_e.text)
if track_e.tag == "license":
track_license = track_e.text
if track_id and track_license:
if self.free_license(track_license):
trackurl = "http://api.jamendo.com/get2/stream/track/redirect/?id=%d&streamencoding=ogg2" % track_id
trackfile = os.path.join(self.destination, "%d.ogg" % track_id)
if os.path.exists(trackfile):
print "Already downloaded track %d" % track_id
else:
print "Downloading %s to %s" % (trackurl, trackfile)
urllib.urlretrieve(trackurl, trackfile)
def free_license(self, license):
return ("http://creativecommons.org/licenses/by-sa" in license or "http://creativecommons.org/licenses/by/" in license or "http://artlibre.org/licence.php/lal.html" in license)
if __name__ == "__main__":
if len(sys.argv) != 3:
print "Usage: download-jamendo.py <database dump> <destination>"
sys.exit(1)
if sys.argv[1][-2:] == "gz":
dump = gzip.open(sys.argv[1], "r")
else:
dump = open(sys.argv[1], "r")
downloader = DownloadJamendo(sys.argv[2])
downloader.parse(dump)
|
<commit_before><commit_msg>Add a script to download all the fully free tracks from Jamendo (as Ogg Vorbis)<commit_after>#!/usr/bin/env python
# Jamendo database dumps can be fetched from: http://img.jamendo.com/data/dbdump_artistalbumtrack.xml.gz
import xml.etree.cElementTree as ElementTree
import sys, gzip, time, os.path, urllib
class DownloadJamendo:
def __init__(self, destination):
if not os.path.exists(destination):
os.mkdir(destination)
self.destination = destination
def parse(self, dump):
for event, elem in ElementTree.iterparse(dump):
if elem.tag == "artist":
self.proc_artist(elem)
def proc_artist(self, elem):
for artist_e in elem.getchildren():
if artist_e.tag == "Albums":
for album_e in artist_e.getchildren():
self.proc_album(album_e)
def proc_album(self, elem):
for album_e in elem.getchildren():
if album_e.tag == "Tracks":
for track_e in album_e.getchildren():
self.proc_track(track_e)
def proc_track(self, elem):
track_id = None
track_license = None
for track_e in elem.getchildren():
if track_e.tag == "id":
track_id = int(track_e.text)
if track_e.tag == "license":
track_license = track_e.text
if track_id and track_license:
if self.free_license(track_license):
trackurl = "http://api.jamendo.com/get2/stream/track/redirect/?id=%d&streamencoding=ogg2" % track_id
trackfile = os.path.join(self.destination, "%d.ogg" % track_id)
if os.path.exists(trackfile):
print "Already downloaded track %d" % track_id
else:
print "Downloading %s to %s" % (trackurl, trackfile)
urllib.urlretrieve(trackurl, trackfile)
def free_license(self, license):
return ("http://creativecommons.org/licenses/by-sa" in license or "http://creativecommons.org/licenses/by/" in license or "http://artlibre.org/licence.php/lal.html" in license)
if __name__ == "__main__":
if len(sys.argv) != 3:
print "Usage: download-jamendo.py <database dump> <destination>"
sys.exit(1)
if sys.argv[1][-2:] == "gz":
dump = gzip.open(sys.argv[1], "r")
else:
dump = open(sys.argv[1], "r")
downloader = DownloadJamendo(sys.argv[2])
downloader.parse(dump)
|
|
0b32ce31d3c066f83565e678d66faeb58e835ae9
|
bvg_cli.py
|
bvg_cli.py
|
import requests
from lxml import html
BVG_URL = 'http://mobil.bvg.de/Fahrinfo/bin/stboard.bin/dox?'
def request_station_ids(station_name):
''' Requests the station ids for the provided station name.
    The function has two different outcomes depending on how distinctive
    the station name is: a list of possible stations, or the one station.
    Returns a tuple (data, ok). Data holds the station(s) with their name
    and id. The ok flag is False if there are network problems.
    '''
    r = requests.get(BVG_URL, params={'input': station_name})
# network
if r.status_code != 200:
return None, False
tree = html.fromstring(r.content)
data = []
    # possible stations
if tree.cssselect('span.error'):
for station in tree.cssselect('span.select a'):
station_name = station.text.strip()
# TODO: clean up direct list access
station_id = station.get("href").split('&')[1].split('=')[1]
data.append((station_name,station_id))
return data, True
# one station
# TODO: clean up direct list access
station_name = tree.cssselect('span.desc strong')[0].text
station_id = tree.cssselect('p.links a')[0].get('href').split('&')[1].split('=')[1]
return (station_name, station_id), True
|
Add function to request station ids
|
Add function to request station ids
|
Python
|
mit
|
behrtam/bvg-cli,behrtam/bvg-cli
|
Add function to request station ids
|
import requests
from lxml import html
BVG_URL = 'http://mobil.bvg.de/Fahrinfo/bin/stboard.bin/dox?'
def request_station_ids(station_name):
''' Requests the station ids for the provided station name.
    The function has two different outcomes depending on how distinctive
    the station name is: a list of possible stations, or the one station.
    Returns a tuple (data, ok). Data holds the station(s) with their name
    and id. The ok flag is False if there are network problems.
    '''
    r = requests.get(BVG_URL, params={'input': station_name})
# network
if r.status_code != 200:
return None, False
tree = html.fromstring(r.content)
data = []
    # possible stations
if tree.cssselect('span.error'):
for station in tree.cssselect('span.select a'):
station_name = station.text.strip()
# TODO: clean up direct list access
station_id = station.get("href").split('&')[1].split('=')[1]
data.append((station_name,station_id))
return data, True
# one station
# TODO: clean up direct list access
station_name = tree.cssselect('span.desc strong')[0].text
station_id = tree.cssselect('p.links a')[0].get('href').split('&')[1].split('=')[1]
return (station_name, station_id), True
|
<commit_before><commit_msg>Add function to request station ids<commit_after>
|
import requests
from lxml import html
BVG_URL = 'http://mobil.bvg.de/Fahrinfo/bin/stboard.bin/dox?'
def request_station_ids(station_name):
''' Requests the station ids for the provided station name.
    The function has two different outcomes depending on how distinctive
    the station name is: a list of possible stations, or the one station.
    Returns a tuple (data, ok). Data holds the station(s) with their name
    and id. The ok flag is False if there are network problems.
    '''
    r = requests.get(BVG_URL, params={'input': station_name})
# network
if r.status_code != 200:
return None, False
tree = html.fromstring(r.content)
data = []
    # possible stations
if tree.cssselect('span.error'):
for station in tree.cssselect('span.select a'):
station_name = station.text.strip()
# TODO: clean up direct list access
station_id = station.get("href").split('&')[1].split('=')[1]
data.append((station_name,station_id))
return data, True
# one station
# TODO: clean up direct list access
station_name = tree.cssselect('span.desc strong')[0].text
station_id = tree.cssselect('p.links a')[0].get('href').split('&')[1].split('=')[1]
return (station_name, station_id), True
|
Add function to request station idsimport requests
from lxml import html
BVG_URL = 'http://mobil.bvg.de/Fahrinfo/bin/stboard.bin/dox?'
def request_station_ids(station_name):
''' Requests the station ids for the provided station name.
    The function has two different outcomes depending on how distinctive
    the station name is: a list of possible stations, or the one station.
    Returns a tuple (data, ok). Data holds the station(s) with their name
    and id. The ok flag is False if there are network problems.
    '''
    r = requests.get(BVG_URL, params={'input': station_name})
# network
if r.status_code != 200:
return None, False
tree = html.fromstring(r.content)
data = []
    # possible stations
if tree.cssselect('span.error'):
for station in tree.cssselect('span.select a'):
station_name = station.text.strip()
# TODO: clean up direct list access
station_id = station.get("href").split('&')[1].split('=')[1]
data.append((station_name,station_id))
return data, True
# one station
# TODO: clean up direct list access
station_name = tree.cssselect('span.desc strong')[0].text
station_id = tree.cssselect('p.links a')[0].get('href').split('&')[1].split('=')[1]
return (station_name, station_id), True
|
<commit_before><commit_msg>Add function to request station ids<commit_after>import requests
from lxml import html
BVG_URL = 'http://mobil.bvg.de/Fahrinfo/bin/stboard.bin/dox?'
def request_station_ids(station_name):
''' Requests the station ids for the provided station name.
    The function has two different outcomes depending on how distinctive
    the station name is: a list of possible stations, or the one station.
    Returns a tuple (data, ok). Data holds the station(s) with their name
    and id. The ok flag is False if there are network problems.
    '''
    r = requests.get(BVG_URL, params={'input': station_name})
# network
if r.status_code != 200:
return None, False
tree = html.fromstring(r.content)
data = []
    # possible stations
if tree.cssselect('span.error'):
for station in tree.cssselect('span.select a'):
station_name = station.text.strip()
# TODO: clean up direct list access
station_id = station.get("href").split('&')[1].split('=')[1]
data.append((station_name,station_id))
return data, True
# one station
# TODO: clean up direct list access
station_name = tree.cssselect('span.desc strong')[0].text
station_id = tree.cssselect('p.links a')[0].get('href').split('&')[1].split('=')[1]
return (station_name, station_id), True
|
|
0f853e6044be1a3430f92b3ec14f83432aa3635c
|
intelmq/bots/outputs/misp/output_api.py
|
intelmq/bots/outputs/misp/output_api.py
|
"""Connect to MISP instance and add event as MISPObject if not reported already.
SPDX-FileCopyrightText: 2020 Intevation GmbH <https://intevation.de>
SPDX-License-Identifier: AGPL-3.0-or-later
Funding: of initial version by SUNET
Author(s):
* Bernhard Reiter <bernhard@intevation.de>
Parameters:
- misp_url: URL of the MISP server
- misp_key: API key for accessing MISP
- misp_verify: true or false, check the validity of the certificate
TODO, this is just a stub, WIP
Tested with pymisp v2.4.120 (which needs python v>=3.6).
"""
import datetime
from uuid import uuid4
from intelmq.lib.bot import OutputBot
from intelmq.lib.exceptions import MissingDependencyError
try:
    import pymisp
    from pymisp import MISPEvent, NewAttributeError, PyMISP
except ImportError:
    MISPEvent = None
class MISPAPIOutputBot(OutputBot):
is_multithreadable = False
def init(self):
if MISPEvent is None:
raise MissingDependencyError('pymisp', version='>=2.4.120')
# Initialize MISP connection
self.misp = PyMISP(self.parameters.misp_url,
self.parameters.misp_key,
self.parameters.http_verify_cert)
self.current_event = None
self.misp_org = pymisp.MISPOrganisation()
self.misp_org.name = self.parameters.misp_org_name
self.misp_org.uuid = self.parameters.misp_org_uuid
self.current_event = MISPEvent()
def process(self):
self.current_event = MISPEvent()
self.current_event.info = ('IntelMQ event {begin} - {end}'
''.format(begin=self.min_time_current.isoformat(),
end=self.max_time_current.isoformat()))
self.current_event.set_date(datetime.date.today())
self.current_event.Orgc = self.misp_org
self.current_event.uuid = str(uuid4())
event = self.receive_message().to_dict(jsondict_as_string=True)
obj = self.current_event.add_object(name='intelmq_event')
for object_relation, value in event.items():
try:
obj.add_attribute(object_relation, value=value)
except NewAttributeError:
# This entry isn't listed in the harmonization file, ignoring.
pass
self.acknowledge_message()
@staticmethod
def check(parameters):
pass
BOT = MISPAPIOutputBot
|
Add stub for output bot misp api
|
Add stub for output bot misp api
|
Python
|
agpl-3.0
|
certtools/intelmq,aaronkaplan/intelmq,aaronkaplan/intelmq,certtools/intelmq,certtools/intelmq,aaronkaplan/intelmq
|
Add stub for output bot misp api
|
"""Connect to MISP instance and add event as MISPObject if not reported already.
SPDX-FileCopyrightText: 2020 Intevation GmbH <https://intevation.de>
SPDX-License-Identifier: AGPL-3.0-or-later
Funding: of initial version by SUNET
Author(s):
* Bernhard Reiter <bernhard@intevation.de>
Parameters:
- misp_url: URL of the MISP server
- misp_key: API key for accessing MISP
- misp_verify: true or false, check the validity of the certificate
TODO, this is just a stub, WIP
Tested with pymisp v2.4.120 (which needs python v>=3.6).
"""
import datetime
from uuid import uuid4
from intelmq.lib.bot import OutputBot
from intelmq.lib.exceptions import MissingDependencyError
try:
    import pymisp
    from pymisp import MISPEvent, NewAttributeError, PyMISP
except ImportError:
    MISPEvent = None
class MISPAPIOutputBot(OutputBot):
is_multithreadable = False
def init(self):
if MISPEvent is None:
raise MissingDependencyError('pymisp', version='>=2.4.120')
# Initialize MISP connection
self.misp = PyMISP(self.parameters.misp_url,
self.parameters.misp_key,
self.parameters.http_verify_cert)
self.current_event = None
self.misp_org = pymisp.MISPOrganisation()
self.misp_org.name = self.parameters.misp_org_name
self.misp_org.uuid = self.parameters.misp_org_uuid
self.current_event = MISPEvent()
def process(self):
self.current_event = MISPEvent()
self.current_event.info = ('IntelMQ event {begin} - {end}'
''.format(begin=self.min_time_current.isoformat(),
end=self.max_time_current.isoformat()))
self.current_event.set_date(datetime.date.today())
self.current_event.Orgc = self.misp_org
self.current_event.uuid = str(uuid4())
event = self.receive_message().to_dict(jsondict_as_string=True)
obj = self.current_event.add_object(name='intelmq_event')
for object_relation, value in event.items():
try:
obj.add_attribute(object_relation, value=value)
except NewAttributeError:
# This entry isn't listed in the harmonization file, ignoring.
pass
self.acknowledge_message()
@staticmethod
def check(parameters):
pass
BOT = MISPAPIOutputBot
|
<commit_before><commit_msg>Add stub for output bot misp api<commit_after>
|
"""Connect to MISP instance and add event as MISPObject if not reported already.
SPDX-FileCopyrightText: 2020 Intevation GmbH <https://intevation.de>
SPDX-License-Identifier: AGPL-3.0-or-later
Funding: of initial version by SUNET
Author(s):
* Bernhard Reiter <bernhard@intevation.de>
Parameters:
- misp_url: URL of the MISP server
- misp_key: API key for accessing MISP
- misp_verify: true or false, check the validity of the certificate
TODO, this is just a stub, WIP
Tested with pymisp v2.4.120 (which needs python v>=3.6).
"""
import datetime
from uuid import uuid4
from intelmq.lib.bot import OutputBot
from intelmq.lib.exceptions import MissingDependencyError
try:
    import pymisp
    from pymisp import MISPEvent, NewAttributeError, PyMISP
except ImportError:
    MISPEvent = None
class MISPAPIOutputBot(OutputBot):
is_multithreadable = False
def init(self):
if MISPEvent is None:
raise MissingDependencyError('pymisp', version='>=2.4.120')
# Initialize MISP connection
self.misp = PyMISP(self.parameters.misp_url,
self.parameters.misp_key,
self.parameters.http_verify_cert)
self.current_event = None
self.misp_org = pymisp.MISPOrganisation()
self.misp_org.name = self.parameters.misp_org_name
self.misp_org.uuid = self.parameters.misp_org_uuid
self.current_event = MISPEvent()
def process(self):
self.current_event = MISPEvent()
self.current_event.info = ('IntelMQ event {begin} - {end}'
''.format(begin=self.min_time_current.isoformat(),
end=self.max_time_current.isoformat()))
self.current_event.set_date(datetime.date.today())
self.current_event.Orgc = self.misp_org
self.current_event.uuid = str(uuid4())
event = self.receive_message().to_dict(jsondict_as_string=True)
obj = self.current_event.add_object(name='intelmq_event')
for object_relation, value in event.items():
try:
obj.add_attribute(object_relation, value=value)
except NewAttributeError:
# This entry isn't listed in the harmonization file, ignoring.
pass
self.acknowledge_message()
@staticmethod
def check(parameters):
pass
BOT = MISPAPIOutputBot
|
Add stub for output bot misp api"""Connect to MISP instance and add event as MISPObject if not reported already.
SPDX-FileCopyrightText: 2020 Intevation GmbH <https://intevation.de>
SPDX-License-Identifier: AGPL-3.0-or-later
Funding: of initial version by SUNET
Author(s):
* Bernhard Reiter <bernhard@intevation.de>
Parameters:
- misp_url: URL of the MISP server
- misp_key: API key for accessing MISP
- misp_verify: true or false, check the validity of the certificate
TODO, this is just a stub, WIP
Tested with pymisp v2.4.120 (which needs python v>=3.6).
"""
import datetime
from uuid import uuid4
from intelmq.lib.bot import OutputBot
from intelmq.lib.exceptions import MissingDependencyError
try:
    import pymisp
    from pymisp import MISPEvent, NewAttributeError, PyMISP
except ImportError:
    MISPEvent = None
class MISPAPIOutputBot(OutputBot):
is_multithreadable = False
def init(self):
if MISPEvent is None:
raise MissingDependencyError('pymisp', version='>=2.4.120')
# Initialize MISP connection
self.misp = PyMISP(self.parameters.misp_url,
self.parameters.misp_key,
self.parameters.http_verify_cert)
self.current_event = None
self.misp_org = pymisp.MISPOrganisation()
self.misp_org.name = self.parameters.misp_org_name
self.misp_org.uuid = self.parameters.misp_org_uuid
self.current_event = MISPEvent()
def process(self):
self.current_event = MISPEvent()
self.current_event.info = ('IntelMQ event {begin} - {end}'
''.format(begin=self.min_time_current.isoformat(),
end=self.max_time_current.isoformat()))
self.current_event.set_date(datetime.date.today())
self.current_event.Orgc = self.misp_org
self.current_event.uuid = str(uuid4())
event = self.receive_message().to_dict(jsondict_as_string=True)
obj = self.current_event.add_object(name='intelmq_event')
for object_relation, value in event.items():
try:
obj.add_attribute(object_relation, value=value)
except NewAttributeError:
# This entry isn't listed in the harmonization file, ignoring.
pass
self.acknowledge_message()
@staticmethod
def check(parameters):
pass
BOT = MISPAPIOutputBot
|
<commit_before><commit_msg>Add stub for output bot misp api<commit_after>"""Connect to MISP instance and add event as MISPObject if not reported already.
SPDX-FileCopyrightText: 2020 Intevation GmbH <https://intevation.de>
SPDX-License-Identifier: AGPL-3.0-or-later
Funding: of initial version by SUNET
Author(s):
* Bernhard Reiter <bernhard@intevation.de>
Parameters:
- misp_url: URL of the MISP server
- misp_key: API key for accessing MISP
- misp_verify: true or false, check the validity of the certificate
TODO, this is just a stub, WIP
Tested with pymisp v2.4.120 (which needs python v>=3.6).
"""
import datetime
from uuid import uuid4
from intelmq.lib.bot import OutputBot
from intelmq.lib.exceptions import MissingDependencyError
try:
    import pymisp
    from pymisp import MISPEvent, NewAttributeError, PyMISP
except ImportError:
    MISPEvent = None
class MISPAPIOutputBot(OutputBot):
is_multithreadable = False
def init(self):
if MISPEvent is None:
raise MissingDependencyError('pymisp', version='>=2.4.120')
# Initialize MISP connection
self.misp = PyMISP(self.parameters.misp_url,
self.parameters.misp_key,
self.parameters.http_verify_cert)
self.current_event = None
self.misp_org = pymisp.MISPOrganisation()
self.misp_org.name = self.parameters.misp_org_name
self.misp_org.uuid = self.parameters.misp_org_uuid
self.current_event = MISPEvent()
def process(self):
self.current_event = MISPEvent()
self.current_event.info = ('IntelMQ event {begin} - {end}'
''.format(begin=self.min_time_current.isoformat(),
end=self.max_time_current.isoformat()))
self.current_event.set_date(datetime.date.today())
self.current_event.Orgc = self.misp_org
self.current_event.uuid = str(uuid4())
event = self.receive_message().to_dict(jsondict_as_string=True)
obj = self.current_event.add_object(name='intelmq_event')
for object_relation, value in event.items():
try:
obj.add_attribute(object_relation, value=value)
except NewAttributeError:
# This entry isn't listed in the harmonization file, ignoring.
pass
self.acknowledge_message()
@staticmethod
def check(parameters):
pass
BOT = MISPAPIOutputBot
|
|
2936357c27dfcb91b55c12c83ac8dd1bbc42be14
|
src/DataSounds/editops.py
|
src/DataSounds/editops.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from difflib import SequenceMatcher
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from sebastian.lilypond.interp import parse
from sebastian.midi.write_midi import SMF
from DataSounds.sounds import build_scale
def tone_down(note, scale):
pos = scale.index(note[0])
    if pos <= 0:
        tone = scale[-1][0] + ','
    else:
        tone = scale[pos - 1]
    return tone
def tone_up(note, scale):
pos = scale.index(note[0])
tone = scale[(pos + 1) % len(scale)]
if pos == len(scale) - 1:
if ',' in note:
note = note[:-1]
else:
tone = tone + "'"
return tone
def get_music(a, b, key='C', mode='major'):
midi_out = StringIO()
scale = build_scale(key, mode, octaves=1)
matcher = SequenceMatcher(None, a, b)
tone = key.lower()
melodies = [tone]
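    # Map diff opcodes to gestures: replace -> rest, equal -> repeat, delete -> step down, insert -> step up.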
for tag, i1, i2, j1, j2 in matcher.get_opcodes():
next_note = None
if tag == 'replace':
next_note = 'r'
elif tag == 'equal':
next_note = tone
elif tag == 'delete':
tone = tone_down(tone, scale)
next_note = tone
elif tag == 'insert':
tone = tone_up(tone, scale)
next_note = tone
melodies += [next_note] * ((i2 - i1) or 1)
s = SMF([parse(" ".join(melodies))])
s.write(midi_out)
return midi_out
|
Add a new method, edit operations.
|
Add a new method, edit operations.
* Still need to think more, but for now we have four operations:
replace, equal, delete, insert. These operations are mapped to
rest note, repeat or stretch note, go down one tone, go up one tone.
* Use sebastian for tone up/tone down, current functions are too hackish.
|
Python
|
bsd-3-clause
|
DataSounds/DataSounds
|
Add a new method, edit operations.
* Still need to think more, but for now we have four operations:
replace, equal, delete, insert. These operations are mapped to
rest note, repeat or stretch note, go down one tone, go up one tone.
* Use sebastian for tone up/tone down, current functions are too hackish.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from difflib import SequenceMatcher
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from sebastian.lilypond.interp import parse
from sebastian.midi.write_midi import SMF
from DataSounds.sounds import build_scale
def tone_down(note, scale):
pos = scale.index(note[0])
    if pos <= 0:
        tone = scale[-1][0] + ','
    else:
        tone = scale[pos - 1]
    return tone
def tone_up(note, scale):
pos = scale.index(note[0])
tone = scale[(pos + 1) % len(scale)]
if pos == len(scale) - 1:
if ',' in note:
note = note[:-1]
else:
tone = tone + "'"
return tone
def get_music(a, b, key='C', mode='major'):
midi_out = StringIO()
scale = build_scale(key, mode, octaves=1)
matcher = SequenceMatcher(None, a, b)
tone = key.lower()
melodies = [tone]
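    # Map diff opcodes to gestures: replace -> rest, equal -> repeat, delete -> step down, insert -> step up.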
for tag, i1, i2, j1, j2 in matcher.get_opcodes():
next_note = None
if tag == 'replace':
next_note = 'r'
elif tag == 'equal':
next_note = tone
elif tag == 'delete':
tone = tone_down(tone, scale)
next_note = tone
elif tag == 'insert':
tone = tone_up(tone, scale)
next_note = tone
melodies += [next_note] * ((i2 - i1) or 1)
s = SMF([parse(" ".join(melodies))])
s.write(midi_out)
return midi_out
|
<commit_before><commit_msg>Add a new method, edit operations.
* Still need to think more, but for now we have four operations:
replace, equal, delete, insert. These operations are mapped to
rest note, repeat or stretch note, go down one tone, go up one tone.
* Use sebastian for tone up/tone down, current functions are too hackish.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from difflib import SequenceMatcher
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from sebastian.lilypond.interp import parse
from sebastian.midi.write_midi import SMF
from DataSounds.sounds import build_scale
def tone_down(note, scale):
pos = scale.index(note[0])
    if pos <= 0:
        tone = scale[-1][0] + ','
    else:
        tone = scale[pos - 1]
    return tone
def tone_up(note, scale):
pos = scale.index(note[0])
tone = scale[(pos + 1) % len(scale)]
if pos == len(scale) - 1:
if ',' in note:
note = note[:-1]
else:
tone = tone + "'"
return tone
def get_music(a, b, key='C', mode='major'):
midi_out = StringIO()
scale = build_scale(key, mode, octaves=1)
matcher = SequenceMatcher(None, a, b)
tone = key.lower()
melodies = [tone]
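    # Map diff opcodes to gestures: replace -> rest, equal -> repeat, delete -> step down, insert -> step up.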
for tag, i1, i2, j1, j2 in matcher.get_opcodes():
next_note = None
if tag == 'replace':
next_note = 'r'
elif tag == 'equal':
next_note = tone
elif tag == 'delete':
tone = tone_down(tone, scale)
next_note = tone
elif tag == 'insert':
tone = tone_up(tone, scale)
next_note = tone
melodies += [next_note] * ((i2 - i1) or 1)
s = SMF([parse(" ".join(melodies))])
s.write(midi_out)
return midi_out
|
Add a new method, edit operations.
* Still need to think more, but for now we have four operations:
replace, equal, delete, insert. These operations are mapped to
rest note, repeat or stretch note, go down one tone, go up one tone.
* Use sebastian for tone up/tone down, current functions are too hackish.#!/usr/bin/env python
# -*- coding: utf-8 -*-
from difflib import SequenceMatcher
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from sebastian.lilypond.interp import parse
from sebastian.midi.write_midi import SMF
from DataSounds.sounds import build_scale
def tone_down(note, scale):
pos = scale.index(note[0])
    if pos <= 0:
        tone = scale[-1][0] + ','
    else:
        tone = scale[pos - 1]
    return tone
def tone_up(note, scale):
pos = scale.index(note[0])
tone = scale[(pos + 1) % len(scale)]
if pos == len(scale) - 1:
if ',' in note:
note = note[:-1]
else:
tone = tone + "'"
return tone
def get_music(a, b, key='C', mode='major'):
midi_out = StringIO()
scale = build_scale(key, mode, octaves=1)
matcher = SequenceMatcher(None, a, b)
tone = key.lower()
melodies = [tone]
for tag, i1, i2, j1, j2 in matcher.get_opcodes():
next_note = None
if tag == 'replace':
next_note = 'r'
elif tag == 'equal':
next_note = tone
elif tag == 'delete':
tone = tone_down(tone, scale)
next_note = tone
elif tag == 'insert':
tone = tone_up(tone, scale)
next_note = tone
melodies += [next_note] * ((i2 - i1) or 1)
s = SMF([parse(" ".join(melodies))])
s.write(midi_out)
return midi_out
|
<commit_before><commit_msg>Add a new method, edit operations.
* Still need to think more, but for now we have four operations:
replace, equal, delete, insert. These operations are mapped to
rest note, repeat or stretch note, go down one tone, go up one tone.
* Use sebastian for tone up/tone down, current functions are too hackish.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from difflib import SequenceMatcher
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from sebastian.lilypond.interp import parse
from sebastian.midi.write_midi import SMF
from DataSounds.sounds import build_scale
def tone_down(note, scale):
    pos = scale.index(note[0])
    if pos <= 0:
        # Wrap below the bottom of the scale: restart at the top tone,
        # one octave down.
        tone = scale[-1][0] + ','
    else:
        # Step down one scale degree (assumed intent for non-edge
        # positions, so `tone` is always bound before the return).
        tone = scale[pos - 1]
    return tone
def tone_up(note, scale):
pos = scale.index(note[0])
tone = scale[(pos + 1) % len(scale)]
if pos == len(scale) - 1:
if ',' in note:
note = note[:-1]
else:
tone = tone + "'"
return tone
def get_music(a, b, key='C', mode='major'):
midi_out = StringIO()
scale = build_scale(key, mode, octaves=1)
matcher = SequenceMatcher(None, a, b)
tone = key.lower()
melodies = [tone]
for tag, i1, i2, j1, j2 in matcher.get_opcodes():
next_note = None
if tag == 'replace':
next_note = 'r'
elif tag == 'equal':
next_note = tone
elif tag == 'delete':
tone = tone_down(tone, scale)
next_note = tone
elif tag == 'insert':
tone = tone_up(tone, scale)
next_note = tone
melodies += [next_note] * ((i2 - i1) or 1)
s = SMF([parse(" ".join(melodies))])
s.write(midi_out)
return midi_out
|
|
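A standalone sketch of the opcode stream that get_music consumes above; the two inputs are hypothetical stand-ins for text revisions, and only stdlib difflib is needed:

from difflib import SequenceMatcher

old = "the quick brown fox"   # hypothetical old revision
new = "the quick red fox!"    # hypothetical new revision

for tag, i1, i2, j1, j2 in SequenceMatcher(None, old, new).get_opcodes():
    # tag is one of 'equal', 'replace', 'delete', 'insert' -- the four
    # operations mapped to notes above.
    print(tag, repr(old[i1:i2]), "->", repr(new[j1:j2]))
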
b3d44b52dac01a2f161e43872e2fdd86a0cd1b3e
|
accounting/apps/clients/urls.py
|
accounting/apps/clients/urls.py
|
from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns('',
# Clients
url(r'^client/$',
views.ClientListView.as_view(),
name="client-list"),
url(r'^client/create/$',
views.ClientCreateView.as_view(),
name="client-create"),
url(r'^client/(?P<pk>\d)/edit/$',
views.ClientUpdateView.as_view(),
name="client-edit"),
url(r'^client/(?P<pk>\d)/detail/$',
views.ClientDetailView.as_view(),
name="client-detail"),
)
|
from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns('',
# Clients
url(r'^client/$',
views.ClientListView.as_view(),
name="client-list"),
url(r'^client/create/$',
views.ClientCreateView.as_view(),
name="client-create"),
url(r'^client/(?P<pk>\d+)/edit/$',
views.ClientUpdateView.as_view(),
name="client-edit"),
url(r'^client/(?P<pk>\d+)/detail/$',
views.ClientDetailView.as_view(),
name="client-detail"),
)
|
Support client after the 9th :dancer:
|
Support client after the 9th :dancer:
|
Python
|
mit
|
kenjhim/django-accounting,dulaccc/django-accounting,dulaccc/django-accounting,kenjhim/django-accounting,dulaccc/django-accounting,dulaccc/django-accounting,kenjhim/django-accounting,kenjhim/django-accounting
|
from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns('',
# Clients
url(r'^client/$',
views.ClientListView.as_view(),
name="client-list"),
url(r'^client/create/$',
views.ClientCreateView.as_view(),
name="client-create"),
url(r'^client/(?P<pk>\d)/edit/$',
views.ClientUpdateView.as_view(),
name="client-edit"),
url(r'^client/(?P<pk>\d)/detail/$',
views.ClientDetailView.as_view(),
name="client-detail"),
)
Support client after the 9th :dancer:
|
from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns('',
# Clients
url(r'^client/$',
views.ClientListView.as_view(),
name="client-list"),
url(r'^client/create/$',
views.ClientCreateView.as_view(),
name="client-create"),
url(r'^client/(?P<pk>\d+)/edit/$',
views.ClientUpdateView.as_view(),
name="client-edit"),
url(r'^client/(?P<pk>\d+)/detail/$',
views.ClientDetailView.as_view(),
name="client-detail"),
)
|
<commit_before>from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns('',
# Clients
url(r'^client/$',
views.ClientListView.as_view(),
name="client-list"),
url(r'^client/create/$',
views.ClientCreateView.as_view(),
name="client-create"),
url(r'^client/(?P<pk>\d)/edit/$',
views.ClientUpdateView.as_view(),
name="client-edit"),
url(r'^client/(?P<pk>\d)/detail/$',
views.ClientDetailView.as_view(),
name="client-detail"),
)
<commit_msg>Support client after the 9th :dancer:<commit_after>
|
from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns('',
# Clients
url(r'^client/$',
views.ClientListView.as_view(),
name="client-list"),
url(r'^client/create/$',
views.ClientCreateView.as_view(),
name="client-create"),
url(r'^client/(?P<pk>\d+)/edit/$',
views.ClientUpdateView.as_view(),
name="client-edit"),
url(r'^client/(?P<pk>\d+)/detail/$',
views.ClientDetailView.as_view(),
name="client-detail"),
)
|
from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns('',
# Clients
url(r'^client/$',
views.ClientListView.as_view(),
name="client-list"),
url(r'^client/create/$',
views.ClientCreateView.as_view(),
name="client-create"),
url(r'^client/(?P<pk>\d)/edit/$',
views.ClientUpdateView.as_view(),
name="client-edit"),
url(r'^client/(?P<pk>\d)/detail/$',
views.ClientDetailView.as_view(),
name="client-detail"),
)
Support client after the 9th :dancer:from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns('',
# Clients
url(r'^client/$',
views.ClientListView.as_view(),
name="client-list"),
url(r'^client/create/$',
views.ClientCreateView.as_view(),
name="client-create"),
url(r'^client/(?P<pk>\d+)/edit/$',
views.ClientUpdateView.as_view(),
name="client-edit"),
url(r'^client/(?P<pk>\d+)/detail/$',
views.ClientDetailView.as_view(),
name="client-detail"),
)
|
<commit_before>from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns('',
# Clients
url(r'^client/$',
views.ClientListView.as_view(),
name="client-list"),
url(r'^client/create/$',
views.ClientCreateView.as_view(),
name="client-create"),
url(r'^client/(?P<pk>\d)/edit/$',
views.ClientUpdateView.as_view(),
name="client-edit"),
url(r'^client/(?P<pk>\d)/detail/$',
views.ClientDetailView.as_view(),
name="client-detail"),
)
<commit_msg>Support client after the 9th :dancer:<commit_after>from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns('',
# Clients
url(r'^client/$',
views.ClientListView.as_view(),
name="client-list"),
url(r'^client/create/$',
views.ClientCreateView.as_view(),
name="client-create"),
url(r'^client/(?P<pk>\d+)/edit/$',
views.ClientUpdateView.as_view(),
name="client-edit"),
url(r'^client/(?P<pk>\d+)/detail/$',
views.ClientDetailView.as_view(),
name="client-detail"),
)
|
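The fix works because `\d` matches exactly one digit, so `/client/10/edit/` stopped resolving once primary keys passed 9; a quick check with plain re:

import re

old_pat = re.compile(r'^client/(?P<pk>\d)/edit/$')
new_pat = re.compile(r'^client/(?P<pk>\d+)/edit/$')

for pk in ('7', '10', '123'):
    path = 'client/%s/edit/' % pk
    # Only the '+'-quantified pattern matches multi-digit pks.
    print(path, bool(old_pat.match(path)), bool(new_pat.match(path)))
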
db409d2d5f8a4862e6119e40852d5305f9dbbb9a
|
scripts/random-system-sound.py
|
scripts/random-system-sound.py
|
import os
import random
import subprocess
SOUNDS_DIR = '/System/Library/Sounds'
SOUNDS = os.listdir(SOUNDS_DIR)
if random.randrange(0, 100) < 4:
to_play = os.path.join(SOUNDS_DIR, random.choice(SOUNDS))
subprocess.call(['afplay', '-v', str(random.choice(range(10, 51))), to_play])
|
Add script to play random system sound
|
Add script to play random system sound
|
Python
|
unlicense
|
epochblue/annoy-a-tron,epochblue/annoy-a-tron
|
Add script to play random system sound
|
import os
import random
import subprocess
SOUNDS_DIR = '/System/Library/Sounds'
SOUNDS = os.listdir(SOUNDS_DIR)
if random.randrange(0, 100) < 4:
to_play = os.path.join(SOUNDS_DIR, random.choice(SOUNDS))
subprocess.call(['afplay', '-v', str(random.choice(range(10, 51))), to_play])
|
<commit_before><commit_msg>Add script to play random system sound<commit_after>
|
import os
import random
import subprocess
SOUNDS_DIR = '/System/Library/Sounds'
SOUNDS = os.listdir(SOUNDS_DIR)
if random.randrange(0, 100) < 4:
to_play = os.path.join(SOUNDS_DIR, random.choice(SOUNDS))
subprocess.call(['afplay', '-v', str(random.choice(range(10, 51))), to_play])
|
Add script to play random system soundimport os
import random
import subprocess
SOUNDS_DIR = '/System/Library/Sounds'
SOUNDS = os.listdir(SOUNDS_DIR)
if random.randrange(0, 100) < 4:
to_play = os.path.join(SOUNDS_DIR, random.choice(SOUNDS))
subprocess.call(['afplay', '-v', str(random.choice(range(10, 51))), to_play])
|
<commit_before><commit_msg>Add script to play random system sound<commit_after>import os
import random
import subprocess
SOUNDS_DIR = '/System/Library/Sounds'
SOUNDS = os.listdir(SOUNDS_DIR)
if random.randrange(0, 100) < 4:
to_play = os.path.join(SOUNDS_DIR, random.choice(SOUNDS))
subprocess.call(['afplay', '-v', str(random.choice(range(10, 51))), to_play])
|
|
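randrange(0, 100) < 4 makes each run play a sound with probability 0.04; a quick empirical check of that rate:

import random

trials = 100000
# Count how many of the simulated runs would have fired.
hits = sum(random.randrange(0, 100) < 4 for _ in range(trials))
print(hits / float(trials))  # ~0.04
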
8bd3aac86d6ea22f87d3f4d88f9916f128c426ee
|
mne/preprocessing/tests/test_maxwell.py
|
mne/preprocessing/tests/test_maxwell.py
|
# Author: Mark Wronkiewicz <wronk@uw.edu>
#
# License: BSD (3-clause)
import os.path as op
import warnings
from nose.tools import assert_true, assert_equal
from numpy.testing import assert_array_almost_equal
import numpy as np
from ...io import Raw
from ..maxwell import _sss_basis, maxwell_filter, _sph_harmonic
from scipy.special import sph_harm as scipy_sph_harm
warnings.simplefilter('always') # Always throw warnings
def test_sss_basis():
"""Test that the multipolar moment basis is computed correctly"""
deg = 1
order = 1
polar = np.array([np.pi / 7., np.pi / 9., np.pi / 20.])
azimuth = np.array([np.pi / 11., np.pi * 1.9, np.pi * 1.3])
# Internal calculation: _sph_harmonic(degree, order, azimuth, polar)
sph_harmonic = _sph_harmonic(deg, order, azimuth, polar)
# Check against scipy: sph_harm(order, degree, azimuth, polar)
sph_harmonic_scipy = np.real(scipy_sph_harm(deg, order, azimuth, polar))
assert_array_almost_equal(sph_harmonic, sph_harmonic_scipy)
def test_maxwell_filter():
"""Test Maxwell filter against precomputed test set"""
pass
|
Add tests for maxwell filtering
|
ENH: Add tests for maxwell filtering
|
Python
|
bsd-3-clause
|
matthew-tucker/mne-python,yousrabk/mne-python,wronk/mne-python,Eric89GXL/mne-python,lorenzo-desantis/mne-python,kingjr/mne-python,mne-tools/mne-python,ARudiuk/mne-python,matthew-tucker/mne-python,adykstra/mne-python,antiface/mne-python,trachelr/mne-python,antiface/mne-python,olafhauk/mne-python,drammock/mne-python,wronk/mne-python,jniediek/mne-python,Teekuningas/mne-python,teonlamont/mne-python,cmoutard/mne-python,olafhauk/mne-python,drammock/mne-python,larsoner/mne-python,jaeilepp/mne-python,pravsripad/mne-python,teonlamont/mne-python,leggitta/mne-python,jaeilepp/mne-python,leggitta/mne-python,Eric89GXL/mne-python,larsoner/mne-python,kingjr/mne-python,dimkal/mne-python,mne-tools/mne-python,olafhauk/mne-python,andyh616/mne-python,lorenzo-desantis/mne-python,pravsripad/mne-python,rkmaddox/mne-python,pravsripad/mne-python,Teekuningas/mne-python,kambysese/mne-python,jmontoyam/mne-python,wmvanvliet/mne-python,bloyl/mne-python,bloyl/mne-python,alexandrebarachant/mne-python,jmontoyam/mne-python,mne-tools/mne-python,Teekuningas/mne-python,adykstra/mne-python,cjayb/mne-python,kingjr/mne-python,yousrabk/mne-python,nicproulx/mne-python,larsoner/mne-python,dimkal/mne-python,cjayb/mne-python,cmoutard/mne-python,kambysese/mne-python,trachelr/mne-python,ARudiuk/mne-python,wmvanvliet/mne-python,drammock/mne-python,rkmaddox/mne-python,wmvanvliet/mne-python,jniediek/mne-python,nicproulx/mne-python,alexandrebarachant/mne-python,andyh616/mne-python
|
ENH: Add tests for maxwell filtering
|
# Author: Mark Wronkiewicz <wronk@uw.edu>
#
# License: BSD (3-clause)
import os.path as op
import warnings
from nose.tools import assert_true, assert_equal
from numpy.testing import assert_array_almost_equal
import numpy as np
from ...io import Raw
from ..maxwell import _sss_basis, maxwell_filter, _sph_harmonic
from scipy.special import sph_harm as scipy_sph_harm
warnings.simplefilter('always') # Always throw warnings
def test_sss_basis():
"""Test that the multipolar moment basis is computed correctly"""
deg = 1
order = 1
polar = np.array([np.pi / 7., np.pi / 9., np.pi / 20.])
azimuth = np.array([np.pi / 11., np.pi * 1.9, np.pi * 1.3])
# Internal calculation: _sph_harmonic(degree, order, azimuth, polar)
sph_harmonic = _sph_harmonic(deg, order, azimuth, polar)
# Check against scipy: sph_harm(order, degree, azimuth, polar)
sph_harmonic_scipy = np.real(scipy_sph_harm(deg, order, azimuth, polar))
assert_array_almost_equal(sph_harmonic, sph_harmonic_scipy)
def test_maxwell_filter():
"""Test Maxwell filter against precomputed test set"""
pass
|
<commit_before><commit_msg>ENH: Add tests for maxwell filtering<commit_after>
|
# Author: Mark Wronkiewicz <wronk@uw.edu>
#
# License: BSD (3-clause)
import os.path as op
import warnings
from nose.tools import assert_true, assert_equal
from numpy.testing import assert_array_almost_equal
import numpy as np
from ...io import Raw
from ..maxwell import _sss_basis, maxwell_filter, _sph_harmonic
from scipy.special import sph_harm as scipy_sph_harm
warnings.simplefilter('always') # Always throw warnings
def test_sss_basis():
"""Test that the multipolar moment basis is computed correctly"""
deg = 1
order = 1
polar = np.array([np.pi / 7., np.pi / 9., np.pi / 20.])
azimuth = np.array([np.pi / 11., np.pi * 1.9, np.pi * 1.3])
# Internal calculation: _sph_harmonic(degree, order, azimuth, polar)
sph_harmonic = _sph_harmonic(deg, order, azimuth, polar)
# Check against scipy: sph_harm(order, degree, azimuth, polar)
sph_harmonic_scipy = np.real(scipy_sph_harm(deg, order, azimuth, polar))
assert_array_almost_equal(sph_harmonic, sph_harmonic_scipy)
def test_maxwell_filter():
"""Test Maxwell filter against precomputed test set"""
pass
|
ENH: Add tests for maxwell filtering# Author: Mark Wronkiewicz <wronk@uw.edu>
#
# License: BSD (3-clause)
import os.path as op
import warnings
from nose.tools import assert_true, assert_equal
from numpy.testing import assert_array_almost_equal
import numpy as np
from ...io import Raw
from ..maxwell import _sss_basis, maxwell_filter, _sph_harmonic
from scipy.special import sph_harm as scipy_sph_harm
warnings.simplefilter('always') # Always throw warnings
def test_sss_basis():
"""Test that the multipolar moment basis is computed correctly"""
deg = 1
order = 1
polar = np.array([np.pi / 7., np.pi / 9., np.pi / 20.])
azimuth = np.array([np.pi / 11., np.pi * 1.9, np.pi * 1.3])
# Internal calculation: _sph_harmonic(degree, order, azimuth, polar)
sph_harmonic = _sph_harmonic(deg, order, azimuth, polar)
# Check against scipy: sph_harm(order, degree, azimuth, polar)
sph_harmonic_scipy = np.real(scipy_sph_harm(deg, order, azimuth, polar))
assert_array_almost_equal(sph_harmonic, sph_harmonic_scipy)
def test_maxwell_filter():
"""Test Maxwell filter against precomputed test set"""
pass
|
<commit_before><commit_msg>ENH: Add tests for maxwell filtering<commit_after># Author: Mark Wronkiewicz <wronk@uw.edu>
#
# License: BSD (3-clause)
import os.path as op
import warnings
from nose.tools import assert_true, assert_equal
from numpy.testing import assert_array_almost_equal
import numpy as np
from ...io import Raw
from ..maxwell import _sss_basis, maxwell_filter, _sph_harmonic
from scipy.special import sph_harm as scipy_sph_harm
warnings.simplefilter('always') # Always throw warnings
def test_sss_basis():
"""Test that the multipolar moment basis is computed correctly"""
deg = 1
order = 1
polar = np.array([np.pi / 7., np.pi / 9., np.pi / 20.])
azimuth = np.array([np.pi / 11., np.pi * 1.9, np.pi * 1.3])
# Internal calculation: _sph_harmonic(degree, order, azimuth, polar)
sph_harmonic = _sph_harmonic(deg, order, azimuth, polar)
# Check against scipy: sph_harm(order, degree, azimuth, polar)
sph_harmonic_scipy = np.real(scipy_sph_harm(deg, order, azimuth, polar))
assert_array_almost_equal(sph_harmonic, sph_harmonic_scipy)
def test_maxwell_filter():
"""Test Maxwell filter against precomputed test set"""
pass
|
|
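One thing the stub should pin down when fleshed out: scipy's signature is sph_harm(m, n, theta, phi) with m the order and n the degree, so the comparison above is unambiguous only because deg == order == 1. A sketch of the asymmetric case:

import numpy as np
from scipy.special import sph_harm

azimuth, polar = np.pi / 11., np.pi / 7.
print(sph_harm(1, 2, azimuth, polar))  # order m=1, degree n=2: defined
print(sph_harm(2, 1, azimuth, polar))  # m=2 > n=1: undefined, yields nan
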
0323189a504f27f14d60c8c3ebdb40ea160d7f79
|
source/clique/collection.py
|
source/clique/collection.py
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import re
class Collection(object):
'''Represent group of items that differ only by numerical component.'''
def __init__(self, head, tail, padding, indexes=None):
'''Initialise collection.
*head* is the leading common part whilst *tail* is the trailing
common part.
*padding* specifies the "width" of the numerical component. An index
will be padded with zeros to fill this width. A *padding* of zero
implies no padding and width may be any size so long as no leading
zeros are present.
*indexes* can specify a set of numerical indexes to initially populate
the collection with.
'''
super(Collection, self).__init__()
self.head = head
self.tail = tail
self.padding = padding
self.indexes = set()
if indexes is not None:
self.indexes.update(indexes)
self._pattern = re.compile('^{0}(?P<index>(?P<padding>0*)\d+?){1}$'
.format(self.head, self.tail))
def __iter__(self):
'''Return iterator over items in collection.'''
def __contains__(self, item):
'''Return whether *item* is present in collection.'''
def match(self, item):
'''Return whether *item* matches this collection pattern.
If a match is successful return data about the match otherwise return
None.
'''
def add(self, item):
'''Add *item* to collection.
raise :py:class:`~clique.error.CollectionError` if *item* cannot be
added to the collection.
'''
def remove(self, item):
'''Remove *item* from collection.
raise :py:class:`~clique.error.CollectionError` if *item* cannot be
removed from the collection.
'''
def format(self, pattern):
'''Return string representation as specified by *pattern*.'''
def is_contiguous(self):
'''Return whether entire collection is contiguous.'''
def holes(self):
'''Return holes in collection.
Return as list of :py:class:`~clique.collection.Collection` instances.
'''
def merge(self, collection):
'''Merge *collection* into this collection.
If the *collection* is compatible with this collection then update
indexes with all indexes in *collection*.
'''
def separate(self):
'''Return contiguous parts of collection as separate collections.
Return as list of :py:class:`~clique.collection.Collection` instances.
'''
|
Add initial interface for Collection class with stubs for methods.
|
Add initial interface for Collection class with stubs for methods.
A Collection will represent a group of items with a common numerical
component.
|
Python
|
apache-2.0
|
4degrees/clique
|
Add initial interface for Collection class with stubs for methods.
A Collection will represent a group of items with a common numerical
component.
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import re
class Collection(object):
'''Represent group of items that differ only by numerical component.'''
def __init__(self, head, tail, padding, indexes=None):
'''Initialise collection.
*head* is the leading common part whilst *tail* is the trailing
common part.
*padding* specifies the "width" of the numerical component. An index
will be padded with zeros to fill this width. A *padding* of zero
implies no padding and width may be any size so long as no leading
zeros are present.
*indexes* can specify a set of numerical indexes to initially populate
the collection with.
'''
super(Collection, self).__init__()
self.head = head
self.tail = tail
self.padding = padding
self.indexes = set()
if indexes is not None:
self.indexes.update(indexes)
self._pattern = re.compile('^{0}(?P<index>(?P<padding>0*)\d+?){1}$'
.format(self.head, self.tail))
def __iter__(self):
'''Return iterator over items in collection.'''
def __contains__(self, item):
'''Return whether *item* is present in collection.'''
def match(self, item):
'''Return whether *item* matches this collection pattern.
If a match is successful return data about the match otherwise return
None.
'''
def add(self, item):
'''Add *item* to collection.
raise :py:class:`~clique.error.CollectionError` if *item* cannot be
added to the collection.
'''
def remove(self, item):
'''Remove *item* from collection.
raise :py:class:`~clique.error.CollectionError` if *item* cannot be
removed from the collection.
'''
def format(self, pattern):
'''Return string representation as specified by *pattern*.'''
def is_contiguous(self):
'''Return whether entire collection is contiguous.'''
def holes(self):
'''Return holes in collection.
Return as list of :py:class:`~clique.collection.Collection` instances.
'''
def merge(self, collection):
'''Merge *collection* into this collection.
If the *collection* is compatible with this collection then update
indexes with all indexes in *collection*.
'''
def separate(self):
'''Return contiguous parts of collection as separate collections.
Return as list of :py:class:`~clique.collection.Collection` instances.
'''
|
<commit_before><commit_msg>Add initial interface for Collection class with stubs for methods.
A Collection will represent a group of items with a common numerical
component.<commit_after>
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import re
class Collection(object):
'''Represent group of items that differ only by numerical component.'''
def __init__(self, head, tail, padding, indexes=None):
'''Initialise collection.
*head* is the leading common part whilst *tail* is the trailing
common part.
*padding* specifies the "width" of the numerical component. An index
will be padded with zeros to fill this width. A *padding* of zero
implies no padding and width may be any size so long as no leading
zeros are present.
*indexes* can specify a set of numerical indexes to initially populate
the collection with.
'''
super(Collection, self).__init__()
self.head = head
self.tail = tail
self.padding = padding
self.indexes = set()
if indexes is not None:
self.indexes.update(indexes)
self._pattern = re.compile('^{0}(?P<index>(?P<padding>0*)\d+?){1}$'
.format(self.head, self.tail))
def __iter__(self):
'''Return iterator over items in collection.'''
def __contains__(self, item):
'''Return whether *item* is present in collection.'''
def match(self, item):
'''Return whether *item* matches this collection pattern.
If a match is successful return data about the match otherwise return
None.
'''
def add(self, item):
'''Add *item* to collection.
raise :py:class:`~clique.error.CollectionError` if *item* cannot be
added to the collection.
'''
def remove(self, item):
'''Remove *item* from collection.
raise :py:class:`~clique.error.CollectionError` if *item* cannot be
removed from the collection.
'''
def format(self, pattern):
'''Return string representation as specified by *pattern*.'''
def is_contiguous(self):
'''Return whether entire collection is contiguous.'''
def holes(self):
'''Return holes in collection.
Return as list of :py:class:`~clique.collection.Collection` instances.
'''
def merge(self, collection):
'''Merge *collection* into this collection.
If the *collection* is compatible with this collection then update
indexes with all indexes in *collection*.
'''
def separate(self):
'''Return contiguous parts of collection as separate collections.
Return as list of :py:class:`~clique.collection.Collection` instances.
'''
|
Add initial interface for Collection class with stubs for methods.
A Collection will represent a group of items with a common numerical
component.# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import re
class Collection(object):
'''Represent group of items that differ only by numerical component.'''
def __init__(self, head, tail, padding, indexes=None):
'''Initialise collection.
*head* is the leading common part whilst *tail* is the trailing
common part.
*padding* specifies the "width" of the numerical component. An index
will be padded with zeros to fill this width. A *padding* of zero
implies no padding and width may be any size so long as no leading
zeros are present.
*indexes* can specify a set of numerical indexes to initially populate
the collection with.
'''
super(Collection, self).__init__()
self.head = head
self.tail = tail
self.padding = padding
self.indexes = set()
if indexes is not None:
self.indexes.update(indexes)
self._pattern = re.compile('^{0}(?P<index>(?P<padding>0*)\d+?){1}$'
.format(self.head, self.tail))
def __iter__(self):
'''Return iterator over items in collection.'''
def __contains__(self, item):
'''Return whether *item* is present in collection.'''
def match(self, item):
'''Return whether *item* matches this collection pattern.
If a match is successful return data about the match otherwise return
None.
'''
def add(self, item):
'''Add *item* to collection.
raise :py:class:`~clique.error.CollectionError` if *item* cannot be
added to the collection.
'''
def remove(self, item):
'''Remove *item* from collection.
raise :py:class:`~clique.error.CollectionError` if *item* cannot be
removed from the collection.
'''
def format(self, pattern):
'''Return string representation as specified by *pattern*.'''
def is_contiguous(self):
'''Return whether entire collection is contiguous.'''
def holes(self):
'''Return holes in collection.
Return as list of :py:class:`~clique.collection.Collection` instances.
'''
def merge(self, collection):
'''Merge *collection* into this collection.
If the *collection* is compatible with this collection then update
indexes with all indexes in *collection*.
'''
def separate(self):
'''Return contiguous parts of collection as separate collections.
Return as list of :py:class:`~clique.collection.Collection` instances.
'''
|
<commit_before><commit_msg>Add initial interface for Collection class with stubs for methods.
A Collection will represent a group of items with a common numerical
component.<commit_after># :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import re
class Collection(object):
'''Represent group of items that differ only by numerical component.'''
def __init__(self, head, tail, padding, indexes=None):
'''Initialise collection.
*head* is the leading common part whilst *tail* is the trailing
common part.
*padding* specifies the "width" of the numerical component. An index
will be padded with zeros to fill this width. A *padding* of zero
implies no padding and width may be any size so long as no leading
zeros are present.
*indexes* can specify a set of numerical indexes to initially populate
the collection with.
'''
super(Collection, self).__init__()
self.head = head
self.tail = tail
self.padding = padding
self.indexes = set()
if indexes is not None:
self.indexes.update(indexes)
self._pattern = re.compile('^{0}(?P<index>(?P<padding>0*)\d+?){1}$'
.format(self.head, self.tail))
def __iter__(self):
'''Return iterator over items in collection.'''
def __contains__(self, item):
'''Return whether *item* is present in collection.'''
def match(self, item):
'''Return whether *item* matches this collection pattern.
If a match is successful return data about the match otherwise return
None.
'''
def add(self, item):
'''Add *item* to collection.
raise :py:class:`~clique.error.CollectionError` if *item* cannot be
added to the collection.
'''
def remove(self, item):
'''Remove *item* from collection.
raise :py:class:`~clique.error.CollectionError` if *item* cannot be
removed from the collection.
'''
def format(self, pattern):
'''Return string representation as specified by *pattern*.'''
def is_contiguous(self):
'''Return whether entire collection is contiguous.'''
def holes(self):
'''Return holes in collection.
Return as list of :py:class:`~clique.collection.Collection` instances.
'''
def merge(self, collection):
'''Merge *collection* into this collection.
If the *collection* is compatible with this collection then update
indexes with all indexes in *collection*.
'''
def separate(self):
'''Return contiguous parts of collection as separate collections.
Return as list of :py:class:`~clique.collection.Collection` instances.
'''
|
|
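A sketch of what the compiled pattern above captures, with hypothetical head/tail values (a production version may also want re.escape around head and tail, since '.' is a metacharacter):

import re

head, tail = 'frame.', '.exr'  # hypothetical item parts
pattern = re.compile(r'^{0}(?P<index>(?P<padding>0*)\d+?){1}$'
                     .format(head, tail))

for item in ('frame.0001.exr', 'frame.12.exr', 'frame.exr'):
    match = pattern.match(item)
    print(item, match.groupdict() if match else None)
# frame.0001.exr -> {'index': '0001', 'padding': '000'}
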
d79a6877efab4ee4346bc085cb564a4e315e6293
|
scripts/convert_kanjidic2.py
|
scripts/convert_kanjidic2.py
|
#!/usr/bin/env python2
import json
with open("kanjidic2.json", "rb") as f:
raw = f.read()
kanjidic2 = json.loads(raw)
# Makes a dictionary where each key is a kanji, instead of a big list of objects
transformed = {kanjidic2[i]["literal"]: kanjidic2[i]
for i in xrange(len(kanjidic2))}
with open("kanji.json", "wb") as f:
f.write(json.dumps(transformed))
|
Add script for converting kanjidic2
|
Add script for converting kanjidic2
|
Python
|
apache-2.0
|
Tenchi2xh/rem-v2,Tenchi2xh/rem-v2,Tenchi2xh/rem-v2
|
Add script for converting kanjidic2
|
#!/usr/bin/env python2
import json
with open("kanjidic2.json", "rb") as f:
raw = f.read()
kanjidic2 = json.loads(raw)
# Makes a dictionary where each key is a kanji, instead of a big list of objects
transformed = {kanjidic2[i]["literal"]: kanjidic2[i]
for i in xrange(len(kanjidic2))}
with open("kanji.json", "wb") as f:
f.write(json.dumps(transformed))
|
<commit_before><commit_msg>Add script for converting kanjidic2<commit_after>
|
#!/usr/bin/env python2
import json
with open("kanjidic2.json", "rb") as f:
raw = f.read()
kanjidic2 = json.loads(raw)
# Makes a dictionary where each key is a kanji, instead of a big list of objects
transformed = {kanjidic2[i]["literal"]: kanjidic2[i]
for i in xrange(len(kanjidic2))}
with open("kanji.json", "wb") as f:
f.write(json.dumps(transformed))
|
Add script for converting kanjidic2#!/usr/bin/env python2
import json
with open("kanjidic2.json", "rb") as f:
raw = f.read()
kanjidic2 = json.loads(raw)
# Makes a dictionary where each key is a kanji, instead of a big list of objects
transformed = {kanjidic2[i]["literal"]: kanjidic2[i]
for i in xrange(len(kanjidic2))}
with open("kanji.json", "wb") as f:
f.write(json.dumps(transformed))
|
<commit_before><commit_msg>Add script for converting kanjidic2<commit_after>#!/usr/bin/env python2
import json
with open("kanjidic2.json", "rb") as f:
raw = f.read()
kanjidic2 = json.loads(raw)
# Makes a dictionary where each key is a kanji, instead of a big list of objects
transformed = {kanjidic2[i]["literal"]: kanjidic2[i]
for i in xrange(len(kanjidic2))}
with open("kanji.json", "wb") as f:
f.write(json.dumps(transformed))
|
|
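The same reshaping reads more directly as a comprehension over the entries themselves, which also drops the Python-2-only xrange (a sketch assuming the same file layout):

import json

with open("kanjidic2.json") as f:
    kanjidic2 = json.load(f)

# Key each record by its literal kanji instead of its list position.
transformed = {entry["literal"]: entry for entry in kanjidic2}

with open("kanji.json", "w") as f:
    json.dump(transformed, f)
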
ec894ced711d7c4ad6c0e0146792df8b05b998b3
|
cyder/base/eav/tests/test_utils.py
|
cyder/base/eav/tests/test_utils.py
|
from django.core.exceptions import ValidationError
from django.test import TestCase
from cyder.base.eav import utils as u
class TestUtils(TestCase):
def _valid(self, func, value_list):
for value in value_list:
self.assertTrue(func(value))
def _invalid(self, func, value_list):
for value in value_list:
self.assertFalse(func(value))
def test_is_hex_byte(self):
self._valid(u.is_hex_byte, ('01', '23', '45', '67', '89', 'ab', 'cd',
'ef', 'AB', 'CD', 'EF'))
self._invalid(u.is_hex_byte, (
'012', # too many digits
'no', # invalid byte
'0x01', # '0x' not allowed (we already know it's hex)
'-1a', # negative bytes not allowed
))
def test_is_hex_byte_sequence(self):
self._valid(u.is_hex_byte_sequence, ('01', '01:23:45:67:89:ab:cd:ef'))
self._invalid(u.is_hex_byte_sequence, (
'012:34', # invalid byte
'01::23', # too many consecutive colons
'01:', # trailing colon
':01', # leading colon
'yes:no', # invalid bytes
))
|
Add tests for is_hex_byte and is_hex_byte_sequence
|
Add tests for is_hex_byte and is_hex_byte_sequence
|
Python
|
bsd-3-clause
|
OSU-Net/cyder,murrown/cyder,murrown/cyder,akeym/cyder,drkitty/cyder,akeym/cyder,drkitty/cyder,zeeman/cyder,OSU-Net/cyder,akeym/cyder,drkitty/cyder,OSU-Net/cyder,murrown/cyder,drkitty/cyder,OSU-Net/cyder,zeeman/cyder,zeeman/cyder,akeym/cyder,murrown/cyder,zeeman/cyder
|
Add tests for is_hex_byte and is_hex_byte_sequence
|
from django.core.exceptions import ValidationError
from django.test import TestCase
from cyder.base.eav import utils as u
class TestUtils(TestCase):
def _valid(self, func, value_list):
for value in value_list:
self.assertTrue(func(value))
def _invalid(self, func, value_list):
for value in value_list:
self.assertFalse(func(value))
def test_is_hex_byte(self):
self._valid(u.is_hex_byte, ('01', '23', '45', '67', '89', 'ab', 'cd',
'ef', 'AB', 'CD', 'EF'))
self._invalid(u.is_hex_byte, (
'012', # too many digits
'no', # invalid byte
'0x01', # '0x' not allowed (we already know it's hex)
'-1a', # negative bytes not allowed
))
def test_is_hex_byte_sequence(self):
self._valid(u.is_hex_byte_sequence, ('01', '01:23:45:67:89:ab:cd:ef'))
self._invalid(u.is_hex_byte_sequence, (
'012:34', # invalid byte
'01::23', # too many consecutive colons
'01:', # trailing colon
':01', # leading colon
'yes:no', # invalid bytes
))
|
<commit_before><commit_msg>Add tests for is_hex_byte and is_hex_byte_sequence<commit_after>
|
from django.core.exceptions import ValidationError
from django.test import TestCase
from cyder.base.eav import utils as u
class TestUtils(TestCase):
def _valid(self, func, value_list):
for value in value_list:
self.assertTrue(func(value))
def _invalid(self, func, value_list):
for value in value_list:
self.assertFalse(func(value))
def test_is_hex_byte(self):
self._valid(u.is_hex_byte, ('01', '23', '45', '67', '89', 'ab', 'cd',
'ef', 'AB', 'CD', 'EF'))
self._invalid(u.is_hex_byte, (
'012', # too many digits
'no', # invalid byte
'0x01', # '0x' not allowed (we already know it's hex)
'-1a', # negative bytes not allowed
))
def test_is_hex_byte_sequence(self):
self._valid(u.is_hex_byte_sequence, ('01', '01:23:45:67:89:ab:cd:ef'))
self._invalid(u.is_hex_byte_sequence, (
'012:34', # invalid byte
'01::23', # too many consecutive colons
'01:', # trailing colon
':01', # leading colon
'yes:no', # invalid bytes
))
|
Add tests for is_hex_byte and is_hex_byte_sequencefrom django.core.exceptions import ValidationError
from django.test import TestCase
from cyder.base.eav import utils as u
class TestUtils(TestCase):
def _valid(self, func, value_list):
for value in value_list:
self.assertTrue(func(value))
def _invalid(self, func, value_list):
for value in value_list:
self.assertFalse(func(value))
def test_is_hex_byte(self):
self._valid(u.is_hex_byte, ('01', '23', '45', '67', '89', 'ab', 'cd',
'ef', 'AB', 'CD', 'EF'))
self._invalid(u.is_hex_byte, (
'012', # too many digits
'no', # invalid byte
'0x01', # '0x' not allowed (we already know it's hex)
'-1a', # negative bytes not allowed
))
def test_is_hex_byte_sequence(self):
self._valid(u.is_hex_byte_sequence, ('01', '01:23:45:67:89:ab:cd:ef'))
self._invalid(u.is_hex_byte_sequence, (
'012:34', # invalid byte
'01::23', # too many consecutive colons
'01:', # trailing colon
':01', # leading colon
'yes:no', # invalid bytes
))
|
<commit_before><commit_msg>Add tests for is_hex_byte and is_hex_byte_sequence<commit_after>from django.core.exceptions import ValidationError
from django.test import TestCase
from cyder.base.eav import utils as u
class TestUtils(TestCase):
def _valid(self, func, value_list):
for value in value_list:
self.assertTrue(func(value))
def _invalid(self, func, value_list):
for value in value_list:
self.assertFalse(func(value))
def test_is_hex_byte(self):
self._valid(u.is_hex_byte, ('01', '23', '45', '67', '89', 'ab', 'cd',
'ef', 'AB', 'CD', 'EF'))
self._invalid(u.is_hex_byte, (
'012', # too many digits
'no', # invalid byte
'0x01', # '0x' not allowed (we already know it's hex)
'-1a', # negative bytes not allowed
))
def test_is_hex_byte_sequence(self):
self._valid(u.is_hex_byte_sequence, ('01', '01:23:45:67:89:ab:cd:ef'))
self._invalid(u.is_hex_byte_sequence, (
'012:34', # invalid byte
'01::23', # too many consecutive colons
'01:', # trailing colon
':01', # leading colon
'yes:no', # invalid bytes
))
|
|
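The validators under test are not shown here; one minimal implementation that satisfies every case above -- not necessarily cyder's actual code, and single-digit bytes, which the tests don't pin down, are rejected:

import re

_HEX_BYTE = re.compile(r'^[0-9a-fA-F]{2}$')

def is_hex_byte(value):
    # Exactly two hex digits; rejects '012', '0x01', '-1a', 'no'.
    return bool(_HEX_BYTE.match(value))

def is_hex_byte_sequence(value):
    # Colon-separated bytes; the empty parts produced by '01::23',
    # '01:' and ':01' all fail the per-byte check.
    return all(is_hex_byte(part) for part in value.split(':'))
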
af75ac1c52026e704f586dbd2dd709bb43a9184e
|
extruder/extrude_template.py
|
extruder/extrude_template.py
|
from __future__ import print_function, division, absolute_import
from argparse import ArgumentParser
def main(args=None):
if args is None:
parser = ArgumentParser('Tool for generating skeleton package-'
'building directory.')
parser.add_argument('--appveyor-secret', default='Fill me in',
help="Appveyor secret containing BINSTAR_TOKEN")
parser.add_argument('--travis-secret', default='Fill me in',
help="Travis-CI secret containing BINSTAR_TOKEN")
args = parser.parse_args()
if __name__ == '__main__':
main()
|
Add stub of script to generate a skeleton build directory
|
Add stub of script to generate a skeleton build directory
|
Python
|
bsd-3-clause
|
astropy/conda-build-tools,astropy/conda-build-tools
|
Add stub of script to generate a skeleton build directory
|
from __future__ import print_function, division, absolute_import
from argparse import ArgumentParser
def main(args=None):
if args is None:
parser = ArgumentParser('Tool for generating skeleton package-'
'building directory.')
parser.add_argument('--appveyor-secret', default='Fill me in',
help="Appveyor secret containing BINSTAR_TOKEN")
parser.add_argument('--travis-secret', default='Fill me in',
help="Travis-CI secret containing BINSTAR_TOKEN")
args = parser.parse_args()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add stub of script to generate a skeleton build directory<commit_after>
|
from __future__ import print_function, division, absolute_import
from argparse import ArgumentParser
def main(args=None):
if args is None:
parser = ArgumentParser('Tool for generating skeleton package-'
'building directory.')
parser.add_argument('--appveyor-secret', default='Fill me in',
help="Appveyor secret containing BINSTAR_TOKEN")
parser.add_argument('--travis-secret', default='Fill me in',
help="Travis-CI secret containing BINSTAR_TOKEN")
args = parser.parse_args()
if __name__ == '__main__':
main()
|
Add stub of script to generate a skeleton build directoryfrom __future__ import print_function, division, absolute_import
from argparse import ArgumentParser
def main(args=None):
if args is None:
parser = ArgumentParser('Tool for generating skeleton package-'
'building directory.')
parser.add_argument('--appveyor-secret', default='Fill me in',
help="Appveyor secret containing BINSTAR_TOKEN")
parser.add_argument('--travis-secret', default='Fill me in',
help="Travis-CI secret containing BINSTAR_TOKEN")
args = parser.parse_args()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add stub of script to generate a skeleton build directory<commit_after>from __future__ import print_function, division, absolute_import
from argparse import ArgumentParser
def main(args=None):
if args is None:
parser = ArgumentParser('Tool for generating skeleton package-'
'building directory.')
parser.add_argument('--appveyor-secret', default='Fill me in',
help="Appveyor secret containing BINSTAR_TOKEN")
parser.add_argument('--travis-secret', default='Fill me in',
help="Travis-CI secret containing BINSTAR_TOKEN")
args = parser.parse_args()
if __name__ == '__main__':
main()
|
|
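A wrinkle worth knowing when the stub grows: ArgumentParser's first positional parameter is prog, not the help text, so the description should be passed by keyword to avoid a mangled usage line:

from argparse import ArgumentParser

parser = ArgumentParser(description='Tool for generating skeleton '
                                    'package-building directory.')
parser.add_argument('--appveyor-secret', default='Fill me in',
                    help="Appveyor secret containing BINSTAR_TOKEN")
args = parser.parse_args(['--appveyor-secret', 'abc123'])  # sample argv
print(args.appveyor_secret)
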
256829c6ab4b04ce1b576d26597f4630e050309c
|
src/ggrc_gdrive_integration/migrations/versions/20170311201102_5405cc1ae721_fix_audit_context_on_folders.py
|
src/ggrc_gdrive_integration/migrations/versions/20170311201102_5405cc1ae721_fix_audit_context_on_folders.py
|
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""
Fix audit context on folders
Create Date: 2017-03-11 20:11:02.652252
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from alembic import op
# revision identifiers, used by Alembic.
revision = '5405cc1ae721'
down_revision = '395186a2d8'
def upgrade():
"""Upgrade database schema and/or data, creating a new revision."""
# Clear object_folder
sql = """
UPDATE object_folders AS of
JOIN audits AS au ON
of.folderable_id = au.id AND
of.folderable_type = "Audit"
SET of.context_id = au.context_id
"""
op.execute(sql)
def downgrade():
"""Downgrade database schema and/or data back to the previous revision."""
pass
|
Fix audit context for object_folders
|
Fix audit context for object_folders
|
Python
|
apache-2.0
|
AleksNeStu/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,AleksNeStu/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,plamut/ggrc-core
|
Fix audit context for object_folders
|
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""
Fix audit context on folders
Create Date: 2017-03-11 20:11:02.652252
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from alembic import op
# revision identifiers, used by Alembic.
revision = '5405cc1ae721'
down_revision = '395186a2d8'
def upgrade():
"""Upgrade database schema and/or data, creating a new revision."""
# Clear object_folder
sql = """
UPDATE object_folders AS of
JOIN audits AS au ON
of.folderable_id = au.id AND
of.folderable_type = "Audit"
SET of.context_id = au.context_id
"""
op.execute(sql)
def downgrade():
"""Downgrade database schema and/or data back to the previous revision."""
pass
|
<commit_before><commit_msg>Fix audit context for object_folders<commit_after>
|
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""
Fix audit context on folders
Create Date: 2017-03-11 20:11:02.652252
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from alembic import op
# revision identifiers, used by Alembic.
revision = '5405cc1ae721'
down_revision = '395186a2d8'
def upgrade():
"""Upgrade database schema and/or data, creating a new revision."""
# Clear object_folder
sql = """
UPDATE object_folders AS of
JOIN audits AS au ON
of.folderable_id = au.id AND
of.folderable_type = "Audit"
SET of.context_id = au.context_id
"""
op.execute(sql)
def downgrade():
"""Downgrade database schema and/or data back to the previous revision."""
pass
|
Fix audit context for object_folders# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""
Fix audit context on folders
Create Date: 2017-03-11 20:11:02.652252
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from alembic import op
# revision identifiers, used by Alembic.
revision = '5405cc1ae721'
down_revision = '395186a2d8'
def upgrade():
"""Upgrade database schema and/or data, creating a new revision."""
# Clear object_folder
sql = """
UPDATE object_folders AS of
JOIN audits AS au ON
of.folderable_id = au.id AND
of.folderable_type = "Audit"
SET of.context_id = au.context_id
"""
op.execute(sql)
def downgrade():
"""Downgrade database schema and/or data back to the previous revision."""
pass
|
<commit_before><commit_msg>Fix audit context for object_folders<commit_after># Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""
Fix audit context on folders
Create Date: 2017-03-11 20:11:02.652252
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from alembic import op
# revision identifiers, used by Alembic.
revision = '5405cc1ae721'
down_revision = '395186a2d8'
def upgrade():
"""Upgrade database schema and/or data, creating a new revision."""
# Clear object_folder
sql = """
UPDATE object_folders AS of
JOIN audits AS au ON
of.folderable_id = au.id AND
of.folderable_type = "Audit"
SET of.context_id = au.context_id
"""
op.execute(sql)
def downgrade():
"""Downgrade database schema and/or data back to the previous revision."""
pass
|
|
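A hypothetical follow-up check for the upgrade, using alembic's op.get_bind() to reach the connection: after the UPDATE, no Audit folder should disagree with its audit's context.

from alembic import op

def verify_contexts():
    # Hypothetical sanity check; counts folders still out of sync.
    count = op.get_bind().execute("""
        SELECT COUNT(*)
        FROM object_folders AS of_
        JOIN audits AS au
          ON of_.folderable_id = au.id AND
             of_.folderable_type = 'Audit'
        WHERE of_.context_id != au.context_id
    """).scalar()
    assert count == 0
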
1909e42563dff9b711795ac4da2a954dd53737cd
|
tempest/api/identity/admin/v3/test_users_negative.py
|
tempest/api/identity/admin/v3/test_users_negative.py
|
# Copyright 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib import exceptions as lib_exc
from tempest.api.identity import base
from tempest.common.utils import data_utils
from tempest import test
class UsersNegativeTest(base.BaseIdentityV3AdminTest):
@test.attr(type=['negative'])
@test.idempotent_id('e75f006c-89cc-477b-874d-588e4eab4b17')
def test_create_user_for_non_existent_domain(self):
# Attempt to create a user in a non-existent domain should fail
u_name = data_utils.rand_name('user')
u_email = u_name + '@testmail.tm'
u_password = data_utils.rand_name('pass')
self.assertRaises(lib_exc.NotFound, self.client.create_user,
u_name, u_password,
email=u_email,
domain_id=data_utils.rand_uuid_hex())
|
Add keystone v3 user negative cases
|
Add keystone v3 user negative cases
Implement the keystone v3 user negative case:
test_create_user_for_non_existent_domain
Change-Id: I644cfb0bea4abe2932a759ff86f446043170488d
Partial-Bug: 1513748
|
Python
|
apache-2.0
|
LIS/lis-tempest,Juniper/tempest,sebrandon1/tempest,openstack/tempest,masayukig/tempest,Tesora/tesora-tempest,zsoltdudas/lis-tempest,bigswitch/tempest,masayukig/tempest,vedujoshi/tempest,LIS/lis-tempest,sebrandon1/tempest,Juniper/tempest,bigswitch/tempest,Tesora/tesora-tempest,cisco-openstack/tempest,openstack/tempest,cisco-openstack/tempest,zsoltdudas/lis-tempest,vedujoshi/tempest
|
Add keystone v3 user negative cases
Implement the keystone v3 user negative case:
test_create_user_for_non_existent_domain
Change-Id: I644cfb0bea4abe2932a759ff86f446043170488d
Partial-Bug: 1513748
|
# Copyright 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib import exceptions as lib_exc
from tempest.api.identity import base
from tempest.common.utils import data_utils
from tempest import test
class UsersNegativeTest(base.BaseIdentityV3AdminTest):
@test.attr(type=['negative'])
@test.idempotent_id('e75f006c-89cc-477b-874d-588e4eab4b17')
def test_create_user_for_non_existent_domain(self):
# Attempt to create a user in a non-existent domain should fail
u_name = data_utils.rand_name('user')
u_email = u_name + '@testmail.tm'
u_password = data_utils.rand_name('pass')
self.assertRaises(lib_exc.NotFound, self.client.create_user,
u_name, u_password,
email=u_email,
domain_id=data_utils.rand_uuid_hex())
|
<commit_before><commit_msg>Add keystone v3 user negative cases
Implement the keystone v3 user negative case:
test_create_user_for_non_existent_domain
Change-Id: I644cfb0bea4abe2932a759ff86f446043170488d
Partial-Bug: 1513748<commit_after>
|
# Copyright 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib import exceptions as lib_exc
from tempest.api.identity import base
from tempest.common.utils import data_utils
from tempest import test
class UsersNegativeTest(base.BaseIdentityV3AdminTest):
@test.attr(type=['negative'])
@test.idempotent_id('e75f006c-89cc-477b-874d-588e4eab4b17')
def test_create_user_for_non_existent_domain(self):
# Attempt to create a user in a non-existent domain should fail
u_name = data_utils.rand_name('user')
u_email = u_name + '@testmail.tm'
u_password = data_utils.rand_name('pass')
self.assertRaises(lib_exc.NotFound, self.client.create_user,
u_name, u_password,
email=u_email,
domain_id=data_utils.rand_uuid_hex())
|
Add keystone v3 user negative cases
Implement the keystone v3 user negative case:
test_create_user_for_non_existent_domain
Change-Id: I644cfb0bea4abe2932a759ff86f446043170488d
Partial-Bug: 1513748# Copyright 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib import exceptions as lib_exc
from tempest.api.identity import base
from tempest.common.utils import data_utils
from tempest import test
class UsersNegativeTest(base.BaseIdentityV3AdminTest):
@test.attr(type=['negative'])
@test.idempotent_id('e75f006c-89cc-477b-874d-588e4eab4b17')
def test_create_user_for_non_existent_domain(self):
# Attempt to create a user in a non-existent domain should fail
u_name = data_utils.rand_name('user')
u_email = u_name + '@testmail.tm'
u_password = data_utils.rand_name('pass')
self.assertRaises(lib_exc.NotFound, self.client.create_user,
u_name, u_password,
email=u_email,
domain_id=data_utils.rand_uuid_hex())
|
<commit_before><commit_msg>Add keystone v3 user negative cases
Implement the keystone v3 user negative case:
test_create_user_for_non_existent_domain
Change-Id: I644cfb0bea4abe2932a759ff86f446043170488d
Partial-Bug: 1513748<commit_after># Copyright 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib import exceptions as lib_exc
from tempest.api.identity import base
from tempest.common.utils import data_utils
from tempest import test
class UsersNegativeTest(base.BaseIdentityV3AdminTest):
@test.attr(type=['negative'])
@test.idempotent_id('e75f006c-89cc-477b-874d-588e4eab4b17')
def test_create_user_for_non_existent_domain(self):
# Attempt to create a user in a non-existent domain should fail
u_name = data_utils.rand_name('user')
u_email = u_name + '@testmail.tm'
u_password = data_utils.rand_name('pass')
self.assertRaises(lib_exc.NotFound, self.client.create_user,
u_name, u_password,
email=u_email,
domain_id=data_utils.rand_uuid_hex())
|
|
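An equivalent context-manager form of the same negative check, which keeps the exception assertion scoped to the single client call (same base class and fixtures assumed):

def test_create_user_for_non_existent_domain(self):
    u_name = data_utils.rand_name('user')
    u_password = data_utils.rand_name('pass')
    # Only the create_user call itself may raise NotFound.
    with self.assertRaises(lib_exc.NotFound):
        self.client.create_user(u_name, u_password,
                                email=u_name + '@testmail.tm',
                                domain_id=data_utils.rand_uuid_hex())
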
4f88f23104c2f15e2f0919931033ce9047a68036
|
src/diamond/handler/Handler.py
|
src/diamond/handler/Handler.py
|
# coding=utf-8
import logging
import threading
import traceback
class Handler(object):
"""
Handlers process metrics that are collected by Collectors.
"""
def __init__(self, config=None):
"""
Create a new instance of the Handler class
"""
# Initialize Log
self.log = logging.getLogger('diamond')
# Initialize Data
self.config = config
# Initialize Lock
self.lock = threading.Condition(threading.Lock())
def _process(self, metric):
"""
Decorator for processing handlers with a lock, catching exceptions
"""
try:
try:
self.log.debug("Running Handler %s locked" % (self))
self.lock.acquire()
self.process(metric)
self.lock.release()
except Exception:
self.log.error(traceback.format_exc())
finally:
self.lock.release()
self.log.debug("Unlocked Handler %s" % (self))
def process(self, metric):
"""
Process a metric
Should be overridden in subclasses
"""
raise NotImplementedError
def flush(self):
"""
Flush metrics
Optional: Should be overridden in subclasses
"""
pass
|
# coding=utf-8
import logging
import threading
import traceback
class Handler(object):
"""
Handlers process metrics that are collected by Collectors.
"""
def __init__(self, config=None):
"""
Create a new instance of the Handler class
"""
# Initialize Log
self.log = logging.getLogger('diamond')
# Initialize Data
self.config = config
# Initialize Lock
self.lock = threading.Lock()
def _process(self, metric):
"""
Decorator for processing handlers with a lock, catching exceptions
"""
try:
try:
self.log.debug("Running Handler %s locked" % (self))
self.lock.acquire()
self.process(metric)
self.lock.release()
except Exception:
self.log.error(traceback.format_exc())
finally:
if self.lock.locked():
self.lock.release()
self.log.debug("Unlocked Handler %s" % (self))
def process(self, metric):
"""
Process a metric
Should be overridden in subclasses
"""
raise NotImplementedError
def flush(self):
"""
Flush metrics
Optional: Should be overridden in subclasses
"""
pass
|
Change to using threading.Lock instead of threading.Condition, and check lock state before trying to release it during the finally block
|
Change to using threading.Lock instead of threading.Condition, and check lock state before trying to release it during the finally block
|
Python
|
mit
|
zoidbergwill/Diamond,Netuitive/Diamond,EzyInsights/Diamond,mfriedenhagen/Diamond,codepython/Diamond,thardie/Diamond,acquia/Diamond,cannium/Diamond,timchenxiaoyu/Diamond,Clever/Diamond,janisz/Diamond-1,datafiniti/Diamond,tuenti/Diamond,krbaker/Diamond,Ensighten/Diamond,mfriedenhagen/Diamond,tusharmakkar08/Diamond,sebbrandt87/Diamond,eMerzh/Diamond-1,zoidbergwill/Diamond,ceph/Diamond,EzyInsights/Diamond,socialwareinc/Diamond,Precis/Diamond,mzupan/Diamond,TinLe/Diamond,MediaMath/Diamond,anandbhoraskar/Diamond,eMerzh/Diamond-1,Netuitive/Diamond,tuenti/Diamond,szibis/Diamond,hamelg/Diamond,Slach/Diamond,datafiniti/Diamond,zoidbergwill/Diamond,Precis/Diamond,tellapart/Diamond,Ssawa/Diamond,Slach/Diamond,mfriedenhagen/Diamond,python-diamond/Diamond,TAKEALOT/Diamond,Nihn/Diamond-1,janisz/Diamond-1,rtoma/Diamond,jaingaurav/Diamond,CYBERBUGJR/Diamond,python-diamond/Diamond,jriguera/Diamond,ramjothikumar/Diamond,timchenxiaoyu/Diamond,mzupan/Diamond,joel-airspring/Diamond,codepython/Diamond,Ormod/Diamond,CYBERBUGJR/Diamond,metamx/Diamond,Basis/Diamond,skbkontur/Diamond,hamelg/Diamond,actmd/Diamond,ceph/Diamond,works-mobile/Diamond,skbkontur/Diamond,Ormod/Diamond,actmd/Diamond,mzupan/Diamond,Clever/Diamond,dcsquared13/Diamond,saucelabs/Diamond,hvnsweeting/Diamond,codepython/Diamond,saucelabs/Diamond,stuartbfox/Diamond,szibis/Diamond,gg7/diamond,tellapart/Diamond,russss/Diamond,hamelg/Diamond,signalfx/Diamond,tusharmakkar08/Diamond,Nihn/Diamond-1,dcsquared13/Diamond,CYBERBUGJR/Diamond,szibis/Diamond,MichaelDoyle/Diamond,disqus/Diamond,acquia/Diamond,works-mobile/Diamond,Netuitive/netuitive-diamond,rtoma/Diamond,eMerzh/Diamond-1,szibis/Diamond,TinLe/Diamond,Slach/Diamond,saucelabs/Diamond,Clever/Diamond,zoidbergwill/Diamond,jaingaurav/Diamond,Ensighten/Diamond,skbkontur/Diamond,Nihn/Diamond-1,jriguera/Diamond,dcsquared13/Diamond,dcsquared13/Diamond,russss/Diamond,TinLe/Diamond,Netuitive/netuitive-diamond,datafiniti/Diamond,signalfx/Diamond,thardie/Diamond,stuartbfox/Diamond,jumping/Diamond,cannium/Diamond,Ormod/Diamond,MediaMath/Diamond,works-mobile/Diamond,gg7/diamond,tuenti/Diamond,Precis/Diamond,saucelabs/Diamond,MichaelDoyle/Diamond,Ensighten/Diamond,tuenti/Diamond,Slach/Diamond,mzupan/Diamond,Netuitive/Diamond,thardie/Diamond,datafiniti/Diamond,codepython/Diamond,jumping/Diamond,mfriedenhagen/Diamond,MediaMath/Diamond,ramjothikumar/Diamond,joel-airspring/Diamond,disqus/Diamond,disqus/Diamond,bmhatfield/Diamond,jumping/Diamond,works-mobile/Diamond,krbaker/Diamond,metamx/Diamond,ramjothikumar/Diamond,TAKEALOT/Diamond,bmhatfield/Diamond,gg7/diamond,signalfx/Diamond,anandbhoraskar/Diamond,jaingaurav/Diamond,bmhatfield/Diamond,socialwareinc/Diamond,tellapart/Diamond,jriguera/Diamond,hvnsweeting/Diamond,tusharmakkar08/Diamond,tusharmakkar08/Diamond,ceph/Diamond,krbaker/Diamond,Ensighten/Diamond,janisz/Diamond-1,hvnsweeting/Diamond,krbaker/Diamond,eMerzh/Diamond-1,CYBERBUGJR/Diamond,socialwareinc/Diamond,signalfx/Diamond,stuartbfox/Diamond,tellapart/Diamond,Precis/Diamond,joel-airspring/Diamond,TinLe/Diamond,Ssawa/Diamond,hvnsweeting/Diamond,h00dy/Diamond,thardie/Diamond,acquia/Diamond,Netuitive/netuitive-diamond,sebbrandt87/Diamond,MichaelDoyle/Diamond,EzyInsights/Diamond,sebbrandt87/Diamond,Ormod/Diamond,h00dy/Diamond,jaingaurav/Diamond,Clever/Diamond,sebbrandt87/Diamond,Basis/Diamond,ramjothikumar/Diamond,actmd/Diamond,ceph/Diamond,Netuitive/netuitive-diamond,cannium/Diamond,cannium/Diamond,janisz/Diamond-1,jriguera/Diamond,hamelg/Diamond,anandbhoraskar/Diamond,h00dy/Diamond,skbkontur/Diamond,python-diamond/Diamond,Ssawa/Diamond,EzyInsights/Diamond,socialwareinc/Diamond,Nihn/Diamond-1,bmhatfield/Diamond,joel-airspring/Diamond,rtoma/Diamond,russss/Diamond,Basis/Diamond,stuartbfox/Diamond,Netuitive/Diamond,rtoma/Diamond,MichaelDoyle/Diamond,Basis/Diamond,anandbhoraskar/Diamond,TAKEALOT/Diamond,russss/Diamond,h00dy/Diamond,Ssawa/Diamond,actmd/Diamond,jumping/Diamond,acquia/Diamond,MediaMath/Diamond,timchenxiaoyu/Diamond,TAKEALOT/Diamond,gg7/diamond,metamx/Diamond
|
# coding=utf-8
import logging
import threading
import traceback
class Handler(object):
"""
Handlers process metrics that are collected by Collectors.
"""
def __init__(self, config=None):
"""
Create a new instance of the Handler class
"""
# Initialize Log
self.log = logging.getLogger('diamond')
# Initialize Data
self.config = config
# Initialize Lock
self.lock = threading.Condition(threading.Lock())
def _process(self, metric):
"""
Decorator for processing handlers with a lock, catching exceptions
"""
try:
try:
self.log.debug("Running Handler %s locked" % (self))
self.lock.acquire()
self.process(metric)
self.lock.release()
except Exception:
self.log.error(traceback.format_exc())
finally:
self.lock.release()
self.log.debug("Unlocked Handler %s" % (self))
def process(self, metric):
"""
Process a metric
Should be overridden in subclasses
"""
raise NotImplementedError
def flush(self):
"""
Flush metrics
Optional: Should be overridden in subclasses
"""
pass
Change to using threading.Lock instead of threading.Condition, and check lock state before trying to release it during the finally block
|
# coding=utf-8
import logging
import threading
import traceback
class Handler(object):
"""
Handlers process metrics that are collected by Collectors.
"""
def __init__(self, config=None):
"""
Create a new instance of the Handler class
"""
# Initialize Log
self.log = logging.getLogger('diamond')
# Initialize Data
self.config = config
# Initialize Lock
self.lock = threading.Lock()
def _process(self, metric):
"""
Decorator for processing handlers with a lock, catching exceptions
"""
try:
try:
self.log.debug("Running Handler %s locked" % (self))
self.lock.acquire()
self.process(metric)
self.lock.release()
except Exception:
self.log.error(traceback.format_exc())
finally:
if self.lock.locked():
self.lock.release()
self.log.debug("Unlocked Handler %s" % (self))
def process(self, metric):
"""
Process a metric
Should be overridden in subclasses
"""
raise NotImplementedError
def flush(self):
"""
Flush metrics
Optional: Should be overridden in subclasses
"""
pass
|
<commit_before># coding=utf-8
import logging
import threading
import traceback
class Handler(object):
"""
Handlers process metrics that are collected by Collectors.
"""
def __init__(self, config=None):
"""
Create a new instance of the Handler class
"""
# Initialize Log
self.log = logging.getLogger('diamond')
# Initialize Data
self.config = config
# Initialize Lock
self.lock = threading.Condition(threading.Lock())
def _process(self, metric):
"""
Decorator for processing handlers with a lock, catching exceptions
"""
try:
try:
self.log.debug("Running Handler %s locked" % (self))
self.lock.acquire()
self.process(metric)
self.lock.release()
except Exception:
self.log.error(traceback.format_exc())
finally:
self.lock.release()
self.log.debug("Unlocked Handler %s" % (self))
def process(self, metric):
"""
Process a metric
Should be overridden in subclasses
"""
raise NotImplementedError
def flush(self):
"""
Flush metrics
Optional: Should be overridden in subclasses
"""
pass
<commit_msg>Change to using threading.Lock instead of threading.Condition, and check lock state before trying to release it during the finally block<commit_after>
|
# coding=utf-8
import logging
import threading
import traceback
class Handler(object):
"""
Handlers process metrics that are collected by Collectors.
"""
def __init__(self, config=None):
"""
Create a new instance of the Handler class
"""
# Initialize Log
self.log = logging.getLogger('diamond')
# Initialize Data
self.config = config
# Initialize Lock
self.lock = threading.Lock()
def _process(self, metric):
"""
Decorator for processing handlers with a lock, catching exceptions
"""
try:
try:
self.log.debug("Running Handler %s locked" % (self))
self.lock.acquire()
self.process(metric)
self.lock.release()
except Exception:
self.log.error(traceback.format_exc())
finally:
if self.lock.locked():
self.lock.release()
self.log.debug("Unlocked Handler %s" % (self))
def process(self, metric):
"""
Process a metric
Should be overridden in subclasses
"""
raise NotImplementedError
def flush(self):
"""
Flush metrics
Optional: Should be overridden in subclasses
"""
pass
|
# coding=utf-8
import logging
import threading
import traceback
class Handler(object):
"""
Handlers process metrics that are collected by Collectors.
"""
def __init__(self, config=None):
"""
Create a new instance of the Handler class
"""
# Initialize Log
self.log = logging.getLogger('diamond')
# Initialize Data
self.config = config
# Initialize Lock
self.lock = threading.Condition(threading.Lock())
def _process(self, metric):
"""
Decorator for processing handlers with a lock, catching exceptions
"""
try:
try:
self.log.debug("Running Handler %s locked" % (self))
self.lock.acquire()
self.process(metric)
self.lock.release()
except Exception:
self.log.error(traceback.format_exc())
finally:
self.lock.release()
self.log.debug("Unlocked Handler %s" % (self))
def process(self, metric):
"""
Process a metric
Should be overridden in subclasses
"""
raise NotImplementedError
def flush(self):
"""
Flush metrics
Optional: Should be overridden in subclasses
"""
pass
Change to using threading.Lock instead of threading.Condition, and check lock state before trying to release it during the finally block# coding=utf-8
import logging
import threading
import traceback
class Handler(object):
"""
Handlers process metrics that are collected by Collectors.
"""
def __init__(self, config=None):
"""
Create a new instance of the Handler class
"""
# Initialize Log
self.log = logging.getLogger('diamond')
# Initialize Data
self.config = config
# Initialize Lock
self.lock = threading.Lock()
def _process(self, metric):
"""
Decorator for processing handlers with a lock, catching exceptions
"""
try:
try:
self.log.debug("Running Handler %s locked" % (self))
self.lock.acquire()
self.process(metric)
self.lock.release()
except Exception:
self.log.error(traceback.format_exc())
finally:
if self.lock.locked():
self.lock.release()
self.log.debug("Unlocked Handler %s" % (self))
def process(self, metric):
"""
Process a metric
Should be overridden in subclasses
"""
raise NotImplementedError
def flush(self):
"""
Flush metrics
Optional: Should be overridden in subclasses
"""
pass
|
<commit_before># coding=utf-8
import logging
import threading
import traceback
class Handler(object):
"""
Handlers process metrics that are collected by Collectors.
"""
def __init__(self, config=None):
"""
Create a new instance of the Handler class
"""
# Initialize Log
self.log = logging.getLogger('diamond')
# Initialize Data
self.config = config
# Initialize Lock
self.lock = threading.Condition(threading.Lock())
def _process(self, metric):
"""
Decorator for processing handlers with a lock, catching exceptions
"""
try:
try:
self.log.debug("Running Handler %s locked" % (self))
self.lock.acquire()
self.process(metric)
self.lock.release()
except Exception:
self.log.error(traceback.format_exc())
finally:
self.lock.release()
self.log.debug("Unlocked Handler %s" % (self))
def process(self, metric):
"""
Process a metric
Should be overridden in subclasses
"""
raise NotImplementedError
def flush(self):
"""
Flush metrics
Optional: Should be overridden in subclasses
"""
pass
<commit_msg>Change to using threading.Lock instead of threading.Condition, and check lock state before trying to release it during the finally block<commit_after># coding=utf-8
import logging
import threading
import traceback
class Handler(object):
"""
Handlers process metrics that are collected by Collectors.
"""
def __init__(self, config=None):
"""
Create a new instance of the Handler class
"""
# Initialize Log
self.log = logging.getLogger('diamond')
# Initialize Data
self.config = config
# Initialize Lock
self.lock = threading.Lock()
def _process(self, metric):
"""
Decorator for processing handlers with a lock, catching exceptions
"""
try:
try:
self.log.debug("Running Handler %s locked" % (self))
self.lock.acquire()
self.process(metric)
self.lock.release()
except Exception:
self.log.error(traceback.format_exc())
finally:
if self.lock.locked():
self.lock.release()
self.log.debug("Unlocked Handler %s" % (self))
def process(self, metric):
"""
Process a metric
Should be overridden in subclasses
"""
raise NotImplementedError
def flush(self):
"""
Flush metrics
Optional: Should be overridden in subclasses
"""
pass
|
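A side note on the locking pattern in this record: the fixed version still pairs manual acquire()/release() calls with a locked() check. A minimal sketch of the more idiomatic alternative is a with-block, which acquires the lock and guarantees exactly one release on every exit path; process_locked and handler here are illustrative names, not Diamond API:
import logging
import threading
import traceback
log = logging.getLogger('diamond')
lock = threading.Lock()
def process_locked(handler, metric):
    # the context manager releases the lock even if process() raises,
    # so no locked() bookkeeping is needed
    with lock:
        try:
            handler.process(metric)
        except Exception:
            log.error(traceback.format_exc())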
a2841c9e5c1f69f14c3870381def382cc6d0ddf9
|
mrbelvedereci/repository/migrations/0002_auto_20171028_1509.py
|
mrbelvedereci/repository/migrations/0002_auto_20171028_1509.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-10-28 15:09
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('repository', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='branch',
options={'ordering': ['repo__name', 'repo__owner', 'name'], 'verbose_name_plural': 'branches'},
),
migrations.AlterModelOptions(
name='repository',
options={'ordering': ['name', 'owner'], 'verbose_name_plural': 'repositories'},
),
]
|
Add migration for Meta changes on Branch and Repository
|
Add migration for Meta changes on Branch and Repository
|
Python
|
bsd-3-clause
|
SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci
|
Add migration for Meta changes on Branch and Repository
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-10-28 15:09
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('repository', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='branch',
options={'ordering': ['repo__name', 'repo__owner', 'name'], 'verbose_name_plural': 'branches'},
),
migrations.AlterModelOptions(
name='repository',
options={'ordering': ['name', 'owner'], 'verbose_name_plural': 'repositories'},
),
]
|
<commit_before><commit_msg>Add migration for Meta changes on Branch and Repository<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-10-28 15:09
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('repository', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='branch',
options={'ordering': ['repo__name', 'repo__owner', 'name'], 'verbose_name_plural': 'branches'},
),
migrations.AlterModelOptions(
name='repository',
options={'ordering': ['name', 'owner'], 'verbose_name_plural': 'repositories'},
),
]
|
Add migration for Meta changes on Branch and Repository# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-10-28 15:09
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('repository', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='branch',
options={'ordering': ['repo__name', 'repo__owner', 'name'], 'verbose_name_plural': 'branches'},
),
migrations.AlterModelOptions(
name='repository',
options={'ordering': ['name', 'owner'], 'verbose_name_plural': 'repositories'},
),
]
|
<commit_before><commit_msg>Add migration for Meta changes on Branch and Repository<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-10-28 15:09
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('repository', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='branch',
options={'ordering': ['repo__name', 'repo__owner', 'name'], 'verbose_name_plural': 'branches'},
),
migrations.AlterModelOptions(
name='repository',
options={'ordering': ['name', 'owner'], 'verbose_name_plural': 'repositories'},
),
]
|
|
f2820ad7022c6db78186d04d11a1ded1f02fa7e6
|
landlab/components/stream_power/examples/test_voronoi_sp.py
|
landlab/components/stream_power/examples/test_voronoi_sp.py
|
from landlab import VoronoiDelaunayGrid # , RasterModelGrid
from landlab.components.flow_routing.route_flow_dn import FlowRouter
from landlab.components.stream_power.stream_power import StreamPowerEroder
import numpy as np
x, y = np.random.rand(50), np.random.rand(50)
mg = VoronoiDelaunayGrid(x,y)
#mg = RasterModelGrid(4,5)
mg.add_field('node', 'topographic__elevation', mg.node_x, copy=True)
fr = FlowRouter(mg)
spe = StreamPowerEroder(mg, 'drive_sp_params.txt')
for i in xrange(100):
fr.route_flow()
spe.erode(1.)
|
Test driver for Voronoi stream power
|
Test driver for Voronoi stream power
What it says on the tin. Very simple.
|
Python
|
mit
|
RondaStrauch/landlab,laijingtao/landlab,Carralex/landlab,cmshobe/landlab,Carralex/landlab,amandersillinois/landlab,ManuSchmi88/landlab,cmshobe/landlab,ManuSchmi88/landlab,SiccarPoint/landlab,SiccarPoint/landlab,landlab/landlab,landlab/landlab,Carralex/landlab,RondaStrauch/landlab,ManuSchmi88/landlab,csherwood-usgs/landlab,csherwood-usgs/landlab,RondaStrauch/landlab,laijingtao/landlab,cmshobe/landlab,landlab/landlab,amandersillinois/landlab
|
Test driver for Voronoi stream power
What it says on the tin. Very simple.
|
from landlab import VoronoiDelaunayGrid # , RasterModelGrid
from landlab.components.flow_routing.route_flow_dn import FlowRouter
from landlab.components.stream_power.stream_power import StreamPowerEroder
import numpy as np
x, y = np.random.rand(50), np.random.rand(50)
mg = VoronoiDelaunayGrid(x,y)
#mg = RasterModelGrid(4,5)
mg.add_field('node', 'topographic__elevation', mg.node_x, copy=True)
fr = FlowRouter(mg)
spe = StreamPowerEroder(mg, 'drive_sp_params.txt')
for i in xrange(100):
fr.route_flow()
spe.erode(1.)
|
<commit_before><commit_msg>Test driver for Voronoi stream power
What it says on the tin. Very simple.<commit_after>
|
from landlab import VoronoiDelaunayGrid # , RasterModelGrid
from landlab.components.flow_routing.route_flow_dn import FlowRouter
from landlab.components.stream_power.stream_power import StreamPowerEroder
import numpy as np
x, y = np.random.rand(50), np.random.rand(50)
mg = VoronoiDelaunayGrid(x,y)
#mg = RasterModelGrid(4,5)
mg.add_field('node', 'topographic__elevation', mg.node_x, copy=True)
fr = FlowRouter(mg)
spe = StreamPowerEroder(mg, 'drive_sp_params.txt')
for i in xrange(100):
fr.route_flow()
spe.erode(1.)
|
Test driver for Voronoi stream power
What it says on the tin. Very simple.from landlab import VoronoiDelaunayGrid # , RasterModelGrid
from landlab.components.flow_routing.route_flow_dn import FlowRouter
from landlab.components.stream_power.stream_power import StreamPowerEroder
import numpy as np
x, y = np.random.rand(50), np.random.rand(50)
mg = VoronoiDelaunayGrid(x,y)
#mg = RasterModelGrid(4,5)
mg.add_field('node', 'topographic__elevation', mg.node_x, copy=True)
fr = FlowRouter(mg)
spe = StreamPowerEroder(mg, 'drive_sp_params.txt')
for i in xrange(100):
fr.route_flow()
spe.erode(1.)
|
<commit_before><commit_msg>Test driver for Voronoi stream power
What it says on the tin. Very simple.<commit_after>from landlab import VoronoiDelaunayGrid # , RasterModelGrid
from landlab.components.flow_routing.route_flow_dn import FlowRouter
from landlab.components.stream_power.stream_power import StreamPowerEroder
import numpy as np
x, y = np.random.rand(50), np.random.rand(50)
mg = VoronoiDelaunayGrid(x,y)
#mg = RasterModelGrid(4,5)
mg.add_field('node', 'topographic__elevation', mg.node_x, copy=True)
fr = FlowRouter(mg)
spe = StreamPowerEroder(mg, 'drive_sp_params.txt')
for i in xrange(100):
fr.route_flow()
spe.erode(1.)
|
|
deb281740b10855f1d419445b29d07e41619da49
|
src/inv_kinematics.py
|
src/inv_kinematics.py
|
#!/usr/bin/env python3
import math
def get_angles(x, y, z):
"""Return joint angles based on passed position."""
# Values below are given in millimeters
b = 35 # Shoulder width
l1 = 120 # Shoulder to elbow length
l2 = 100 # Elbow to hand length
# Horizontal distance from shoulder to hand
d = math.sqrt(x ** 2 + y ** 2)
# Hand from shoulder offset on z axis
z_ = z - b
# Helper
cosineTheta2 = (d ** 2 + z_ ** 2 - l1 ** 2 - l2 ** 2) / 2 / l1 / l2
# Shoulder rotation
theta0 = math.atan2(y, x)
# Elbow bend angle (from the law of cosines)
theta2 = math.atan2(math.sqrt(1 - (cosineTheta2) ** 2), cosineTheta2)
# Shoulder lift angle
theta1 = math.atan2(z_, d) - \
math.atan2(l2 * math.sin(theta2), l1 + l2 * math.cos(theta2))
# Round values to specific angles
result = []
for value in [theta0, theta1, theta2]:
# Convert from radians to degrees
value = round(math.degrees(value))
# If the value is negative the angle lies in the 3rd or 4th quadrant;
# convert it to an absolute angle value
if value < 0:
value = 360 + value
result.append(value)
return result
def main():
from server import main
main()
if __name__ == '__main__':
main()
|
Add function to solve inverse kinematics task
|
Add function to solve inverse kinematics task
|
Python
|
mit
|
saleone/bachelor-thesis
|
Add function to solve inverse kinematics task
|
#!/usr/bin/env python3
import math
def get_angles(x, y, z):
"""Return joint angles based on passed position."""
# Values below are given in millimeters
b = 35 # Shoulder width
l1 = 120 # Shoulder to elbow length
l2 = 100 # Elbow to hand length
# Horizontal distance from shoulder to hand
d = math.sqrt(x ** 2 + y ** 2)
# Hand from shoulder offset on z axis
z_ = z - b
# Helper
cosineTheta2 = (d ** 2 + z_ ** 2 - l1 ** 2 - l2 ** 2) / 2 / l1 / l2
# Shoulder rotation
theta0 = math.atan2(y, x)
# Elbow bend angle (from the law of cosines)
theta2 = math.atan2(math.sqrt(1 - (cosineTheta2) ** 2), cosineTheta2)
# Shoulder lift angle
theta1 = math.atan2(z_, d) - \
math.atan2(l2 * math.sin(theta2), l1 + l2 * math.cos(theta2))
# Round values to specific angles
result = []
for value in [theta0, theta1, theta2]:
# Convert from radians to degrees
value = round(math.degrees(value))
# If the value is negative the angle lies in the 3rd or 4th quadrant;
# convert it to an absolute angle value
if value < 0:
value = 360 + value
result.append(value)
return result
def main():
from server import main
main()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add function to solve inverse kinematics task<commit_after>
|
#!/usr/bin/env python3
import math
def get_angles(x, y, z):
"""Return joint angles based on passed position."""
# Values below are given in millimeters
b = 35 # Shoulder width
l1 = 120 # Shoulder to elbow length
l2 = 100 # Elbow to hand length
# Horizontal distance from shoulder to hand
d = math.sqrt(x ** 2 + y ** 2)
# Hand from shoulder offset on z axis
z_ = z - b
# Helper
cosineTheta2 = (d ** 2 + z_ ** 2 - l1 ** 2 - l2 ** 2) / 2 / l1 / l2
# Shoulder rotation
theta0 = math.atan2(y, x)
# Elbow bend angle (from the law of cosines)
theta2 = math.atan2(math.sqrt(1 - (cosineTheta2) ** 2), cosineTheta2)
# Shoulder lift angle
theta1 = math.atan2(z_, d) - \
math.atan2(l2 * math.sin(theta2), l1 + l2 * math.cos(theta2))
# Round values to specific angles
result = []
for value in [theta0, theta1, theta2]:
# Convert from radians to degrees
value = round(math.degrees(value))
# If the value is negative the angle lies in the 3rd or 4th quadrant;
# convert it to an absolute angle value
if value < 0:
value = 360 + value
result.append(value)
return result
def main():
from server import main
main()
if __name__ == '__main__':
main()
|
Add function to solve inverse kinematics task#!/usr/bin/env python3
import math
def get_angles(x, y, z):
"""Return joint angles based on passed position."""
# Values below are given in millimeters
b = 35 # Shoulder width
l1 = 120 # Shoulder to elbow length
l2 = 100 # Elbow to hand length
# Horizontal distance from shoulder to hand
d = math.sqrt(x ** 2 + y ** 2)
# Hand from shoulder offset on z axis
z_ = z - b
# Helper
cosineTheta2 = (d ** 2 + z_ ** 2 - l1 ** 2 - l2 ** 2) / 2 / l1 / l2
# Shoulder rotation
theta0 = math.atan2(y, x)
# Elbow bend angle (from the law of cosines)
theta2 = math.atan2(math.sqrt(1 - (cosineTheta2) ** 2), cosineTheta2)
# Shoulder lift angle
theta1 = math.atan2(z_, d) - \
math.atan2(l2 * math.sin(theta2), l1 + l2 * math.cos(theta2))
# Round values to specific angles
result = []
for value in [theta0, theta1, theta2]:
# Convert from radians to degrees
value = round(math.degrees(value))
# If the value is negative the angle lies in the 3rd or 4th quadrant;
# convert it to an absolute angle value
if value < 0:
value = 360 + value
result.append(value)
return result
def main():
from server import main
main()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add function to solve inverse kinematics task<commit_after>#!/usr/bin/env python3
import math
def get_angles(x, y, z):
"""Return joint angles based on passed position."""
# Values below are given in millimeters
b = 35 # Shoulder width
l1 = 120 # Shoulder to elbow length
l2 = 100 # Elbow to hand length
# Horizontal distance from shoulder to hand
d = math.sqrt(x ** 2 + y ** 2)
# Hand from shoulder offset on z axis
z_ = z - b
# Helper
cosineTheta2 = (d ** 2 + z_ ** 2 - l1 ** 2 - l2 ** 2) / 2 / l1 / l2
# Shoulder rotation
theta0 = math.atan2(y, x)
# Elbow bend angle (from the law of cosines)
theta2 = math.atan2(math.sqrt(1 - (cosineTheta2) ** 2), cosineTheta2)
# Shoulder lift angle
theta1 = math.atan2(z_, d) - \
math.atan2(l2 * math.sin(theta2), l1 + l2 * math.cos(theta2))
# Round values to specific angles
result = []
for value in [theta0, theta1, theta2]:
# Convert from radians to degrees
value = round(math.degrees(value))
# If the value is negative the angle lies in the 3rd or 4th quadrant;
# convert it to an absolute angle value
if value < 0:
value = 360 + value
result.append(value)
return result
def main():
from server import main
main()
if __name__ == '__main__':
main()
|
|
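A quick sanity check for get_angles above: with the link lengths hard-coded in the function (l1 = 120, l2 = 100, b = 35), a hand position at full reach straight along the x axis should give a fully extended arm, i.e. all joint angles zero. A minimal sketch under that assumption; this check is illustrative, not part of the original module:
# x = l1 + l2 puts the hand at maximum reach; z = b keeps it level
# with the shoulder, so theta0, theta1 and theta2 should all be 0
angles = get_angles(120 + 100, 0, 35)
assert angles == [0, 0, 0], angles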
011111f423d8a50fc66a383c0f28c76a9854105a
|
heat/tests/test_barbican_client.py
|
heat/tests/test_barbican_client.py
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heat.tests import common
from heat.tests import utils
class BarbicanClientPluginTest(common.HeatTestCase):
def test_create(self):
context = utils.dummy_context()
plugin = context.clients.client_plugin('barbican')
client = plugin.client()
self.assertIsNotNone(client.orders)
|
Add test for barbican client
|
Add test for barbican client
Change-Id: I9236e62f3259cfd560e3837830191012d107e853
Partial-Bug: #1461967
|
Python
|
apache-2.0
|
jasondunsmore/heat,cryptickp/heat,noironetworks/heat,cwolferh/heat-scratch,maestro-hybrid-cloud/heat,noironetworks/heat,rh-s/heat,miguelgrinberg/heat,jasondunsmore/heat,steveb/heat,gonzolino/heat,dragorosson/heat,pratikmallya/heat,srznew/heat,steveb/heat,dims/heat,pratikmallya/heat,dragorosson/heat,cwolferh/heat-scratch,takeshineshiro/heat,srznew/heat,openstack/heat,gonzolino/heat,openstack/heat,takeshineshiro/heat,maestro-hybrid-cloud/heat,dims/heat,cryptickp/heat,rh-s/heat,miguelgrinberg/heat
|
Add test for barbican client
Change-Id: I9236e62f3259cfd560e3837830191012d107e853
Partial-Bug: #1461967
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heat.tests import common
from heat.tests import utils
class BarbicanClientPluginTest(common.HeatTestCase):
def test_create(self):
context = utils.dummy_context()
plugin = context.clients.client_plugin('barbican')
client = plugin.client()
self.assertIsNotNone(client.orders)
|
<commit_before><commit_msg>Add test for barbican client
Change-Id: I9236e62f3259cfd560e3837830191012d107e853
Partial-Bug: #1461967<commit_after>
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heat.tests import common
from heat.tests import utils
class BarbicanClientPluginTest(common.HeatTestCase):
def test_create(self):
context = utils.dummy_context()
plugin = context.clients.client_plugin('barbican')
client = plugin.client()
self.assertIsNotNone(client.orders)
|
Add test for barbican client
Change-Id: I9236e62f3259cfd560e3837830191012d107e853
Partial-Bug: #1461967#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heat.tests import common
from heat.tests import utils
class BarbicanClientPluginTest(common.HeatTestCase):
def test_create(self):
context = utils.dummy_context()
plugin = context.clients.client_plugin('barbican')
client = plugin.client()
self.assertIsNotNone(client.orders)
|
<commit_before><commit_msg>Add test for barbican client
Change-Id: I9236e62f3259cfd560e3837830191012d107e853
Partial-Bug: #1461967<commit_after>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heat.tests import common
from heat.tests import utils
class BarbicanClientPluginTest(common.HeatTestCase):
def test_create(self):
context = utils.dummy_context()
plugin = context.clients.client_plugin('barbican')
client = plugin.client()
self.assertIsNotNone(client.orders)
|
|
653e8e229e27001ff66d7c9f886fae272e0446b9
|
examples/trafos/wavelet_trafo.py
|
examples/trafos/wavelet_trafo.py
|
# Copyright 2014-2016 The ODL development group
#
# This file is part of ODL.
#
# ODL is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ODL is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ODL. If not, see <http://www.gnu.org/licenses/>.
"""Simple example on the usage of the Wavelet Transform."""
import odl
# Discretized space: discretized functions on the rectangle [-1, 1] x [-1, 1]
# with 256 samples per dimension.
space = odl.uniform_discr([-1, -1], [1, 1], (256, 256))
# Make the Wavelet transform operator on this space. The range is calculated
# automatically. The default backend is PyWavelets (pywt).
wavelet_op = odl.trafos.WaveletTransform(space, nscales=2, wbasis='Haar')
# Create a phantom and its wavelet transform and display them.
phantom = odl.phantom.shepp_logan(space, modified=True)
phantom.show(title='Shepp-Logan phantom')
# Note that the wavelet transform is a vector in rn.
phantom_wt = wavelet_op(phantom)
phantom_wt.show(title='wavelet transform')
# It may however (for some choices of wbasis) be interpreted as a vector in the
# domain of the transformation
phantom_wt_2d = space.element(phantom_wt)
phantom_wt_2d.show('wavelet transform in 2d')
# Calculate the inverse transform.
phantom_wt_inv = wavelet_op.inverse(phantom_wt)
phantom_wt_inv.show(title='wavelet transform inverted')
|
Add example on how to use WaveletTransform
|
DOC: Add example on how to use WaveletTransform
|
Python
|
mpl-2.0
|
odlgroup/odl,aringh/odl,kohr-h/odl,aringh/odl,odlgroup/odl,kohr-h/odl
|
DOC: Add example on how to use WaveletTransform
|
# Copyright 2014-2016 The ODL development group
#
# This file is part of ODL.
#
# ODL is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ODL is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ODL. If not, see <http://www.gnu.org/licenses/>.
"""Simple example on the usage of the Wavelet Transform."""
import odl
# Discretized space: discretized functions on the rectangle [-1, 1] x [-1, 1]
# with 256 samples per dimension.
space = odl.uniform_discr([-1, -1], [1, 1], (256, 256))
# Make the Wavelet transform operator on this space. The range is calculated
# automatically. The default backend is PyWavelets (pywt).
wavelet_op = odl.trafos.WaveletTransform(space, nscales=2, wbasis='Haar')
# Create a phantom and its wavelet transform and display them.
phantom = odl.phantom.shepp_logan(space, modified=True)
phantom.show(title='Shepp-Logan phantom')
# Note that the wavelet transform is a vector in rn.
phantom_wt = wavelet_op(phantom)
phantom_wt.show(title='wavelet transform')
# It may however (for some choices of wbasis) be interpreted as a vector in the
# domain of the transformation
phantom_wt_2d = space.element(phantom_wt)
phantom_wt_2d.show('wavelet transform in 2d')
# Calculate the inverse transform.
phantom_wt_inv = wavelet_op.inverse(phantom_wt)
phantom_wt_inv.show(title='wavelet transform inverted')
|
<commit_before><commit_msg>DOC: Add example on how to use WaveletTransform<commit_after>
|
# Copyright 2014-2016 The ODL development group
#
# This file is part of ODL.
#
# ODL is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ODL is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ODL. If not, see <http://www.gnu.org/licenses/>.
"""Simple example on the usage of the Wavelet Transform."""
import odl
# Discretized space: discretized functions on the rectangle [-1, 1] x [-1, 1]
# with 256 samples per dimension.
space = odl.uniform_discr([-1, -1], [1, 1], (256, 256))
# Make the Wavelet transform operator on this space. The range is calculated
# automatically. The default backend is PyWavelets (pywt).
wavelet_op = odl.trafos.WaveletTransform(space, nscales=2, wbasis='Haar')
# Create a phantom and its wavelet transform and display them.
phantom = odl.phantom.shepp_logan(space, modified=True)
phantom.show(title='Shepp-Logan phantom')
# Note that the wavelet transform is a vector in rn.
phantom_wt = wavelet_op(phantom)
phantom_wt.show(title='wavelet transform')
# It may however (for some choices of wbasis) be interpreted as a vector in the
# domain of the transformation
phantom_wt_2d = space.element(phantom_wt)
phantom_wt_2d.show('wavelet transform in 2d')
# Calculate the inverse transform.
phantom_wt_inv = wavelet_op.inverse(phantom_wt)
phantom_wt_inv.show(title='wavelet transform inverted')
|
DOC: Add example on how to use WaveletTransform# Copyright 2014-2016 The ODL development group
#
# This file is part of ODL.
#
# ODL is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ODL is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ODL. If not, see <http://www.gnu.org/licenses/>.
"""Simple example on the usage of the Wavelet Transform."""
import odl
# Discretized space: discretized functions on the rectangle [-1, 1] x [-1, 1]
# with 256 samples per dimension.
space = odl.uniform_discr([-1, -1], [1, 1], (256, 256))
# Make the Wavelet transform operator on this space. The range is calculated
# automatically. The default backend is PyWavelets (pywt).
wavelet_op = odl.trafos.WaveletTransform(space, nscales=2, wbasis='Haar')
# Create a phantom and its wavelet transform and display them.
phantom = odl.phantom.shepp_logan(space, modified=True)
phantom.show(title='Shepp-Logan phantom')
# Note that the wavelet transform is a vector in rn.
phantom_wt = wavelet_op(phantom)
phantom_wt.show(title='wavelet transform')
# It may however (for some choices of wbasis) be interpreted as a vector in the
# domain of the transformation
phantom_wt_2d = space.element(phantom_wt)
phantom_wt_2d.show('wavelet transform in 2d')
# Calculate the inverse transform.
phantom_wt_inv = wavelet_op.inverse(phantom_wt)
phantom_wt_inv.show(title='wavelet transform inverted')
|
<commit_before><commit_msg>DOC: Add example on how to use WaveletTransform<commit_after># Copyright 2014-2016 The ODL development group
#
# This file is part of ODL.
#
# ODL is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ODL is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ODL. If not, see <http://www.gnu.org/licenses/>.
"""Simple example on the usage of the Wavelet Transform."""
import odl
# Discretized space: discretized functions on the rectangle [-1, 1] x [-1, 1]
# with 256 samples per dimension.
space = odl.uniform_discr([-1, -1], [1, 1], (256, 256))
# Make the Wavelet transform operator on this space. The range is calculated
# automatically. The default backend is PyWavelets (pywt).
wavelet_op = odl.trafos.WaveletTransform(space, nscales=2, wbasis='Haar')
# Create a phantom and its wavelet transform and display them.
phantom = odl.phantom.shepp_logan(space, modified=True)
phantom.show(title='Shepp-Logan phantom')
# Note that the wavelet transform is a vector in rn.
phantom_wt = wavelet_op(phantom)
phantom_wt.show(title='wavelet transform')
# It may however (for some choices of wbasis) be interpreted as a vector in the
# domain of the transformation
phantom_wt_2d = space.element(phantom_wt)
phantom_wt_2d.show('wavelet transform in 2d')
# Calculate the inverse transform.
phantom_wt_inv = wavelet_op.inverse(phantom_wt)
phantom_wt_inv.show(title='wavelet transform inverted')
|
|
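The inverse call at the end of this record relies on the wavelet transform being exactly invertible. A minimal sketch of that round-trip property using PyWavelets directly, the backend the example names; the random array and decomposition level here are illustrative:
import numpy as np
import pywt
arr = np.random.rand(256, 256)
coeffs = pywt.wavedec2(arr, 'haar', level=2)  # 2-scale Haar decomposition
rec = pywt.waverec2(coeffs, 'haar')           # inverse transform
assert np.allclose(arr, rec)                  # reconstruction matches the input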
13dffb897617f87aaaee247095107d7011e002d5
|
kubernetes/test/test_api_client.py
|
kubernetes/test/test_api_client.py
|
# coding: utf-8
import atexit
import weakref
import unittest
import kubernetes
class TestApiClient(unittest.TestCase):
def test_context_manager_closes_threadpool(self):
with kubernetes.client.ApiClient() as client:
self.assertIsNotNone(client.pool)
pool_ref = weakref.ref(client._pool)
self.assertIsNotNone(pool_ref())
self.assertIsNone(pool_ref())
def test_atexit_closes_threadpool(self):
client = kubernetes.client.ApiClient()
self.assertIsNotNone(client.pool)
self.assertIsNotNone(client._pool)
atexit._run_exitfuncs()
self.assertIsNone(client._pool)
|
Add test to ensure kubernetes client threadpool is cleaned up
|
Add test to ensure kubernetes client threadpool is cleaned up
|
Python
|
apache-2.0
|
kubernetes-client/python,kubernetes-client/python
|
Add test to ensure kubernetes client threadpool is cleaned up
|
# coding: utf-8
import atexit
import weakref
import unittest
import kubernetes
class TestApiClient(unittest.TestCase):
def test_context_manager_closes_threadpool(self):
with kubernetes.client.ApiClient() as client:
self.assertIsNotNone(client.pool)
pool_ref = weakref.ref(client._pool)
self.assertIsNotNone(pool_ref())
self.assertIsNone(pool_ref())
def test_atexit_closes_threadpool(self):
client = kubernetes.client.ApiClient()
self.assertIsNotNone(client.pool)
self.assertIsNotNone(client._pool)
atexit._run_exitfuncs()
self.assertIsNone(client._pool)
|
<commit_before><commit_msg>Add test to ensure kubernetes client threadpool is cleaned up<commit_after>
|
# coding: utf-8
import atexit
import weakref
import unittest
import kubernetes
class TestApiClient(unittest.TestCase):
def test_context_manager_closes_threadpool(self):
with kubernetes.client.ApiClient() as client:
self.assertIsNotNone(client.pool)
pool_ref = weakref.ref(client._pool)
self.assertIsNotNone(pool_ref())
self.assertIsNone(pool_ref())
def test_atexit_closes_threadpool(self):
client = kubernetes.client.ApiClient()
self.assertIsNotNone(client.pool)
self.assertIsNotNone(client._pool)
atexit._run_exitfuncs()
self.assertIsNone(client._pool)
|
Add test to ensure kubernetes client threadpool is cleaned up# coding: utf-8
import atexit
import weakref
import unittest
import kubernetes
class TestApiClient(unittest.TestCase):
def test_context_manager_closes_threadpool(self):
with kubernetes.client.ApiClient() as client:
self.assertIsNotNone(client.pool)
pool_ref = weakref.ref(client._pool)
self.assertIsNotNone(pool_ref())
self.assertIsNone(pool_ref())
def test_atexit_closes_threadpool(self):
client = kubernetes.client.ApiClient()
self.assertIsNotNone(client.pool)
self.assertIsNotNone(client._pool)
atexit._run_exitfuncs()
self.assertIsNone(client._pool)
|
<commit_before><commit_msg>Add test to ensure kubernetes client threadpool is cleaned up<commit_after># coding: utf-8
import atexit
import weakref
import unittest
import kubernetes
class TestApiClient(unittest.TestCase):
def test_context_manager_closes_threadpool(self):
with kubernetes.client.ApiClient() as client:
self.assertIsNotNone(client.pool)
pool_ref = weakref.ref(client._pool)
self.assertIsNotNone(pool_ref())
self.assertIsNone(pool_ref())
def test_atexit_closes_threadpool(self):
client = kubernetes.client.ApiClient()
self.assertIsNotNone(client.pool)
self.assertIsNotNone(client._pool)
atexit._run_exitfuncs()
self.assertIsNone(client._pool)
|
|
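The weakref technique in this record generalizes to any resource-cleanup test: hold only a weak reference to the resource, drop the strong reference, and a dead weak reference proves the resource was reclaimed. A minimal sketch; the Fake* names are illustrative, and the final assertion relies on CPython's immediate refcount-based collection:
import weakref
class FakePool:
    pass
class FakeClient:
    def __init__(self):
        self._pool = FakePool()
    def close(self):
        self._pool = None  # drop the only strong reference
client = FakeClient()
pool_ref = weakref.ref(client._pool)
assert pool_ref() is not None  # pool is alive while the client holds it
client.close()
assert pool_ref() is None      # dead weakref proves the pool was reclaimed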
0ab00b948be0096cbb71642c27d554b5f3666914
|
django_summernote/migrations/0002_update-help_text.py
|
django_summernote/migrations/0002_update-help_text.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-11 07:47
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('django_summernote', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='attachment',
name='name',
field=models.CharField(blank=True, help_text=b'Defaults to filename, if left blank', max_length=255, null=True),
),
]
|
Add a migration for updating help_text
|
Add a migration for updating help_text
|
Python
|
mit
|
lqez/django-summernote,summernote/django-summernote,lqez/django-summernote,lqez/django-summernote,summernote/django-summernote,summernote/django-summernote
|
Add a migration for updating help_text
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-11 07:47
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('django_summernote', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='attachment',
name='name',
field=models.CharField(blank=True, help_text=b'Defaults to filename, if left blank', max_length=255, null=True),
),
]
|
<commit_before><commit_msg>Add a migration for updating help_text<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-11 07:47
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('django_summernote', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='attachment',
name='name',
field=models.CharField(blank=True, help_text=b'Defaults to filename, if left blank', max_length=255, null=True),
),
]
|
Add a migration for updating help_text# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-11 07:47
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('django_summernote', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='attachment',
name='name',
field=models.CharField(blank=True, help_text=b'Defaults to filename, if left blank', max_length=255, null=True),
),
]
|
<commit_before><commit_msg>Add a migration for updating help_text<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-11 07:47
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('django_summernote', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='attachment',
name='name',
field=models.CharField(blank=True, help_text=b'Defaults to filename, if left blank', max_length=255, null=True),
),
]
|
|
e65921311135c60c0f2409da98e412e3646c2be7
|
tempest/api/network/admin/test_negative_quotas.py
|
tempest/api/network/admin/test_negative_quotas.py
|
# Copyright 2015 Cloudwatt
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.network import base
from tempest import test
from tempest_lib import exceptions as lib_exc
class QuotasNegativeTest(base.BaseAdminNetworkTest):
"""Tests the following operations in the Neutron API:
set network quota and exceed this quota
v2.0 of the API is assumed.
It is also assumed that the per-tenant quota extension API is configured
in /etc/neutron/neutron.conf as follows:
quota_driver = neutron.db.quota_db.DbQuotaDriver
"""
@classmethod
def skip_checks(cls):
super(QuotasNegativeTest, cls).skip_checks()
if not test.is_extension_enabled('quotas', 'network'):
msg = "quotas extension not enabled."
raise cls.skipException(msg)
@classmethod
def setup_clients(cls):
super(QuotasNegativeTest, cls).setup_clients()
cls.identity_admin_client = cls.os_adm.identity_client
@test.idempotent_id('644f4e1b-1bf9-4af0-9fd8-eb56ac0f51cf')
def test_network_quota_exceeding(self):
# Set the network quota to two
self.admin_client.update_quotas(self.networks_client.tenant_id,
network=2)
self.addCleanup(self.admin_client.reset_quotas,
self.networks_client.tenant_id)
# Create two networks
n1 = self.networks_client.create_network()
self.addCleanup(self.networks_client.delete_network,
n1['network']['id'])
n2 = self.networks_client.create_network()
self.addCleanup(self.networks_client.delete_network,
n2['network']['id'])
# Try to create a third network while the quota is two
with self.assertRaisesRegexp(
lib_exc.Conflict,
"An object with that identifier already exists\\n" +
"Details.*Quota exceeded for resources: \['network'\].*"):
n3 = self.networks_client.create_network()
self.addCleanup(self.networks_client.delete_network,
n3['network']['id'])
|
Add network quota exceeding negative test
|
Add network quota exceeding negative test
A quota on the network resource is set. More network resources than
allowed are created. The quota-exceeding exception is then caught.
Change-Id: Ic6b846a42276a4cccc0b95fc8654f8c2c16d75c5
Implements: blueprint add-test-for-quota-exceeding
Signed-off-by: Antoine Eiche <7eb7c86404e448ce7e6352830c5e67ec86226606@abesis.fr>
|
Python
|
apache-2.0
|
zsoltdudas/lis-tempest,Juniper/tempest,vedujoshi/tempest,openstack/tempest,openstack/tempest,masayukig/tempest,zsoltdudas/lis-tempest,vedujoshi/tempest,sebrandon1/tempest,LIS/lis-tempest,Tesora/tesora-tempest,bigswitch/tempest,bigswitch/tempest,sebrandon1/tempest,cisco-openstack/tempest,cisco-openstack/tempest,LIS/lis-tempest,masayukig/tempest,Tesora/tesora-tempest,Juniper/tempest
|
Add network quota exceeding negative test
A quota on the network resource is set. More network resources than
allowed are created. The quota-exceeding exception is then caught.
Change-Id: Ic6b846a42276a4cccc0b95fc8654f8c2c16d75c5
Implements: blueprint add-test-for-quota-exceeding
Signed-off-by: Antoine Eiche <7eb7c86404e448ce7e6352830c5e67ec86226606@abesis.fr>
|
# Copyright 2015 Cloudwatt
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.network import base
from tempest import test
from tempest_lib import exceptions as lib_exc
class QuotasNegativeTest(base.BaseAdminNetworkTest):
"""Tests the following operations in the Neutron API:
set network quota and exceed this quota
v2.0 of the API is assumed.
It is also assumed that the per-tenant quota extension API is configured
in /etc/neutron/neutron.conf as follows:
quota_driver = neutron.db.quota_db.DbQuotaDriver
"""
@classmethod
def skip_checks(cls):
super(QuotasNegativeTest, cls).skip_checks()
if not test.is_extension_enabled('quotas', 'network'):
msg = "quotas extension not enabled."
raise cls.skipException(msg)
@classmethod
def setup_clients(cls):
super(QuotasNegativeTest, cls).setup_clients()
cls.identity_admin_client = cls.os_adm.identity_client
@test.idempotent_id('644f4e1b-1bf9-4af0-9fd8-eb56ac0f51cf')
def test_network_quota_exceeding(self):
# Set the network quota to two
self.admin_client.update_quotas(self.networks_client.tenant_id,
network=2)
self.addCleanup(self.admin_client.reset_quotas,
self.networks_client.tenant_id)
# Create two networks
n1 = self.networks_client.create_network()
self.addCleanup(self.networks_client.delete_network,
n1['network']['id'])
n2 = self.networks_client.create_network()
self.addCleanup(self.networks_client.delete_network,
n2['network']['id'])
# Try to create a third network while the quota is two
with self.assertRaisesRegexp(
lib_exc.Conflict,
"An object with that identifier already exists\\n" +
"Details.*Quota exceeded for resources: \['network'\].*"):
n3 = self.networks_client.create_network()
self.addCleanup(self.networks_client.delete_network,
n3['network']['id'])
|
<commit_before><commit_msg>Add network quota exceeding negative test
A quota on the network resource is set. More network resources than
allowed are created. The quota-exceeding exception is then caught.
Change-Id: Ic6b846a42276a4cccc0b95fc8654f8c2c16d75c5
Implements: blueprint add-test-for-quota-exceeding
Signed-off-by: Antoine Eiche <7eb7c86404e448ce7e6352830c5e67ec86226606@abesis.fr><commit_after>
|
# Copyright 2015 Cloudwatt
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.network import base
from tempest import test
from tempest_lib import exceptions as lib_exc
class QuotasNegativeTest(base.BaseAdminNetworkTest):
"""Tests the following operations in the Neutron API:
set network quota and exceed this quota
v2.0 of the API is assumed.
It is also assumed that the per-tenant quota extension API is configured
in /etc/neutron/neutron.conf as follows:
quota_driver = neutron.db.quota_db.DbQuotaDriver
"""
@classmethod
def skip_checks(cls):
super(QuotasNegativeTest, cls).skip_checks()
if not test.is_extension_enabled('quotas', 'network'):
msg = "quotas extension not enabled."
raise cls.skipException(msg)
@classmethod
def setup_clients(cls):
super(QuotasNegativeTest, cls).setup_clients()
cls.identity_admin_client = cls.os_adm.identity_client
@test.idempotent_id('644f4e1b-1bf9-4af0-9fd8-eb56ac0f51cf')
def test_network_quota_exceeding(self):
# Set the network quota to two
self.admin_client.update_quotas(self.networks_client.tenant_id,
network=2)
self.addCleanup(self.admin_client.reset_quotas,
self.networks_client.tenant_id)
# Create two networks
n1 = self.networks_client.create_network()
self.addCleanup(self.networks_client.delete_network,
n1['network']['id'])
n2 = self.networks_client.create_network()
self.addCleanup(self.networks_client.delete_network,
n2['network']['id'])
# Try to create a third network while the quota is two
with self.assertRaisesRegexp(
lib_exc.Conflict,
"An object with that identifier already exists\\n" +
"Details.*Quota exceeded for resources: \['network'\].*"):
n3 = self.networks_client.create_network()
self.addCleanup(self.networks_client.delete_network,
n3['network']['id'])
|
Add network quota exceeding negative test
A quota on the network resource is set. More network resources than
allowed are created. The quota-exceeding exception is then caught.
Change-Id: Ic6b846a42276a4cccc0b95fc8654f8c2c16d75c5
Implements: blueprint add-test-for-quota-exceeding
Signed-off-by: Antoine Eiche <7eb7c86404e448ce7e6352830c5e67ec86226606@abesis.fr># Copyright 2015 Cloudwatt
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.network import base
from tempest import test
from tempest_lib import exceptions as lib_exc
class QuotasNegativeTest(base.BaseAdminNetworkTest):
"""Tests the following operations in the Neutron API:
set network quota and exceed this quota
v2.0 of the API is assumed.
It is also assumed that the per-tenant quota extension API is configured
in /etc/neutron/neutron.conf as follows:
quota_driver = neutron.db.quota_db.DbQuotaDriver
"""
@classmethod
def skip_checks(cls):
super(QuotasNegativeTest, cls).skip_checks()
if not test.is_extension_enabled('quotas', 'network'):
msg = "quotas extension not enabled."
raise cls.skipException(msg)
@classmethod
def setup_clients(cls):
super(QuotasNegativeTest, cls).setup_clients()
cls.identity_admin_client = cls.os_adm.identity_client
@test.idempotent_id('644f4e1b-1bf9-4af0-9fd8-eb56ac0f51cf')
def test_network_quota_exceeding(self):
# Set the network quota to two
self.admin_client.update_quotas(self.networks_client.tenant_id,
network=2)
self.addCleanup(self.admin_client.reset_quotas,
self.networks_client.tenant_id)
# Create two networks
n1 = self.networks_client.create_network()
self.addCleanup(self.networks_client.delete_network,
n1['network']['id'])
n2 = self.networks_client.create_network()
self.addCleanup(self.networks_client.delete_network,
n2['network']['id'])
# Try to create a third network while the quota is two
with self.assertRaisesRegexp(
lib_exc.Conflict,
"An object with that identifier already exists\\n" +
"Details.*Quota exceeded for resources: \['network'\].*"):
n3 = self.networks_client.create_network()
self.addCleanup(self.networks_client.delete_network,
n3['network']['id'])
|
<commit_before><commit_msg>Add network quota exceeding negative test
A quota on the network resource is set. More network resources than
allowed are created. The quota-exceeding exception is then caught.
Change-Id: Ic6b846a42276a4cccc0b95fc8654f8c2c16d75c5
Implements: blueprint add-test-for-quota-exceeding
Signed-off-by: Antoine Eiche <7eb7c86404e448ce7e6352830c5e67ec86226606@abesis.fr><commit_after># Copyright 2015 Cloudwatt
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.network import base
from tempest import test
from tempest_lib import exceptions as lib_exc
class QuotasNegativeTest(base.BaseAdminNetworkTest):
"""Tests the following operations in the Neutron API:
set network quota and exceed this quota
v2.0 of the API is assumed.
It is also assumed that the per-tenant quota extension API is configured
in /etc/neutron/neutron.conf as follows:
quota_driver = neutron.db.quota_db.DbQuotaDriver
"""
@classmethod
def skip_checks(cls):
super(QuotasNegativeTest, cls).skip_checks()
if not test.is_extension_enabled('quotas', 'network'):
msg = "quotas extension not enabled."
raise cls.skipException(msg)
@classmethod
def setup_clients(cls):
super(QuotasNegativeTest, cls).setup_clients()
cls.identity_admin_client = cls.os_adm.identity_client
@test.idempotent_id('644f4e1b-1bf9-4af0-9fd8-eb56ac0f51cf')
def test_network_quota_exceeding(self):
# Set the network quota to two
self.admin_client.update_quotas(self.networks_client.tenant_id,
network=2)
self.addCleanup(self.admin_client.reset_quotas,
self.networks_client.tenant_id)
# Create two networks
n1 = self.networks_client.create_network()
self.addCleanup(self.networks_client.delete_network,
n1['network']['id'])
n2 = self.networks_client.create_network()
self.addCleanup(self.networks_client.delete_network,
n2['network']['id'])
# Try to create a third network while the quota is two
with self.assertRaisesRegexp(
lib_exc.Conflict,
"An object with that identifier already exists\\n" +
"Details.*Quota exceeded for resources: \['network'\].*"):
n3 = self.networks_client.create_network()
self.addCleanup(self.networks_client.delete_network,
n3['network']['id'])
|
|
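Two testing patterns in this record are worth calling out: addCleanup registers teardown immediately after each resource is created, so earlier resources are still cleaned up when a later step fails, and the context-manager form of assertRaisesRegexp scopes the expected failure to a single call (assertRaisesRegex is the non-deprecated spelling on Python 3). A minimal framework-free sketch of the same shape; the quota logic is a stand-in, not the Neutron API:
import unittest
class QuotaPatternTest(unittest.TestCase):
    def test_third_create_exceeds_quota(self):
        created = []
        def create_network(limit=2):
            if len(created) >= limit:
                raise RuntimeError("Quota exceeded for resources: ['network']")
            created.append(object())
            self.addCleanup(created.pop)  # teardown registered per resource
        create_network()
        create_network()
        with self.assertRaisesRegex(RuntimeError, r"Quota exceeded .*network"):
            create_network()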
a88d5414e7762e87c052ca9a28fe36a28b7d4d46
|
oscar/apps/partner/prices.py
|
oscar/apps/partner/prices.py
|
class TaxNotKnown(Exception):
"""
Exception for when a tax-inclusive price is requested but we don't know
what the tax applicable is (yet).
"""
class Base(object):
#: Whether any prices exist
exists = False
#: Whether tax is known for this product (and session)
is_tax_known = False
# Normal price properties
excl_tax = incl_tax = tax = None
class Unavailable(Base):
"""
No stockrecord, therefore no prices
"""
class FixedPrice(Base):
exists = True
def __init__(self, excl_tax, tax=None):
self.excl_tax = excl_tax
self.tax = tax
@property
def incl_tax(self):
if self.is_tax_known:
return self.excl_tax + self.tax
raise TaxNotKnown("Can't calculate price.incl_tax as tax isn't known")
@property
def is_tax_known(self):
return self.tax is not None
class DelegateToStockRecord(Base):
is_tax_known = True
def __init__(self, stockrecord):
self.stockrecord = stockrecord
@property
def exists(self):
return self.stockrecord is not None
@property
def excl_tax(self):
return self.stockrecord.price_excl_tax
@property
def incl_tax(self):
return self.stockrecord.price_incl_tax
@property
def tax(self):
return self.stockrecord.price_tax
|
class TaxNotKnown(Exception):
"""
Exception for when a tax-inclusive price is requested but we don't know
what the tax applicable is (yet).
"""
class Base(object):
"""
The interface that any pricing policy must support
"""
#: Whether any prices exist
exists = False
#: Whether tax is known
is_tax_known = False
#: Normal price properties
excl_tax = incl_tax = tax = None
#: Currency prices are in
currency = None
class Unavailable(Base):
"""
No stockrecord, therefore no prices
"""
class FixedPrice(Base):
exists = True
def __init__(self, excl_tax, tax=None):
self.excl_tax = excl_tax
self.tax = tax
@property
def incl_tax(self):
if self.is_tax_known:
return self.excl_tax + self.tax
raise TaxNotKnown("Can't calculate price.incl_tax as tax isn't known")
@property
def is_tax_known(self):
return self.tax is not None
class DelegateToStockRecord(Base):
is_tax_known = True
def __init__(self, stockrecord):
self.stockrecord = stockrecord
@property
def exists(self):
return self.stockrecord is not None
@property
def excl_tax(self):
return self.stockrecord.price_excl_tax
@property
def incl_tax(self):
return self.stockrecord.price_incl_tax
@property
def tax(self):
return self.stockrecord.price_tax
@property
def currency(self):
return self.stockrecord.price_currency
|
Add currency property to pricing policies
|
Add currency property to pricing policies
|
Python
|
bsd-3-clause
|
pdonadeo/django-oscar,jinnykoo/wuyisj.com,WillisXChen/django-oscar,bschuon/django-oscar,saadatqadri/django-oscar,kapari/django-oscar,adamend/django-oscar,adamend/django-oscar,bnprk/django-oscar,sonofatailor/django-oscar,Jannes123/django-oscar,faratro/django-oscar,MatthewWilkes/django-oscar,sasha0/django-oscar,anentropic/django-oscar,anentropic/django-oscar,manevant/django-oscar,sasha0/django-oscar,dongguangming/django-oscar,saadatqadri/django-oscar,spartonia/django-oscar,DrOctogon/unwash_ecom,faratro/django-oscar,nickpack/django-oscar,QLGu/django-oscar,ademuk/django-oscar,okfish/django-oscar,jmt4/django-oscar,anentropic/django-oscar,adamend/django-oscar,kapari/django-oscar,thechampanurag/django-oscar,okfish/django-oscar,amirrpp/django-oscar,eddiep1101/django-oscar,jlmadurga/django-oscar,pasqualguerrero/django-oscar,michaelkuty/django-oscar,itbabu/django-oscar,rocopartners/django-oscar,ka7eh/django-oscar,bschuon/django-oscar,eddiep1101/django-oscar,john-parton/django-oscar,Jannes123/django-oscar,amirrpp/django-oscar,pasqualguerrero/django-oscar,machtfit/django-oscar,binarydud/django-oscar,jinnykoo/wuyisj,nickpack/django-oscar,Idematica/django-oscar,itbabu/django-oscar,saadatqadri/django-oscar,kapari/django-oscar,MatthewWilkes/django-oscar,john-parton/django-oscar,bnprk/django-oscar,pdonadeo/django-oscar,Bogh/django-oscar,saadatqadri/django-oscar,Bogh/django-oscar,monikasulik/django-oscar,Idematica/django-oscar,Jannes123/django-oscar,ademuk/django-oscar,solarissmoke/django-oscar,mexeniz/django-oscar,nfletton/django-oscar,pasqualguerrero/django-oscar,amirrpp/django-oscar,eddiep1101/django-oscar,jinnykoo/wuyisj,adamend/django-oscar,manevant/django-oscar,ka7eh/django-oscar,jmt4/django-oscar,kapt/django-oscar,lijoantony/django-oscar,solarissmoke/django-oscar,ahmetdaglarbas/e-commerce,jmt4/django-oscar,spartonia/django-oscar,marcoantoniooliveira/labweb,jmt4/django-oscar,rocopartners/django-oscar,pdonadeo/django-oscar,anentropic/django-oscar,WillisXChen/django-oscar,jinnykoo/wuyisj.com,nfletton/django-oscar,josesanch/django-oscar,elliotthill/django-oscar,rocopartners/django-oscar,ahmetdaglarbas/e-commerce,QLGu/django-oscar,amirrpp/django-oscar,okfish/django-oscar,ka7eh/django-oscar,eddiep1101/django-oscar,Idematica/django-oscar,itbabu/django-oscar,jinnykoo/wuyisj.com,solarissmoke/django-oscar,taedori81/django-oscar,jinnykoo/wuyisj,vovanbo/django-oscar,binarydud/django-oscar,django-oscar/django-oscar,nfletton/django-oscar,lijoantony/django-oscar,WadeYuChen/django-oscar,jinnykoo/wuyisj,jlmadurga/django-oscar,sonofatailor/django-oscar,ka7eh/django-oscar,QLGu/django-oscar,machtfit/django-oscar,michaelkuty/django-oscar,john-parton/django-oscar,elliotthill/django-oscar,john-parton/django-oscar,DrOctogon/unwash_ecom,Jannes123/django-oscar,manevant/django-oscar,jlmadurga/django-oscar,sasha0/django-oscar,Bogh/django-oscar,monikasulik/django-oscar,MatthewWilkes/django-oscar,bnprk/django-oscar,WadeYuChen/django-oscar,bnprk/django-oscar,kapt/django-oscar,ademuk/django-oscar,binarydud/django-oscar,dongguangming/django-oscar,Bogh/django-oscar,spartonia/django-oscar,WillisXChen/django-oscar,nickpack/django-oscar,jinnykoo/christmas,itbabu/django-oscar,thechampanurag/django-oscar,jinnykoo/wuyisj.com,vovanbo/django-oscar,okfish/django-oscar,nickpack/django-oscar,makielab/django-oscar,elliotthill/django-oscar,jinnykoo/christmas,manevant/django-oscar,mexeniz/django-oscar,bschuon/django-oscar,dongguangming/django-oscar,django-oscar/django-oscar,taedori81/django-oscar,lijoantony/django-oscar,jlmadurga/django-oscar,thech
ampanurag/django-oscar,sonofatailor/django-oscar,ahmetdaglarbas/e-commerce,MatthewWilkes/django-oscar,binarydud/django-oscar,josesanch/django-oscar,faratro/django-oscar,taedori81/django-oscar,mexeniz/django-oscar,marcoantoniooliveira/labweb,pdonadeo/django-oscar,machtfit/django-oscar,marcoantoniooliveira/labweb,spartonia/django-oscar,WadeYuChen/django-oscar,kapt/django-oscar,nfletton/django-oscar,taedori81/django-oscar,michaelkuty/django-oscar,QLGu/django-oscar,django-oscar/django-oscar,vovanbo/django-oscar,sasha0/django-oscar,WillisXChen/django-oscar,ahmetdaglarbas/e-commerce,mexeniz/django-oscar,jinnykoo/christmas,marcoantoniooliveira/labweb,makielab/django-oscar,thechampanurag/django-oscar,solarissmoke/django-oscar,kapari/django-oscar,pasqualguerrero/django-oscar,django-oscar/django-oscar,monikasulik/django-oscar,vovanbo/django-oscar,faratro/django-oscar,lijoantony/django-oscar,monikasulik/django-oscar,josesanch/django-oscar,sonofatailor/django-oscar,makielab/django-oscar,WillisXChen/django-oscar,ademuk/django-oscar,WillisXChen/django-oscar,bschuon/django-oscar,michaelkuty/django-oscar,makielab/django-oscar,WadeYuChen/django-oscar,dongguangming/django-oscar,rocopartners/django-oscar,DrOctogon/unwash_ecom
|
class TaxNotKnown(Exception):
"""
Exception for when a tax-inclusive price is requested but we don't know
what the tax applicable is (yet).
"""
class Base(object):
#: Whether any prices exist
exists = False
#: Whether tax is known for this product (and session)
is_tax_known = False
# Normal price properties
excl_tax = incl_tax = tax = None
class Unavailable(Base):
"""
No stockrecord, therefore no prices
"""
class FixedPrice(Base):
exists = True
def __init__(self, excl_tax, tax=None):
self.excl_tax = excl_tax
self.tax = tax
@property
def incl_tax(self):
if self.is_tax_known:
return self.excl_tax + self.tax
raise TaxNotKnown("Can't calculate price.incl_tax as tax isn't known")
@property
def is_tax_known(self):
return self.tax is not None
class DelegateToStockRecord(Base):
is_tax_known = True
def __init__(self, stockrecord):
self.stockrecord = stockrecord
@property
def exists(self):
return self.stockrecord is not None
@property
def excl_tax(self):
return self.stockrecord.price_excl_tax
@property
def incl_tax(self):
return self.stockrecord.price_incl_tax
@property
def tax(self):
return self.stockrecord.price_tax
Add currency property to pricing policies
|
class TaxNotKnown(Exception):
"""
Exception for when a tax-inclusive price is requested but we don't know
what the tax applicable is (yet).
"""
class Base(object):
"""
The interface that any pricing policy must support
"""
#: Whether any prices exist
exists = False
#: Whether tax is known
is_tax_known = False
#: Normal price properties
excl_tax = incl_tax = tax = None
#: Currency prices are in
currency = None
class Unavailable(Base):
"""
No stockrecord, therefore no prices
"""
class FixedPrice(Base):
exists = True
def __init__(self, excl_tax, tax=None):
self.excl_tax = excl_tax
self.tax = tax
@property
def incl_tax(self):
if self.is_tax_known:
return self.excl_tax + self.tax
raise TaxNotKnown("Can't calculate price.incl_tax as tax isn't known")
@property
def is_tax_known(self):
return self.tax is not None
class DelegateToStockRecord(Base):
is_tax_known = True
def __init__(self, stockrecord):
self.stockrecord = stockrecord
@property
def exists(self):
return self.stockrecord is not None
@property
def excl_tax(self):
return self.stockrecord.price_excl_tax
@property
def incl_tax(self):
return self.stockrecord.price_incl_tax
@property
def tax(self):
return self.stockrecord.price_tax
@property
def currency(self):
return self.stockrecord.price_currency
|
<commit_before>class TaxNotKnown(Exception):
"""
Exception for when a tax-inclusive price is requested but we don't know
what the tax applicable is (yet).
"""
class Base(object):
#: Whether any prices exist
exists = False
#: Whether tax is known for this product (and session)
is_tax_known = False
# Normal price properties
excl_tax = incl_tax = tax = None
class Unavailable(Base):
"""
No stockrecord, therefore no prices
"""
class FixedPrice(Base):
exists = True
def __init__(self, excl_tax, tax=None):
self.excl_tax = excl_tax
self.tax = tax
@property
def incl_tax(self):
if self.is_tax_known:
return self.excl_tax + self.tax
raise TaxNotKnown("Can't calculate price.incl_tax as tax isn't known")
@property
def is_tax_known(self):
return self.tax is not None
class DelegateToStockRecord(Base):
is_tax_known = True
def __init__(self, stockrecord):
self.stockrecord = stockrecord
@property
def exists(self):
return self.stockrecord is not None
@property
def excl_tax(self):
return self.stockrecord.price_excl_tax
@property
def incl_tax(self):
return self.stockrecord.price_incl_tax
@property
def tax(self):
return self.stockrecord.price_tax
<commit_msg>Add currency property to pricing policies<commit_after>
|
class TaxNotKnown(Exception):
"""
Exception for when a tax-inclusive price is requested but we don't know
what the tax applicable is (yet).
"""
class Base(object):
"""
The interface that any pricing policy must support
"""
#: Whether any prices exist
exists = False
#: Whether tax is known
is_tax_known = False
#: Normal price properties
excl_tax = incl_tax = tax = None
#: Currency prices are in
currency = None
class Unavailable(Base):
"""
No stockrecord, therefore no prices
"""
class FixedPrice(Base):
exists = True
def __init__(self, excl_tax, tax=None):
self.excl_tax = excl_tax
self.tax = tax
@property
def incl_tax(self):
if self.is_tax_known:
return self.excl_tax + self.tax
raise TaxNotKnown("Can't calculate price.incl_tax as tax isn't known")
@property
def is_tax_known(self):
return self.tax is not None
class DelegateToStockRecord(Base):
is_tax_known = True
def __init__(self, stockrecord):
self.stockrecord = stockrecord
@property
def exists(self):
return self.stockrecord is not None
@property
def excl_tax(self):
return self.stockrecord.price_excl_tax
@property
def incl_tax(self):
return self.stockrecord.price_incl_tax
@property
def tax(self):
return self.stockrecord.price_tax
@property
def currency(self):
return self.stockrecord.price_currency
|
class TaxNotKnown(Exception):
"""
Exception for when a tax-inclusive price is requested but we don't know
what the tax applicable is (yet).
"""
class Base(object):
#: Whether any prices exist
exists = False
#: Whether tax is known for this product (and session)
is_tax_known = False
# Normal price properties
excl_tax = incl_tax = tax = None
class Unavailable(Base):
"""
No stockrecord, therefore no prices
"""
class FixedPrice(Base):
exists = True
def __init__(self, excl_tax, tax=None):
self.excl_tax = excl_tax
self.tax = tax
@property
def incl_tax(self):
if self.is_tax_known:
return self.excl_tax + self.tax
raise TaxNotKnown("Can't calculate price.incl_tax as tax isn't known")
@property
def is_tax_known(self):
return self.tax is not None
class DelegateToStockRecord(Base):
is_tax_known = True
def __init__(self, stockrecord):
self.stockrecord = stockrecord
@property
def exists(self):
return self.stockrecord is not None
@property
def excl_tax(self):
return self.stockrecord.price_excl_tax
@property
def incl_tax(self):
return self.stockrecord.price_incl_tax
@property
def tax(self):
return self.stockrecord.price_tax
Add currency property to pricing policiesclass TaxNotKnown(Exception):
"""
Exception for when a tax-inclusive price is requested but we don't know
what the tax applicable is (yet).
"""
class Base(object):
"""
The interface that any pricing policy must support
"""
#: Whether any prices exist
exists = False
#: Whether tax is known
is_tax_known = False
#: Normal price properties
excl_tax = incl_tax = tax = None
#: Currency prices are in
currency = None
class Unavailable(Base):
"""
No stockrecord, therefore no prices
"""
class FixedPrice(Base):
exists = True
def __init__(self, excl_tax, tax=None):
self.excl_tax = excl_tax
self.tax = tax
@property
def incl_tax(self):
if self.is_tax_known:
return self.excl_tax + self.tax
raise TaxNotKnown("Can't calculate price.incl_tax as tax isn't known")
@property
def is_tax_known(self):
return self.tax is not None
class DelegateToStockRecord(Base):
is_tax_known = True
def __init__(self, stockrecord):
self.stockrecord = stockrecord
@property
def exists(self):
return self.stockrecord is not None
@property
def excl_tax(self):
return self.stockrecord.price_excl_tax
@property
def incl_tax(self):
return self.stockrecord.price_incl_tax
@property
def tax(self):
return self.stockrecord.price_tax
@property
def currency(self):
return self.stockrecord.price_currency
|
<commit_before>class TaxNotKnown(Exception):
"""
Exception for when a tax-inclusive price is requested but we don't know
what the tax applicable is (yet).
"""
class Base(object):
#: Whether any prices exist
exists = False
#: Whether tax is known for this product (and session)
is_tax_known = False
# Normal price properties
excl_tax = incl_tax = tax = None
class Unavailable(Base):
"""
No stockrecord, therefore no prices
"""
class FixedPrice(Base):
exists = True
def __init__(self, excl_tax, tax=None):
self.excl_tax = excl_tax
self.tax = tax
@property
def incl_tax(self):
if self.is_tax_known:
return self.excl_tax + self.tax
raise TaxNotKnown("Can't calculate price.incl_tax as tax isn't known")
@property
def is_tax_known(self):
return self.tax is not None
class DelegateToStockRecord(Base):
is_tax_known = True
def __init__(self, stockrecord):
self.stockrecord = stockrecord
@property
def exists(self):
return self.stockrecord is not None
@property
def excl_tax(self):
return self.stockrecord.price_excl_tax
@property
def incl_tax(self):
return self.stockrecord.price_incl_tax
@property
def tax(self):
return self.stockrecord.price_tax
<commit_msg>Add currency property to pricing policies<commit_after>class TaxNotKnown(Exception):
"""
Exception for when a tax-inclusive price is requested but we don't know
what the tax applicable is (yet).
"""
class Base(object):
"""
The interface that any pricing policy must support
"""
#: Whether any prices exist
exists = False
#: Whether tax is known
is_tax_known = False
#: Normal price properties
excl_tax = incl_tax = tax = None
#: Currency prices are in
currency = None
class Unavailable(Base):
"""
No stockrecord, therefore no prices
"""
class FixedPrice(Base):
exists = True
def __init__(self, excl_tax, tax=None):
self.excl_tax = excl_tax
self.tax = tax
@property
def incl_tax(self):
if self.is_tax_known:
return self.excl_tax + self.tax
raise TaxNotKnown("Can't calculate price.incl_tax as tax isn't known")
@property
def is_tax_known(self):
return self.tax is not None
class DelegateToStockRecord(Base):
is_tax_known = True
def __init__(self, stockrecord):
self.stockrecord = stockrecord
@property
def exists(self):
return self.stockrecord is not None
@property
def excl_tax(self):
return self.stockrecord.price_excl_tax
@property
def incl_tax(self):
return self.stockrecord.price_incl_tax
@property
def tax(self):
return self.stockrecord.price_tax
@property
def currency(self):
return self.stockrecord.price_currency
|
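A short sketch of how the pricing-policy interface above is consumed; FixedPrice is trimmed from the record, and the Decimal amounts and the 'GBP' default are illustrative:

from decimal import Decimal


class TaxNotKnown(Exception):
    pass


class FixedPrice(object):
    """Trimmed copy of the FixedPrice policy above, plus a currency."""

    def __init__(self, excl_tax, tax=None, currency='GBP'):
        self.excl_tax = excl_tax
        self.tax = tax
        self.currency = currency

    @property
    def is_tax_known(self):
        return self.tax is not None

    @property
    def incl_tax(self):
        if self.is_tax_known:
            return self.excl_tax + self.tax
        raise TaxNotKnown("Tax is not known yet")


price = FixedPrice(Decimal('10.00'))
try:
    price.incl_tax  # Tax has not been assigned, so this raises.
except TaxNotKnown:
    pass

price.tax = Decimal('2.00')
assert price.incl_tax == Decimal('12.00')
assert price.currency == 'GBP'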
3786aea82868eb6b08d99a0caa59b9f7ae6446c9
|
integration_test_generateSyntheticData.py
|
integration_test_generateSyntheticData.py
|
# -*- coding: utf-8 -*-
import unittest
import numpy as np
import numpy.testing as npt
from dipy.core.gradients import gradient_table
from generateSyntheticData import (generateSyntheticInputs,
generateSyntheticOutputsFromMultiTensorModel)
class integration_test_generateSyntheticData(unittest.TestCase):
def test_dataGeneration(self):
voxelsInEachDim = (2, 3, 4)
bvals=1500*np.ones(7)
bvals[0]=0
sq2=np.sqrt(2)/2
bvecs=np.array([[0, 0, 0],
[1, 0, 0],
[0, 1, 0],
[0, 0, 1],
[sq2, sq2, 0],
[sq2, 0, sq2],
[0, sq2, sq2]])
smallDelta = 12.9
bigDelta = 21.8
gtab = gradient_table(bvals, bvecs, big_delta=bigDelta,
small_delta=smallDelta)
# Diffusion coefficients for white matter tracts, in mm^2/s
#
# Based roughly on values from:
#
# Pierpaoli, Basser, "Towards a Quantitative Assessment of Diffusion
# Anisotropy", Magnetic Resonance in Medicine, 1996; 36(6):893-906.
#
whiteMatterDiffusionEigenvalues = np.array([1500e-6, 400e-6, 400e-6])
tensorEigenvalues = np.tile(whiteMatterDiffusionEigenvalues, (2,1))
inputs = generateSyntheticInputs(voxelsInEachDim, gtab)
outputs = generateSyntheticOutputsFromMultiTensorModel(
voxelsInEachDim, gtab, tensorEigenvalues)
totalNumberOfSamples = np.prod(voxelsInEachDim)*len(bvals)
npt.assert_array_equal(inputs.shape,
(totalNumberOfSamples, 7),
'Input shapes don\'t match')
npt.assert_array_equal(outputs.shape, (totalNumberOfSamples,),
'Output shapes don\'t match')
# add spatial correlations - easiest to mock an exact signal in each
# voxel with known correlation.
def main():
unittest.main()
if __name__ == '__main__':
main()
|
Test that shapes of input and output match up
|
Test that shapes of input and output match up
|
Python
|
bsd-3-clause
|
jsjol/GaussianProcessRegressionForDiffusionMRI,jsjol/GaussianProcessRegressionForDiffusionMRI
|
Test that shapes of input and output match up
|
# -*- coding: utf-8 -*-
import unittest
import numpy as np
import numpy.testing as npt
from dipy.core.gradients import gradient_table
from generateSyntheticData import (generateSyntheticInputs,
generateSyntheticOutputsFromMultiTensorModel)
class integration_test_generateSyntheticData(unittest.TestCase):
def test_dataGeneration(self):
voxelsInEachDim = (2, 3, 4)
bvals=1500*np.ones(7)
bvals[0]=0
sq2=np.sqrt(2)/2
bvecs=np.array([[0, 0, 0],
[1, 0, 0],
[0, 1, 0],
[0, 0, 1],
[sq2, sq2, 0],
[sq2, 0, sq2],
[0, sq2, sq2]])
smallDelta = 12.9
bigDelta = 21.8
gtab = gradient_table(bvals, bvecs, big_delta=bigDelta,
small_delta=smallDelta)
# Diffusion coefficients for white matter tracts, in mm^2/s
#
# Based roughly on values from:
#
# Pierpaoli, Basser, "Towards a Quantitative Assessment of Diffusion
# Anisotropy", Magnetic Resonance in Medicine, 1996; 36(6):893-906.
#
whiteMatterDiffusionEigenvalues = np.array([1500e-6, 400e-6, 400e-6])
tensorEigenvalues = np.tile(whiteMatterDiffusionEigenvalues, (2,1))
inputs = generateSyntheticInputs(voxelsInEachDim, gtab)
outputs = generateSyntheticOutputsFromMultiTensorModel(
voxelsInEachDim, gtab, tensorEigenvalues)
totalNumberOfSamples = np.prod(voxelsInEachDim)*len(bvals)
npt.assert_array_equal(inputs.shape,
(totalNumberOfSamples, 7),
'Input shapes don\'t match')
npt.assert_array_equal(outputs.shape, (totalNumberOfSamples,),
'Output shapes don\'t match')
# add spatial correlations - easiest to mock an exact signal in each
# voxel with known correlation.
def main():
unittest.main()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Test that shapes of input and output match up<commit_after>
|
# -*- coding: utf-8 -*-
import unittest
import numpy as np
import numpy.testing as npt
from dipy.core.gradients import gradient_table
from generateSyntheticData import (generateSyntheticInputs,
generateSyntheticOutputsFromMultiTensorModel)
class integration_test_generateSyntheticData(unittest.TestCase):
def test_dataGeneration(self):
voxelsInEachDim = (2, 3, 4)
bvals=1500*np.ones(7)
bvals[0]=0
sq2=np.sqrt(2)/2
bvecs=np.array([[0, 0, 0],
[1, 0, 0],
[0, 1, 0],
[0, 0, 1],
[sq2, sq2, 0],
[sq2, 0, sq2],
[0, sq2, sq2]])
smallDelta = 12.9
bigDelta = 21.8
gtab = gradient_table(bvals, bvecs, big_delta=bigDelta,
small_delta=smallDelta)
# Diffusion coefficients for white matter tracts, in mm^2/s
#
# Based roughly on values from:
#
# Pierpaoli, Basser, "Towards a Quantitative Assessment of Diffusion
# Anisotropy", Magnetic Resonance in Medicine, 1996; 36(6):893-906.
#
whiteMatterDiffusionEigenvalues = np.array([1500e-6, 400e-6, 400e-6])
tensorEigenvalues = np.tile(whiteMatterDiffusionEigenvalues, (2,1))
inputs = generateSyntheticInputs(voxelsInEachDim, gtab)
outputs = generateSyntheticOutputsFromMultiTensorModel(
voxelsInEachDim, gtab, tensorEigenvalues)
totalNumberOfSamples = np.prod(voxelsInEachDim)*len(bvals)
npt.assert_array_equal(inputs.shape,
(totalNumberOfSamples, 7),
'Input shapes don\'t match')
npt.assert_array_equal(outputs.shape, (totalNumberOfSamples,),
'Output shapes don\'t match')
# add spatial correlations - easiest to mock an exact signal in each
# voxel with known correlation.
def main():
unittest.main()
if __name__ == '__main__':
main()
|
Test that shapes of input and output match up# -*- coding: utf-8 -*-
import unittest
import numpy as np
import numpy.testing as npt
from dipy.core.gradients import gradient_table
from generateSyntheticData import (generateSyntheticInputs,
generateSyntheticOutputsFromMultiTensorModel)
class integration_test_generateSyntheticData(unittest.TestCase):
def test_dataGeneration(self):
voxelsInEachDim = (2, 3, 4)
bvals=1500*np.ones(7)
bvals[0]=0
sq2=np.sqrt(2)/2
bvecs=np.array([[0, 0, 0],
[1, 0, 0],
[0, 1, 0],
[0, 0, 1],
[sq2, sq2, 0],
[sq2, 0, sq2],
[0, sq2, sq2]])
smallDelta = 12.9
bigDelta = 21.8
gtab = gradient_table(bvals, bvecs, big_delta=bigDelta,
small_delta=smallDelta)
# Diffusion coefficients for white matter tracts, in mm^2/s
#
# Based roughly on values from:
#
# Pierpaoli, Basser, "Towards a Quantitative Assessment of Diffusion
# Anisotropy", Magnetic Resonance in Medicine, 1996; 36(6):893-906.
#
whiteMatterDiffusionEigenvalues = np.array([1500e-6, 400e-6, 400e-6])
tensorEigenvalues = np.tile(whiteMatterDiffusionEigenvalues, (2,1))
inputs = generateSyntheticInputs(voxelsInEachDim, gtab)
outputs = generateSyntheticOutputsFromMultiTensorModel(
voxelsInEachDim, gtab, tensorEigenvalues)
totalNumberOfSamples = np.prod(voxelsInEachDim)*len(bvals)
npt.assert_array_equal(inputs.shape,
(totalNumberOfSamples, 7),
'Input shapes don\'t match')
npt.assert_array_equal(outputs.shape, (totalNumberOfSamples,),
'Output shapes don\'t match')
# add spatial correlations - easiest to mock an exact signal in each
# voxel with known correlation.
def main():
unittest.main()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Test that shapes of input and output match up<commit_after># -*- coding: utf-8 -*-
import unittest
import numpy as np
import numpy.testing as npt
from dipy.core.gradients import gradient_table
from generateSyntheticData import (generateSyntheticInputs,
generateSyntheticOutputsFromMultiTensorModel)
class integration_test_generateSyntheticData(unittest.TestCase):
def test_dataGeneration(self):
voxelsInEachDim = (2, 3, 4)
bvals=1500*np.ones(7)
bvals[0]=0
sq2=np.sqrt(2)/2
bvecs=np.array([[0, 0, 0],
[1, 0, 0],
[0, 1, 0],
[0, 0, 1],
[sq2, sq2, 0],
[sq2, 0, sq2],
[0, sq2, sq2]])
smallDelta = 12.9
bigDelta = 21.8
gtab = gradient_table(bvals, bvecs, big_delta=bigDelta,
small_delta=smallDelta)
# Diffusion coefficients for white matter tracts, in mm^2/s
#
# Based roughly on values from:
#
# Pierpaoli, Basser, "Towards a Quantitative Assessment of Diffusion
# Anisotropy", Magnetic Resonance in Medicine, 1996; 36(6):893-906.
#
whiteMatterDiffusionEigenvalues = np.array([1500e-6, 400e-6, 400e-6])
tensorEigenvalues = np.tile(whiteMatterDiffusionEigenvalues, (2,1))
inputs = generateSyntheticInputs(voxelsInEachDim, gtab)
outputs = generateSyntheticOutputsFromMultiTensorModel(
voxelsInEachDim, gtab, tensorEigenvalues)
totalNumberOfSamples = np.prod(voxelsInEachDim)*len(bvals)
npt.assert_array_equal(inputs.shape,
(totalNumberOfSamples, 7),
'Input shapes don\'t match')
npt.assert_array_equal(outputs.shape, (totalNumberOfSamples,),
'Output shapes don\'t match')
# add spatial correlations - easiest to mock an exact signal in each
# voxel with known correlation.
def main():
unittest.main()
if __name__ == '__main__':
main()
|
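The shape bookkeeping asserted above can be checked in isolation; a sketch with stand-in arrays, since generateSyntheticInputs itself is not part of this record:

import numpy as np
import numpy.testing as npt

voxels_in_each_dim = (2, 3, 4)
n_bvals = 7

# One sample per (voxel, gradient direction) pair, as in the test above.
total_samples = np.prod(voxels_in_each_dim) * n_bvals

# Stand-ins for the generator outputs; only the shapes matter here.
inputs = np.zeros((total_samples, 7))
outputs = np.zeros(total_samples)

npt.assert_array_equal(inputs.shape, (total_samples, 7))
npt.assert_array_equal(outputs.shape, (total_samples,))
assert total_samples == 168  # 2 * 3 * 4 voxels times 7 b-values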
|
4919ef51a5681eb329a4e9fc9a96f1c7706e829a
|
taiga/projects/notifications/migrations/0006_auto_20151103_0954.py
|
taiga/projects/notifications/migrations/0006_auto_20151103_0954.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('notifications', '0005_auto_20151005_1357'),
]
operations = [
migrations.AlterField(
model_name='notifypolicy',
name='notify_level',
field=models.SmallIntegerField(choices=[(<NotifyLevel.involved: 1>, 'Involved'), (<NotifyLevel.all: 2>, 'All'), (<NotifyLevel.none: 3>, 'None')]),
),
]
|
Create missing migration in taiga.projects.notifications
|
[Backport] Create missing migration in taiga.projects.notifications
|
Python
|
agpl-3.0
|
Rademade/taiga-back,dayatz/taiga-back,bdang2012/taiga-back-casting,bdang2012/taiga-back-casting,xdevelsistemas/taiga-back-community,gam-phon/taiga-back,Rademade/taiga-back,gam-phon/taiga-back,taigaio/taiga-back,taigaio/taiga-back,xdevelsistemas/taiga-back-community,Rademade/taiga-back,bdang2012/taiga-back-casting,dayatz/taiga-back,gam-phon/taiga-back,taigaio/taiga-back,xdevelsistemas/taiga-back-community,gam-phon/taiga-back,bdang2012/taiga-back-casting,Rademade/taiga-back,Rademade/taiga-back,dayatz/taiga-back
|
[Backport] Create missing migration in taiga.projects.notifications
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('notifications', '0005_auto_20151005_1357'),
]
operations = [
migrations.AlterField(
model_name='notifypolicy',
name='notify_level',
field=models.SmallIntegerField(choices=[(<NotifyLevel.involved: 1>, 'Involved'), (<NotifyLevel.all: 2>, 'All'), (<NotifyLevel.none: 3>, 'None')]),
),
]
|
<commit_before><commit_msg>[Backport] Create missing migration in taiga.projects.notifications<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('notifications', '0005_auto_20151005_1357'),
]
operations = [
migrations.AlterField(
model_name='notifypolicy',
name='notify_level',
field=models.SmallIntegerField(choices=[(<NotifyLevel.involved: 1>, 'Involved'), (<NotifyLevel.all: 2>, 'All'), (<NotifyLevel.none: 3>, 'None')]),
),
]
|
[Backport] Create missing migration in taiga.projects.notifications# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('notifications', '0005_auto_20151005_1357'),
]
operations = [
migrations.AlterField(
model_name='notifypolicy',
name='notify_level',
field=models.SmallIntegerField(choices=[(<NotifyLevel.involved: 1>, 'Involved'), (<NotifyLevel.all: 2>, 'All'), (<NotifyLevel.none: 3>, 'None')]),
),
]
|
<commit_before><commit_msg>[Backport] Create missing migration in taiga.projects.notifications<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('notifications', '0005_auto_20151005_1357'),
]
operations = [
migrations.AlterField(
model_name='notifypolicy',
name='notify_level',
field=models.SmallIntegerField(choices=[(<NotifyLevel.involved: 1>, 'Involved'), (<NotifyLevel.all: 2>, 'All'), (<NotifyLevel.none: 3>, 'None')]),
),
]
|
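Note that the choices list above contains enum reprs such as <NotifyLevel.involved: 1>, which is not importable Python; makemigrations serialized the enum members by repr. A hedged sketch of a loadable version of the same operation, assuming the integer values embedded in those reprs (involved=1, all=2, none=3):

# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('notifications', '0005_auto_20151005_1357'),
    ]

    operations = [
        migrations.AlterField(
            model_name='notifypolicy',
            name='notify_level',
            # Plain integers instead of the unserializable enum reprs.
            field=models.SmallIntegerField(
                choices=[(1, 'Involved'), (2, 'All'), (3, 'None')]),
        ),
    ]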
|
4c22b9529b9a7ac13c50bbac3ca81e450297b998
|
maintainers/scripts/hydra-eval-failures.py
|
maintainers/scripts/hydra-eval-failures.py
|
#!/usr/bin/env nix-shell
#!nix-shell -i python -p pythonFull pythonPackages.requests pythonPackages.pyquery pythonPackages.click
# To use, just execute this script with --help to display help.
import subprocess
import json
import click
import requests
from pyquery import PyQuery as pq
maintainers_json = subprocess.check_output([
'nix-instantiate',
'lib/maintainers.nix',
'--eval',
'--json'])
maintainers = json.loads(maintainers_json)
MAINTAINERS = {v: k for k, v in maintainers.iteritems()}
def get_response_text(url):
return pq(requests.get(url).text) # IO
EVAL_FILE = {
'nixos': 'nixos/release.nix',
'nixpkgs': 'pkgs/top-level/release.nix',
}
def get_maintainers(attr_name):
nixname = attr_name.split('.')
meta_json = subprocess.check_output([
'nix-instantiate',
'--eval',
'--strict',
'-A',
'.'.join(nixname[1:]) + '.meta',
EVAL_FILE[nixname[0]],
'--json'])
meta = json.loads(meta_json)
if meta.get('maintainers'):
return [MAINTAINERS[name] for name in meta['maintainers'] if MAINTAINERS.get(name)]
@click.command()
@click.option(
'--jobset',
default="nixos/release-16.09",
help='Hydra project like nixos/release-16.09')
def cli(jobset):
"""
Given a Hydra project, inspect latest evaluation
and print a summary of failed builds
"""
url = "http://hydra.nixos.org/jobset/{}".format(jobset)
# get the last evaluation
click.echo(click.style(
'Getting latest evaluation for {}'.format(url), fg='green'))
d = get_response_text(url)
evaluations = d('#tabs-evaluations').find('a[class="row-link"]')
latest_eval_url = evaluations[0].get('href')
# parse last evaluation page
click.echo(click.style(
'Parsing evaluation {}'.format(latest_eval_url), fg='green'))
d = get_response_text(latest_eval_url + '?full=1')
# TODO: aborted evaluations
# TODO: dependency failed without propagated builds
for tr in d('img[alt="Failed"]').parents('tr'):
a = pq(tr)('a')[1]
print "- [ ] [{}]({})".format(a.text, a.get('href'))
maintainers = get_maintainers(a.text)
if maintainers:
print " - maintainers: {}".format(", ".join(map(lambda u: '@' + u, maintainers)))
# TODO: print last three persons that touched this file
# TODO: pinpoint the diff that broke this build, or maybe it's transient or maybe it never worked?
if __name__ == "__main__":
try:
cli()
except:
import pdb;pdb.post_mortem()
|
Add a script to get failures for hydra eval /cc @globin
|
Add a script to get failures for hydra eval /cc @globin
|
Python
|
mit
|
NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,SymbiFlow/nixpkgs,NixOS/nixpkgs,SymbiFlow/nixpkgs,SymbiFlow/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,SymbiFlow/nixpkgs,SymbiFlow/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,SymbiFlow/nixpkgs,SymbiFlow/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,SymbiFlow/nixpkgs,SymbiFlow/nixpkgs,SymbiFlow/nixpkgs,SymbiFlow/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,SymbiFlow/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,SymbiFlow/nixpkgs
|
Add a script to get failures for hydra eval /cc @globin
|
#!/usr/bin/env nix-shell
#!nix-shell -i python -p pythonFull pythonPackages.requests pythonPackages.pyquery pythonPackages.click
# To use, just execute this script with --help to display help.
import subprocess
import json
import click
import requests
from pyquery import PyQuery as pq
maintainers_json = subprocess.check_output([
'nix-instantiate',
'lib/maintainers.nix',
'--eval',
'--json'])
maintainers = json.loads(maintainers_json)
MAINTAINERS = {v: k for k, v in maintainers.iteritems()}
def get_response_text(url):
return pq(requests.get(url).text) # IO
EVAL_FILE = {
'nixos': 'nixos/release.nix',
'nixpkgs': 'pkgs/top-level/release.nix',
}
def get_maintainers(attr_name):
nixname = attr_name.split('.')
meta_json = subprocess.check_output([
'nix-instantiate',
'--eval',
'--strict',
'-A',
'.'.join(nixname[1:]) + '.meta',
EVAL_FILE[nixname[0]],
'--json'])
meta = json.loads(meta_json)
if meta.get('maintainers'):
return [MAINTAINERS[name] for name in meta['maintainers'] if MAINTAINERS.get(name)]
@click.command()
@click.option(
'--jobset',
default="nixos/release-16.09",
help='Hydra project like nixos/release-16.09')
def cli(jobset):
"""
Given a Hydra project, inspect latest evaluation
and print a summary of failed builds
"""
url = "http://hydra.nixos.org/jobset/{}".format(jobset)
# get the last evaluation
click.echo(click.style(
'Getting latest evaluation for {}'.format(url), fg='green'))
d = get_response_text(url)
evaluations = d('#tabs-evaluations').find('a[class="row-link"]')
latest_eval_url = evaluations[0].get('href')
# parse last evaluation page
click.echo(click.style(
'Parsing evaluation {}'.format(latest_eval_url), fg='green'))
d = get_response_text(latest_eval_url + '?full=1')
# TODO: aborted evaluations
# TODO: dependency failed without propagated builds
for tr in d('img[alt="Failed"]').parents('tr'):
a = pq(tr)('a')[1]
print "- [ ] [{}]({})".format(a.text, a.get('href'))
maintainers = get_maintainers(a.text)
if maintainers:
print " - maintainers: {}".format(", ".join(map(lambda u: '@' + u, maintainers)))
# TODO: print last three persons that touched this file
# TODO: pinpoint the diff that broke this build, or maybe it's transient or maybe it never worked?
if __name__ == "__main__":
try:
cli()
except:
import pdb;pdb.post_mortem()
|
<commit_before><commit_msg>Add a script to get failures for hydra eval /cc @globin<commit_after>
|
#!/usr/bin/env nix-shell
#!nix-shell -i python -p pythonFull pythonPackages.requests pythonPackages.pyquery pythonPackages.click
# To use, just execute this script with --help to display help.
import subprocess
import json
import click
import requests
from pyquery import PyQuery as pq
maintainers_json = subprocess.check_output([
'nix-instantiate',
'lib/maintainers.nix',
'--eval',
'--json'])
maintainers = json.loads(maintainers_json)
MAINTAINERS = {v: k for k, v in maintainers.iteritems()}
def get_response_text(url):
return pq(requests.get(url).text) # IO
EVAL_FILE = {
'nixos': 'nixos/release.nix',
'nixpkgs': 'pkgs/top-level/release.nix',
}
def get_maintainers(attr_name):
nixname = attr_name.split('.')
meta_json = subprocess.check_output([
'nix-instantiate',
'--eval',
'--strict',
'-A',
'.'.join(nixname[1:]) + '.meta',
EVAL_FILE[nixname[0]],
'--json'])
meta = json.loads(meta_json)
if meta.get('maintainers'):
return [MAINTAINERS[name] for name in meta['maintainers'] if MAINTAINERS.get(name)]
@click.command()
@click.option(
'--jobset',
default="nixos/release-16.09",
help='Hydra project like nixos/release-16.09')
def cli(jobset):
"""
Given a Hydra project, inspect latest evaluation
and print a summary of failed builds
"""
url = "http://hydra.nixos.org/jobset/{}".format(jobset)
# get the last evaluation
click.echo(click.style(
'Getting latest evaluation for {}'.format(url), fg='green'))
d = get_response_text(url)
evaluations = d('#tabs-evaluations').find('a[class="row-link"]')
latest_eval_url = evaluations[0].get('href')
# parse last evaluation page
click.echo(click.style(
'Parsing evaluation {}'.format(latest_eval_url), fg='green'))
d = get_response_text(latest_eval_url + '?full=1')
# TODO: aborted evaluations
# TODO: dependency failed without propagated builds
for tr in d('img[alt="Failed"]').parents('tr'):
a = pq(tr)('a')[1]
print "- [ ] [{}]({})".format(a.text, a.get('href'))
maintainers = get_maintainers(a.text)
if maintainers:
print " - maintainers: {}".format(", ".join(map(lambda u: '@' + u, maintainers)))
# TODO: print last three persons that touched this file
# TODO: pinpoint the diff that broke this build, or maybe it's transient or maybe it never worked?
if __name__ == "__main__":
try:
cli()
except:
import pdb;pdb.post_mortem()
|
Add a script to get failures for hydra eval /cc @globin#!/usr/bin/env nix-shell
#!nix-shell -i python -p pythonFull pythonPackages.requests pythonPackages.pyquery pythonPackages.click
# To use, just execute this script with --help to display help.
import subprocess
import json
import click
import requests
from pyquery import PyQuery as pq
maintainers_json = subprocess.check_output([
'nix-instantiate',
'lib/maintainers.nix',
'--eval',
'--json'])
maintainers = json.loads(maintainers_json)
MAINTAINERS = {v: k for k, v in maintainers.iteritems()}
def get_response_text(url):
return pq(requests.get(url).text) # IO
EVAL_FILE = {
'nixos': 'nixos/release.nix',
'nixpkgs': 'pkgs/top-level/release.nix',
}
def get_maintainers(attr_name):
nixname = attr_name.split('.')
meta_json = subprocess.check_output([
'nix-instantiate',
'--eval',
'--strict',
'-A',
'.'.join(nixname[1:]) + '.meta',
EVAL_FILE[nixname[0]],
'--json'])
meta = json.loads(meta_json)
if meta.get('maintainers'):
return [MAINTAINERS[name] for name in meta['maintainers'] if MAINTAINERS.get(name)]
@click.command()
@click.option(
'--jobset',
default="nixos/release-16.09",
help='Hydra project like nixos/release-16.09')
def cli(jobset):
"""
Given a Hydra project, inspect latest evaluation
and print a summary of failed builds
"""
url = "http://hydra.nixos.org/jobset/{}".format(jobset)
# get the last evaluation
click.echo(click.style(
'Getting latest evaluation for {}'.format(url), fg='green'))
d = get_response_text(url)
evaluations = d('#tabs-evaluations').find('a[class="row-link"]')
latest_eval_url = evaluations[0].get('href')
# parse last evaluation page
click.echo(click.style(
'Parsing evaluation {}'.format(latest_eval_url), fg='green'))
d = get_response_text(latest_eval_url + '?full=1')
# TODO: aborted evaluations
# TODO: dependency failed without propagated builds
for tr in d('img[alt="Failed"]').parents('tr'):
a = pq(tr)('a')[1]
print "- [ ] [{}]({})".format(a.text, a.get('href'))
maintainers = get_maintainers(a.text)
if maintainers:
print " - maintainers: {}".format(", ".join(map(lambda u: '@' + u, maintainers)))
# TODO: print last three persons that touched this file
# TODO: pinpoint the diff that broke this build, or maybe it's transient or maybe it never worked?
if __name__ == "__main__":
try:
cli()
except:
import pdb;pdb.post_mortem()
|
<commit_before><commit_msg>Add a script to get failures for hydra eval /cc @globin<commit_after>#!/usr/bin/env nix-shell
#!nix-shell -i python -p pythonFull pythonPackages.requests pythonPackages.pyquery pythonPackages.click
# To use, just execute this script with --help to display help.
import subprocess
import json
import click
import requests
from pyquery import PyQuery as pq
maintainers_json = subprocess.check_output([
'nix-instantiate',
'lib/maintainers.nix',
'--eval',
'--json'])
maintainers = json.loads(maintainers_json)
MAINTAINERS = {v: k for k, v in maintainers.iteritems()}
def get_response_text(url):
return pq(requests.get(url).text) # IO
EVAL_FILE = {
'nixos': 'nixos/release.nix',
'nixpkgs': 'pkgs/top-level/release.nix',
}
def get_maintainers(attr_name):
nixname = attr_name.split('.')
meta_json = subprocess.check_output([
'nix-instantiate',
'--eval',
'--strict',
'-A',
'.'.join(nixname[1:]) + '.meta',
EVAL_FILE[nixname[0]],
'--json'])
meta = json.loads(meta_json)
if meta.get('maintainers'):
return [MAINTAINERS[name] for name in meta['maintainers'] if MAINTAINERS.get(name)]
@click.command()
@click.option(
'--jobset',
default="nixos/release-16.09",
help='Hydra project like nixos/release-16.09')
def cli(jobset):
"""
Given a Hydra project, inspect latest evaluation
and print a summary of failed builds
"""
url = "http://hydra.nixos.org/jobset/{}".format(jobset)
# get the last evaluation
click.echo(click.style(
'Getting latest evaluation for {}'.format(url), fg='green'))
d = get_response_text(url)
evaluations = d('#tabs-evaluations').find('a[class="row-link"]')
latest_eval_url = evaluations[0].get('href')
# parse last evaluation page
click.echo(click.style(
'Parsing evaluation {}'.format(latest_eval_url), fg='green'))
d = get_response_text(latest_eval_url + '?full=1')
# TODO: aborted evaluations
# TODO: dependency failed without propagated builds
for tr in d('img[alt="Failed"]').parents('tr'):
a = pq(tr)('a')[1]
print "- [ ] [{}]({})".format(a.text, a.get('href'))
maintainers = get_maintainers(a.text)
if maintainers:
print " - maintainers: {}".format(", ".join(map(lambda u: '@' + u, maintainers)))
# TODO: print last three persons that touched this file
# TODO: pinpoint the diff that broke this build, or maybe it's transient or maybe it never worked?
if __name__ == "__main__":
try:
cli()
except:
import pdb;pdb.post_mortem()
|
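The script above is Python 2 (print statements, dict.iteritems). A sketch of the same maintainer inversion and failed-row scrape in Python 3, assuming the requests and pyquery packages and a nixpkgs checkout are available; the Hydra selectors are copied from the record, and the evaluation id is illustrative:

import json
import subprocess

import requests
from pyquery import PyQuery as pq


def invert_maintainers():
    """Map GitHub handle -> attribute name, as MAINTAINERS does above."""
    # check_output returns bytes on Python 3; json.loads accepts them.
    out = subprocess.check_output(
        ['nix-instantiate', 'lib/maintainers.nix', '--eval', '--json'])
    return {v: k for k, v in json.loads(out).items()}


def failed_rows(eval_url):
    """Yield (job name, build href) for each failed build in an eval."""
    d = pq(requests.get(eval_url + '?full=1').text)
    for tr in d('img[alt="Failed"]').parents('tr'):
        a = pq(tr)('a')[1]
        yield a.text, a.get('href')


if __name__ == '__main__':
    # The evaluation id below is a placeholder, not a real eval.
    for name, href in failed_rows('http://hydra.nixos.org/eval/12345'):
        print('- [ ] [{}]({})'.format(name, href))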
|
af994bff6378c045aed40573c7a6ccaa2bb84560
|
tests/writer/test_file.py
|
tests/writer/test_file.py
|
import unittest, argparse, os, tempfile
from echolalia.writer.file import Writer
class FileTestCase(unittest.TestCase):
def setUp(self):
self.parser = argparse.ArgumentParser()
self.data = [{chr(i): i - 96} for i in xrange(97, 123)]
self.writer = Writer()
def test_add_args(self):
new_parser = self.writer.add_args(self.parser)
self.assertEqual(new_parser, self.parser)
args = new_parser.parse_args(['-o', 'output.json'])
self.assertEqual(args.output, 'output.json')
with self.assertRaises(SystemExit):
new_parser.parse_args([])
def test_write(self):
temp = tempfile.NamedTemporaryFile(delete=False)
try:
args = self.parser.parse_args([])
args.output = temp.name
self.writer.write(args, self.data)
with open(temp.name, 'r') as f:
self.assertEqual(f.read(), "{}\n".format(self.data))
finally:
os.unlink(temp.name)
|
Add tests for writer file
|
Add tests for writer file
|
Python
|
mit
|
eiri/echolalia-prototype
|
Add tests for writer file
|
import unittest, argparse, os, tempfile
from echolalia.writer.file import Writer
class FileTestCase(unittest.TestCase):
def setUp(self):
self.parser = argparse.ArgumentParser()
self.data = [{chr(i): i - 96} for i in xrange(97, 123)]
self.writer = Writer()
def test_add_args(self):
new_parser = self.writer.add_args(self.parser)
self.assertEqual(new_parser, self.parser)
args = new_parser.parse_args(['-o', 'output.json'])
self.assertEqual(args.output, 'output.json')
with self.assertRaises(SystemExit):
new_parser.parse_args([])
def test_write(self):
temp = tempfile.NamedTemporaryFile(delete=False)
try:
args = self.parser.parse_args([])
args.output = temp.name
self.writer.write(args, self.data)
with open(temp.name, 'r') as f:
self.assertEqual(f.read(), "{}\n".format(self.data))
finally:
os.unlink(temp.name)
|
<commit_before><commit_msg>Add tests for writer file<commit_after>
|
import unittest, argparse, os, tempfile
from echolalia.writer.file import Writer
class FileTestCase(unittest.TestCase):
def setUp(self):
self.parser = argparse.ArgumentParser()
self.data = [{chr(i): i - 96} for i in xrange(97, 123)]
self.writer = Writer()
def test_add_args(self):
new_parser = self.writer.add_args(self.parser)
self.assertEqual(new_parser, self.parser)
args = new_parser.parse_args(['-o', 'output.json'])
self.assertEqual(args.output, 'output.json')
with self.assertRaises(SystemExit):
new_parser.parse_args([])
def test_write(self):
temp = tempfile.NamedTemporaryFile(delete=False)
try:
args = self.parser.parse_args([])
args.output = temp.name
self.writer.write(args, self.data)
with open(temp.name, 'r') as f:
self.assertEqual(f.read(), "{}\n".format(self.data))
finally:
os.unlink(temp.name)
|
Add tests for writer fileimport unittest, argparse, os, tempfile
from echolalia.writer.file import Writer
class FileTestCase(unittest.TestCase):
def setUp(self):
self.parser = argparse.ArgumentParser()
self.data = [{chr(i): i - 96} for i in xrange(97, 123)]
self.writer = Writer()
def test_add_args(self):
new_parser = self.writer.add_args(self.parser)
self.assertEqual(new_parser, self.parser)
args = new_parser.parse_args(['-o', 'output.json'])
self.assertEqual(args.output, 'output.json')
with self.assertRaises(SystemExit):
new_parser.parse_args([])
def test_write(self):
temp = tempfile.NamedTemporaryFile(delete=False)
try:
args = self.parser.parse_args([])
args.output = temp.name
self.writer.write(args, self.data)
with open(temp.name, 'r') as f:
self.assertEqual(f.read(), "{}\n".format(self.data))
finally:
os.unlink(temp.name)
|
<commit_before><commit_msg>Add tests for writer file<commit_after>import unittest, argparse, os, tempfile
from echolalia.writer.file import Writer
class FileTestCase(unittest.TestCase):
def setUp(self):
self.parser = argparse.ArgumentParser()
self.data = [{chr(i): i - 96} for i in xrange(97, 123)]
self.writer = Writer()
def test_add_args(self):
new_parser = self.writer.add_args(self.parser)
self.assertEqual(new_parser, self.parser)
args = new_parser.parse_args(['-o', 'output.json'])
self.assertEqual(args.output, 'output.json')
with self.assertRaises(SystemExit):
new_parser.parse_args([])
def test_write(self):
temp = tempfile.NamedTemporaryFile(delete=False)
try:
args = self.parser.parse_args([])
args.output = temp.name
self.writer.write(args, self.data)
with open(temp.name, 'r') as f:
self.assertEqual(f.read(), "{}\n".format(self.data))
finally:
os.unlink(temp.name)
|
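The write test above relies on delete=False plus a finally-block unlink, so the file outlives the NamedTemporaryFile handle but is still removed; a standalone sketch of that pattern:

import os
import tempfile
import unittest


class TempFileRoundTrip(unittest.TestCase):
    def test_write_then_read(self):
        # delete=False keeps the file on disk after the handle closes,
        # so it can be reopened by name; the finally block removes it.
        temp = tempfile.NamedTemporaryFile(delete=False)
        try:
            temp.write(b'hello\n')
            temp.close()
            with open(temp.name, 'r') as f:
                self.assertEqual(f.read(), 'hello\n')
        finally:
            os.unlink(temp.name)


if __name__ == '__main__':
    unittest.main()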
|
f49283377c7c1908afe67439a637c7414ce401f1
|
tests/query_test/test_chars.py
|
tests/query_test/test_chars.py
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
import logging
import pytest
from copy import copy
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestStringQueries(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
def setup_method(self, method):
self.__cleanup_char_tables()
self.__create_char_tables()
def teardown_method(self, method):
self.__cleanup_char_tables()
def __cleanup_char_tables(self):
self.client.execute('drop table if exists functional.test_char_tmp');
self.client.execute('drop table if exists functional.test_varchar_tmp');
def __create_char_tables(self):
self.client.execute(
'create table if not exists functional.test_varchar_tmp (vc varchar(5))')
self.client.execute(
'create table if not exists functional.test_char_tmp (c char(5))')
@classmethod
def add_test_dimensions(cls):
super(TestStringQueries, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
create_exec_option_dimension(disable_codegen_options=[True]))
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format in ['text'])
def test_varchar(self, vector):
self.run_test_case('QueryTest/chars', vector)
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
import logging
import pytest
from copy import copy
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestStringQueries(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
def setup_method(self, method):
self.__cleanup_char_tables()
self.__create_char_tables()
def teardown_method(self, method):
self.__cleanup_char_tables()
def __cleanup_char_tables(self):
self.client.execute('drop table if exists functional.test_char_tmp');
self.client.execute('drop table if exists functional.test_varchar_tmp');
def __create_char_tables(self):
self.client.execute(
'create table if not exists functional.test_varchar_tmp (vc varchar(5))')
self.client.execute(
'create table if not exists functional.test_char_tmp (c char(5))')
@classmethod
def add_test_dimensions(cls):
super(TestStringQueries, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
create_exec_option_dimension(disable_codegen_options=[True]))
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format in ['text'] and
v.get_value('table_format').compression_codec in ['none'])
def test_varchar(self, vector):
self.run_test_case('QueryTest/chars', vector)
|
Fix char test to only run on text/none.
|
Fix char test to only run on text/none.
Change-Id: I8f5ac5a6e7399ce2fdbe78d07ae24deaa1d7532d
Reviewed-on: http://gerrit.sjc.cloudera.com:8080/4326
Tested-by: jenkins
Reviewed-by: Alex Behm <fe1626037acfc2dc542d2aa723a6d14f2464a20c@cloudera.com>
|
Python
|
apache-2.0
|
theyaa/Impala,caseyching/Impala,theyaa/Impala,XiaominZhang/Impala,brightchen/Impala,bratatidas9/Impala-1,XiaominZhang/Impala,tempbottle/Impala,brightchen/Impala,bratatidas9/Impala-1,cchanning/Impala,brightchen/Impala,cloudera/recordservice,bratatidas9/Impala-1,brightchen/Impala,kapilrastogi/Impala,tempbottle/Impala,lirui-intel/Impala,theyaa/Impala,kapilrastogi/Impala,henryr/Impala,placrosse/ImpalaToGo,grundprinzip/Impala,placrosse/ImpalaToGo,scalingdata/Impala,lirui-intel/Impala,theyaa/Impala,cchanning/Impala,henryr/Impala,ImpalaToGo/ImpalaToGo,bowlofstew/Impala,lirui-intel/Impala,lnliuxing/Impala,bowlofstew/Impala,cchanning/Impala,bratatidas9/Impala-1,tempbottle/Impala,lnliuxing/Impala,cloudera/recordservice,cloudera/recordservice,cloudera/recordservice,cchanning/Impala,tempbottle/Impala,cchanning/Impala,brightchen/Impala,cgvarela/Impala,lnliuxing/Impala,caseyching/Impala,kapilrastogi/Impala,rdblue/Impala,ImpalaToGo/ImpalaToGo,gerashegalov/Impala,rdblue/Impala,brightchen/Impala,bratatidas9/Impala-1,henryr/Impala,gerashegalov/Impala,caseyching/Impala,gerashegalov/Impala,ImpalaToGo/ImpalaToGo,cgvarela/Impala,bowlofstew/Impala,kapilrastogi/Impala,bratatidas9/Impala-1,cloudera/recordservice,placrosse/ImpalaToGo,rdblue/Impala,rdblue/Impala,XiaominZhang/Impala,bowlofstew/Impala,XiaominZhang/Impala,ImpalaToGo/ImpalaToGo,kapilrastogi/Impala,tempbottle/Impala,gerashegalov/Impala,ImpalaToGo/ImpalaToGo,XiaominZhang/Impala,tempbottle/Impala,brightchen/Impala,rdblue/Impala,cloudera/recordservice,caseyching/Impala,rdblue/Impala,cgvarela/Impala,lirui-intel/Impala,placrosse/ImpalaToGo,XiaominZhang/Impala,cchanning/Impala,lirui-intel/Impala,theyaa/Impala,bratatidas9/Impala-1,cloudera/recordservice,grundprinzip/Impala,lirui-intel/Impala,scalingdata/Impala,caseyching/Impala,grundprinzip/Impala,caseyching/Impala,XiaominZhang/Impala,kapilrastogi/Impala,gerashegalov/Impala,bowlofstew/Impala,placrosse/ImpalaToGo,lnliuxing/Impala,lirui-intel/Impala,scalingdata/Impala,caseyching/Impala,grundprinzip/Impala,henryr/Impala,scalingdata/Impala,gerashegalov/Impala,rdblue/Impala,scalingdata/Impala,cgvarela/Impala,lnliuxing/Impala,gerashegalov/Impala,ImpalaToGo/ImpalaToGo,lnliuxing/Impala,tempbottle/Impala,kapilrastogi/Impala,theyaa/Impala,theyaa/Impala,henryr/Impala,cgvarela/Impala,grundprinzip/Impala,lnliuxing/Impala,bowlofstew/Impala,cgvarela/Impala,cchanning/Impala,placrosse/ImpalaToGo,henryr/Impala,cgvarela/Impala,bowlofstew/Impala,scalingdata/Impala,grundprinzip/Impala
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
import logging
import pytest
from copy import copy
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestStringQueries(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
def setup_method(self, method):
self.__cleanup_char_tables()
self.__create_char_tables()
def teardown_method(self, method):
self.__cleanup_char_tables()
def __cleanup_char_tables(self):
self.client.execute('drop table if exists functional.test_char_tmp');
self.client.execute('drop table if exists functional.test_varchar_tmp');
def __create_char_tables(self):
self.client.execute(
'create table if not exists functional.test_varchar_tmp (vc varchar(5))')
self.client.execute(
'create table if not exists functional.test_char_tmp (c char(5))')
@classmethod
def add_test_dimensions(cls):
super(TestStringQueries, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
create_exec_option_dimension(disable_codegen_options=[True]))
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format in ['text'])
def test_varchar(self, vector):
self.run_test_case('QueryTest/chars', vector)
Fix char test to only run on text/none.
Change-Id: I8f5ac5a6e7399ce2fdbe78d07ae24deaa1d7532d
Reviewed-on: http://gerrit.sjc.cloudera.com:8080/4326
Tested-by: jenkins
Reviewed-by: Alex Behm <fe1626037acfc2dc542d2aa723a6d14f2464a20c@cloudera.com>
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
import logging
import pytest
from copy import copy
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestStringQueries(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
def setup_method(self, method):
self.__cleanup_char_tables()
self.__create_char_tables()
def teardown_method(self, method):
self.__cleanup_char_tables()
def __cleanup_char_tables(self):
self.client.execute('drop table if exists functional.test_char_tmp');
self.client.execute('drop table if exists functional.test_varchar_tmp');
def __create_char_tables(self):
self.client.execute(
'create table if not exists functional.test_varchar_tmp (vc varchar(5))')
self.client.execute(
'create table if not exists functional.test_char_tmp (c char(5))')
@classmethod
def add_test_dimensions(cls):
super(TestStringQueries, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
create_exec_option_dimension(disable_codegen_options=[True]))
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format in ['text'] and
v.get_value('table_format').compression_codec in ['none'])
def test_varchar(self, vector):
self.run_test_case('QueryTest/chars', vector)
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
import logging
import pytest
from copy import copy
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestStringQueries(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
def setup_method(self, method):
self.__cleanup_char_tables()
self.__create_char_tables()
def teardown_method(self, method):
self.__cleanup_char_tables()
def __cleanup_char_tables(self):
self.client.execute('drop table if exists functional.test_char_tmp');
self.client.execute('drop table if exists functional.test_varchar_tmp');
def __create_char_tables(self):
self.client.execute(
'create table if not exists functional.test_varchar_tmp (vc varchar(5))')
self.client.execute(
'create table if not exists functional.test_char_tmp (c char(5))')
@classmethod
def add_test_dimensions(cls):
super(TestStringQueries, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
create_exec_option_dimension(disable_codegen_options=[True]))
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format in ['text'])
def test_varchar(self, vector):
self.run_test_case('QueryTest/chars', vector)
<commit_msg>Fix char test to only run on text/none.
Change-Id: I8f5ac5a6e7399ce2fdbe78d07ae24deaa1d7532d
Reviewed-on: http://gerrit.sjc.cloudera.com:8080/4326
Tested-by: jenkins
Reviewed-by: Alex Behm <fe1626037acfc2dc542d2aa723a6d14f2464a20c@cloudera.com><commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
import logging
import pytest
from copy import copy
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestStringQueries(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
def setup_method(self, method):
self.__cleanup_char_tables()
self.__create_char_tables()
def teardown_method(self, method):
self.__cleanup_char_tables()
def __cleanup_char_tables(self):
self.client.execute('drop table if exists functional.test_char_tmp');
self.client.execute('drop table if exists functional.test_varchar_tmp');
def __create_char_tables(self):
self.client.execute(
'create table if not exists functional.test_varchar_tmp (vc varchar(5))')
self.client.execute(
'create table if not exists functional.test_char_tmp (c char(5))')
@classmethod
def add_test_dimensions(cls):
super(TestStringQueries, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
create_exec_option_dimension(disable_codegen_options=[True]))
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format in ['text'] and
v.get_value('table_format').compression_codec in ['none'])
def test_varchar(self, vector):
self.run_test_case('QueryTest/chars', vector)
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
import logging
import pytest
from copy import copy
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestStringQueries(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
def setup_method(self, method):
self.__cleanup_char_tables()
self.__create_char_tables()
def teardown_method(self, method):
self.__cleanup_char_tables()
def __cleanup_char_tables(self):
self.client.execute('drop table if exists functional.test_char_tmp');
self.client.execute('drop table if exists functional.test_varchar_tmp');
def __create_char_tables(self):
self.client.execute(
'create table if not exists functional.test_varchar_tmp (vc varchar(5))')
self.client.execute(
'create table if not exists functional.test_char_tmp (c char(5))')
@classmethod
def add_test_dimensions(cls):
super(TestStringQueries, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
create_exec_option_dimension(disable_codegen_options=[True]))
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format in ['text'])
def test_varchar(self, vector):
self.run_test_case('QueryTest/chars', vector)
Fix char test to only run on text/none.
Change-Id: I8f5ac5a6e7399ce2fdbe78d07ae24deaa1d7532d
Reviewed-on: http://gerrit.sjc.cloudera.com:8080/4326
Tested-by: jenkins
Reviewed-by: Alex Behm <fe1626037acfc2dc542d2aa723a6d14f2464a20c@cloudera.com>#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
import logging
import pytest
from copy import copy
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestStringQueries(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
def setup_method(self, method):
self.__cleanup_char_tables()
self.__create_char_tables()
def teardown_method(self, method):
self.__cleanup_char_tables()
def __cleanup_char_tables(self):
self.client.execute('drop table if exists functional.test_char_tmp');
self.client.execute('drop table if exists functional.test_varchar_tmp');
def __create_char_tables(self):
self.client.execute(
'create table if not exists functional.test_varchar_tmp (vc varchar(5))')
self.client.execute(
'create table if not exists functional.test_char_tmp (c char(5))')
@classmethod
def add_test_dimensions(cls):
super(TestStringQueries, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
create_exec_option_dimension(disable_codegen_options=[True]))
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format in ['text'] and
v.get_value('table_format').compression_codec in ['none'])
def test_varchar(self, vector):
self.run_test_case('QueryTest/chars', vector)
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
import logging
import pytest
from copy import copy
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestStringQueries(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
def setup_method(self, method):
self.__cleanup_char_tables()
self.__create_char_tables()
def teardown_method(self, method):
self.__cleanup_char_tables()
def __cleanup_char_tables(self):
self.client.execute('drop table if exists functional.test_char_tmp');
self.client.execute('drop table if exists functional.test_varchar_tmp');
def __create_char_tables(self):
self.client.execute(
'create table if not exists functional.test_varchar_tmp (vc varchar(5))')
self.client.execute(
'create table if not exists functional.test_char_tmp (c char(5))')
@classmethod
def add_test_dimensions(cls):
super(TestStringQueries, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
create_exec_option_dimension(disable_codegen_options=[True]))
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format in ['text'])
def test_varchar(self, vector):
self.run_test_case('QueryTest/chars', vector)
<commit_msg>Fix char test to only run on text/none.
Change-Id: I8f5ac5a6e7399ce2fdbe78d07ae24deaa1d7532d
Reviewed-on: http://gerrit.sjc.cloudera.com:8080/4326
Tested-by: jenkins
Reviewed-by: Alex Behm <fe1626037acfc2dc542d2aa723a6d14f2464a20c@cloudera.com><commit_after>#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
import logging
import pytest
from copy import copy
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestStringQueries(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
def setup_method(self, method):
self.__cleanup_char_tables()
self.__create_char_tables()
def teardown_method(self, method):
self.__cleanup_char_tables()
def __cleanup_char_tables(self):
self.client.execute('drop table if exists functional.test_char_tmp');
self.client.execute('drop table if exists functional.test_varchar_tmp');
def __create_char_tables(self):
self.client.execute(
'create table if not exists functional.test_varchar_tmp (vc varchar(5))')
self.client.execute(
'create table if not exists functional.test_char_tmp (c char(5))')
@classmethod
def add_test_dimensions(cls):
super(TestStringQueries, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
create_exec_option_dimension(disable_codegen_options=[True]))
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format in ['text'] and
v.get_value('table_format').compression_codec in ['none'])
def test_varchar(self, vector):
self.run_test_case('QueryTest/chars', vector)
|
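The repeated blocks in the record above make the training-text layout visible: the old file contents, the commit message, and the new contents are concatenated around the <commit_before>, <commit_msg>, and <commit_after> markers. A minimal sketch of that assembly follows; the function and parameter names are invented for illustration, and this is not the dataset's actual generation code.

def build_tagged_text(old, message, new):
    # Mirrors the marker layout visible in the record above. The field
    # names and the absence of any separator between fields are assumptions.
    return "<commit_before>" + old + "<commit_msg>" + message + "<commit_after>" + new

# For a record with empty old contents this yields the
# "<commit_before><commit_msg>...<commit_after>..." form seen in later records.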
b6803d63107f74641f01930716f2c52d326d59d6
|
pycalphad/tests/test_plot.py
|
pycalphad/tests/test_plot.py
|
"""
The plot test module verifies that the eqplot produces plots without error.
"""
from pycalphad import Database, eqplot, equilibrium
import pycalphad.variables as v
from pycalphad.tests.datasets import *
from matplotlib.axes import Axes
ALFE_DBF = Database(ALFE_TDB)
ALCOCRNI_DBF = Database(ALCOCRNI_TDB)
def test_eqplot_binary():
"""
eqplot should return an axes object when one independent component and one
independent potential are passed.
"""
my_phases = ['LIQUID', 'FCC_A1', 'HCP_A3', 'AL5FE2',
'AL2FE', 'AL13FE4', 'AL5FE4']
comps = ['AL', 'FE', 'VA']
conds = {v.T: (1400, 1500, 50), v.P: 101325, v.X('AL'): (0, 1, 0.5)}
eq = equilibrium(ALFE_DBF, comps, my_phases, conds)
ax = eqplot(eq)
assert isinstance(ax, Axes)
def test_eqplot_ternary():
"""
    eqplot should return an axes object that has a triangular projection when
two independent components and one independent potential are passed.
"""
eq = equilibrium(ALCOCRNI_DBF, ['AL', 'CO', 'CR', 'VA'], ['LIQUID'],
{v.T: 2500, v.X('AL'): (0,0.5,0.33), v.X('CO'): (0,0.5,0.3), v.P: 101325})
ax = eqplot(eq)
assert isinstance(ax, Axes)
assert ax.name == 'triangular'
|
Add integration tests for eqplot
|
TST: Add integration tests for eqplot
|
Python
|
mit
|
tkphd/pycalphad,tkphd/pycalphad,tkphd/pycalphad
|
TST: Add integration tests for eqplot
|
"""
The plot test module verifies that the eqplot produces plots without error.
"""
from pycalphad import Database, eqplot, equilibrium
import pycalphad.variables as v
from pycalphad.tests.datasets import *
from matplotlib.axes import Axes
ALFE_DBF = Database(ALFE_TDB)
ALCOCRNI_DBF = Database(ALCOCRNI_TDB)
def test_eqplot_binary():
"""
eqplot should return an axes object when one independent component and one
independent potential are passed.
"""
my_phases = ['LIQUID', 'FCC_A1', 'HCP_A3', 'AL5FE2',
'AL2FE', 'AL13FE4', 'AL5FE4']
comps = ['AL', 'FE', 'VA']
conds = {v.T: (1400, 1500, 50), v.P: 101325, v.X('AL'): (0, 1, 0.5)}
eq = equilibrium(ALFE_DBF, comps, my_phases, conds)
ax = eqplot(eq)
assert isinstance(ax, Axes)
def test_eqplot_ternary():
"""
    eqplot should return an axes object that has a triangular projection when
two independent components and one independent potential are passed.
"""
eq = equilibrium(ALCOCRNI_DBF, ['AL', 'CO', 'CR', 'VA'], ['LIQUID'],
{v.T: 2500, v.X('AL'): (0,0.5,0.33), v.X('CO'): (0,0.5,0.3), v.P: 101325})
ax = eqplot(eq)
assert isinstance(ax, Axes)
assert ax.name == 'triangular'
|
<commit_before><commit_msg>TST: Add integration tests for eqplot<commit_after>
|
"""
The plot test module verifies that the eqplot produces plots without error.
"""
from pycalphad import Database, eqplot, equilibrium
import pycalphad.variables as v
from pycalphad.tests.datasets import *
from matplotlib.axes import Axes
ALFE_DBF = Database(ALFE_TDB)
ALCOCRNI_DBF = Database(ALCOCRNI_TDB)
def test_eqplot_binary():
"""
eqplot should return an axes object when one independent component and one
independent potential are passed.
"""
my_phases = ['LIQUID', 'FCC_A1', 'HCP_A3', 'AL5FE2',
'AL2FE', 'AL13FE4', 'AL5FE4']
comps = ['AL', 'FE', 'VA']
conds = {v.T: (1400, 1500, 50), v.P: 101325, v.X('AL'): (0, 1, 0.5)}
eq = equilibrium(ALFE_DBF, comps, my_phases, conds)
ax = eqplot(eq)
assert isinstance(ax, Axes)
def test_eqplot_ternary():
"""
    eqplot should return an axes object that has a triangular projection when
two independent components and one independent potential are passed.
"""
eq = equilibrium(ALCOCRNI_DBF, ['AL', 'CO', 'CR', 'VA'], ['LIQUID'],
{v.T: 2500, v.X('AL'): (0,0.5,0.33), v.X('CO'): (0,0.5,0.3), v.P: 101325})
ax = eqplot(eq)
assert isinstance(ax, Axes)
assert ax.name == 'triangular'
|
TST: Add integration tests for eqplot"""
The plot test module verifies that the eqplot produces plots without error.
"""
from pycalphad import Database, eqplot, equilibrium
import pycalphad.variables as v
from pycalphad.tests.datasets import *
from matplotlib.axes import Axes
ALFE_DBF = Database(ALFE_TDB)
ALCOCRNI_DBF = Database(ALCOCRNI_TDB)
def test_eqplot_binary():
"""
eqplot should return an axes object when one independent component and one
independent potential are passed.
"""
my_phases = ['LIQUID', 'FCC_A1', 'HCP_A3', 'AL5FE2',
'AL2FE', 'AL13FE4', 'AL5FE4']
comps = ['AL', 'FE', 'VA']
conds = {v.T: (1400, 1500, 50), v.P: 101325, v.X('AL'): (0, 1, 0.5)}
eq = equilibrium(ALFE_DBF, comps, my_phases, conds)
ax = eqplot(eq)
assert isinstance(ax, Axes)
def test_eqplot_ternary():
"""
    eqplot should return an axes object that has a triangular projection when
two independent components and one independent potential are passed.
"""
eq = equilibrium(ALCOCRNI_DBF, ['AL', 'CO', 'CR', 'VA'], ['LIQUID'],
{v.T: 2500, v.X('AL'): (0,0.5,0.33), v.X('CO'): (0,0.5,0.3), v.P: 101325})
ax = eqplot(eq)
assert isinstance(ax, Axes)
assert ax.name == 'triangular'
|
<commit_before><commit_msg>TST: Add integration tests for eqplot<commit_after>"""
The plot test module verifies that the eqplot produces plots without error.
"""
from pycalphad import Database, eqplot, equilibrium
import pycalphad.variables as v
from pycalphad.tests.datasets import *
from matplotlib.axes import Axes
ALFE_DBF = Database(ALFE_TDB)
ALCOCRNI_DBF = Database(ALCOCRNI_TDB)
def test_eqplot_binary():
"""
eqplot should return an axes object when one independent component and one
independent potential are passed.
"""
my_phases = ['LIQUID', 'FCC_A1', 'HCP_A3', 'AL5FE2',
'AL2FE', 'AL13FE4', 'AL5FE4']
comps = ['AL', 'FE', 'VA']
conds = {v.T: (1400, 1500, 50), v.P: 101325, v.X('AL'): (0, 1, 0.5)}
eq = equilibrium(ALFE_DBF, comps, my_phases, conds)
ax = eqplot(eq)
assert isinstance(ax, Axes)
def test_eqplot_ternary():
"""
    eqplot should return an axes object that has a triangular projection when
two independent components and one independent potential are passed.
"""
eq = equilibrium(ALCOCRNI_DBF, ['AL', 'CO', 'CR', 'VA'], ['LIQUID'],
{v.T: 2500, v.X('AL'): (0,0.5,0.33), v.X('CO'): (0,0.5,0.3), v.P: 101325})
ax = eqplot(eq)
assert isinstance(ax, Axes)
assert ax.name == 'triangular'
|
|
c674a32f0138447923a548c0d21e0b4c7b031145
|
mrbelvedereci/plan/migrations/0005_plan_junit_path.py
|
mrbelvedereci/plan/migrations/0005_plan_junit_path.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-05-30 22:20
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('plan', '0004_remove_plan_devhub'),
]
operations = [
migrations.AddField(
model_name='plan',
name='junit_path',
field=models.CharField(blank=True, max_length=255, null=True),
),
]
|
Add migration for junit_path field
|
Add migration for junit_path field
|
Python
|
bsd-3-clause
|
SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci
|
Add migration for junit_path field
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-05-30 22:20
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('plan', '0004_remove_plan_devhub'),
]
operations = [
migrations.AddField(
model_name='plan',
name='junit_path',
field=models.CharField(blank=True, max_length=255, null=True),
),
]
|
<commit_before><commit_msg>Add migration for junit_path field<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-05-30 22:20
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('plan', '0004_remove_plan_devhub'),
]
operations = [
migrations.AddField(
model_name='plan',
name='junit_path',
field=models.CharField(blank=True, max_length=255, null=True),
),
]
|
Add migration for junit_path field# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-05-30 22:20
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('plan', '0004_remove_plan_devhub'),
]
operations = [
migrations.AddField(
model_name='plan',
name='junit_path',
field=models.CharField(blank=True, max_length=255, null=True),
),
]
|
<commit_before><commit_msg>Add migration for junit_path field<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-05-30 22:20
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('plan', '0004_remove_plan_devhub'),
]
operations = [
migrations.AddField(
model_name='plan',
name='junit_path',
field=models.CharField(blank=True, max_length=255, null=True),
),
]
|
|
ebbba9cfe923b86da28b9801515406efa1bdba4e
|
recipe_modules/raw_io/test_api.py
|
recipe_modules/raw_io/test_api.py
|
from slave import recipe_test_api
class RawIOTestApi(recipe_test_api.RecipeTestApi): # pragma: no cover
@recipe_test_api.placeholder_step_data
@staticmethod
def output(data, retcode=None):
return data, retcode
|
Add the API available inside GenTests method for the raw_io module.
|
Add the API available inside GenTests method for the raw_io module.
This CL makes the raw_io module ready to have its output mocked
in GenTests.
R=agable@chromium.org
Review URL: https://codereview.chromium.org/160143003
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@250773 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
shishkander/recipes-py,shishkander/recipes-py,luci/recipes-py,luci/recipes-py
|
Add the API available inside GenTests method for the raw_io module.
This CL makes the raw_io module ready to have its output mocked
in GenTests.
R=agable@chromium.org
Review URL: https://codereview.chromium.org/160143003
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@250773 0039d316-1c4b-4281-b951-d872f2087c98
|
from slave import recipe_test_api
class RawIOTestApi(recipe_test_api.RecipeTestApi): # pragma: no cover
@recipe_test_api.placeholder_step_data
@staticmethod
def output(data, retcode=None):
return data, retcode
|
<commit_before><commit_msg>Add the API available inside GenTests method for the raw_io module.
This CL makes the raw_io module ready to have its output mocked
in GenTests.
R=agable@chromium.org
Review URL: https://codereview.chromium.org/160143003
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@250773 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
from slave import recipe_test_api
class RawIOTestApi(recipe_test_api.RecipeTestApi): # pragma: no cover
@recipe_test_api.placeholder_step_data
@staticmethod
def output(data, retcode=None):
return data, retcode
|
Add the API available inside GenTests method for the raw_io module.
This CL makes the raw_io module ready to have its output mocked
in GenTests.
R=agable@chromium.org
Review URL: https://codereview.chromium.org/160143003
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@250773 0039d316-1c4b-4281-b951-d872f2087c98from slave import recipe_test_api
class RawIOTestApi(recipe_test_api.RecipeTestApi): # pragma: no cover
@recipe_test_api.placeholder_step_data
@staticmethod
def output(data, retcode=None):
return data, retcode
|
<commit_before><commit_msg>Add the API available inside GenTests method for the raw_io module.
This CL makes the raw_io module ready to have its output mocked
in GenTests.
R=agable@chromium.org
Review URL: https://codereview.chromium.org/160143003
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@250773 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>from slave import recipe_test_api
class RawIOTestApi(recipe_test_api.RecipeTestApi): # pragma: no cover
@recipe_test_api.placeholder_step_data
@staticmethod
def output(data, retcode=None):
return data, retcode
|
|
71fcb6294e1a5e30c88fcf2045b4406764ebd803
|
IPython/frontend/qt/console/tests/test_console_widget.py
|
IPython/frontend/qt/console/tests/test_console_widget.py
|
# Standard library imports
import unittest
# System library imports
from IPython.external.qt import QtGui
# Local imports
from IPython.frontend.qt.console.console_widget import ConsoleWidget
class TestConsoleWidget(unittest.TestCase):
@classmethod
def setUpClass(cls):
""" Create the application for the test case.
"""
cls._app = QtGui.QApplication([])
cls._app.setQuitOnLastWindowClosed(False)
@classmethod
def tearDownClass(cls):
""" Exit the application.
"""
QtGui.QApplication.quit()
def test_special_characters(self):
""" Are special characters displayed correctly?
"""
w = ConsoleWidget()
cursor = w._get_prompt_cursor()
test_inputs = ['xyz\b\b=\n', 'foo\b\nbar\n', 'foo\b\nbar\r\n', 'abc\rxyz\b\b=']
expected_outputs = [u'x=z\u2029', u'foo\u2029bar\u2029', u'foo\u2029bar\u2029', 'x=z']
for i, text in enumerate(test_inputs):
w._insert_plain_text(cursor, text)
cursor.select(cursor.Document)
selection = cursor.selectedText()
self.assertEquals(expected_outputs[i], selection)
# clear all the text
cursor.insertText('')
|
Add tests to check the actual output on the console.
|
TST: Add tests to check the actual output on the console.
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
TST: Add tests to check the actual output on the console.
|
# Standard library imports
import unittest
# System library imports
from IPython.external.qt import QtGui
# Local imports
from IPython.frontend.qt.console.console_widget import ConsoleWidget
class TestConsoleWidget(unittest.TestCase):
@classmethod
def setUpClass(cls):
""" Create the application for the test case.
"""
cls._app = QtGui.QApplication([])
cls._app.setQuitOnLastWindowClosed(False)
@classmethod
def tearDownClass(cls):
""" Exit the application.
"""
QtGui.QApplication.quit()
def test_special_characters(self):
""" Are special characters displayed correctly?
"""
w = ConsoleWidget()
cursor = w._get_prompt_cursor()
test_inputs = ['xyz\b\b=\n', 'foo\b\nbar\n', 'foo\b\nbar\r\n', 'abc\rxyz\b\b=']
expected_outputs = [u'x=z\u2029', u'foo\u2029bar\u2029', u'foo\u2029bar\u2029', 'x=z']
for i, text in enumerate(test_inputs):
w._insert_plain_text(cursor, text)
cursor.select(cursor.Document)
selection = cursor.selectedText()
self.assertEquals(expected_outputs[i], selection)
# clear all the text
cursor.insertText('')
|
<commit_before><commit_msg>TST: Add tests to check the actual output on the console.<commit_after>
|
# Standard library imports
import unittest
# System library imports
from IPython.external.qt import QtGui
# Local imports
from IPython.frontend.qt.console.console_widget import ConsoleWidget
class TestConsoleWidget(unittest.TestCase):
@classmethod
def setUpClass(cls):
""" Create the application for the test case.
"""
cls._app = QtGui.QApplication([])
cls._app.setQuitOnLastWindowClosed(False)
@classmethod
def tearDownClass(cls):
""" Exit the application.
"""
QtGui.QApplication.quit()
def test_special_characters(self):
""" Are special characters displayed correctly?
"""
w = ConsoleWidget()
cursor = w._get_prompt_cursor()
test_inputs = ['xyz\b\b=\n', 'foo\b\nbar\n', 'foo\b\nbar\r\n', 'abc\rxyz\b\b=']
expected_outputs = [u'x=z\u2029', u'foo\u2029bar\u2029', u'foo\u2029bar\u2029', 'x=z']
for i, text in enumerate(test_inputs):
w._insert_plain_text(cursor, text)
cursor.select(cursor.Document)
selection = cursor.selectedText()
self.assertEquals(expected_outputs[i], selection)
# clear all the text
cursor.insertText('')
|
TST: Add tests to check the actual output on the console.# Standard library imports
import unittest
# System library imports
from IPython.external.qt import QtGui
# Local imports
from IPython.frontend.qt.console.console_widget import ConsoleWidget
class TestConsoleWidget(unittest.TestCase):
@classmethod
def setUpClass(cls):
""" Create the application for the test case.
"""
cls._app = QtGui.QApplication([])
cls._app.setQuitOnLastWindowClosed(False)
@classmethod
def tearDownClass(cls):
""" Exit the application.
"""
QtGui.QApplication.quit()
def test_special_characters(self):
""" Are special characters displayed correctly?
"""
w = ConsoleWidget()
cursor = w._get_prompt_cursor()
test_inputs = ['xyz\b\b=\n', 'foo\b\nbar\n', 'foo\b\nbar\r\n', 'abc\rxyz\b\b=']
expected_outputs = [u'x=z\u2029', u'foo\u2029bar\u2029', u'foo\u2029bar\u2029', 'x=z']
for i, text in enumerate(test_inputs):
w._insert_plain_text(cursor, text)
cursor.select(cursor.Document)
selection = cursor.selectedText()
self.assertEquals(expected_outputs[i], selection)
# clear all the text
cursor.insertText('')
|
<commit_before><commit_msg>TST: Add tests to check the actual output on the console.<commit_after># Standard library imports
import unittest
# System library imports
from IPython.external.qt import QtGui
# Local imports
from IPython.frontend.qt.console.console_widget import ConsoleWidget
class TestConsoleWidget(unittest.TestCase):
@classmethod
def setUpClass(cls):
""" Create the application for the test case.
"""
cls._app = QtGui.QApplication([])
cls._app.setQuitOnLastWindowClosed(False)
@classmethod
def tearDownClass(cls):
""" Exit the application.
"""
QtGui.QApplication.quit()
def test_special_characters(self):
""" Are special characters displayed correctly?
"""
w = ConsoleWidget()
cursor = w._get_prompt_cursor()
test_inputs = ['xyz\b\b=\n', 'foo\b\nbar\n', 'foo\b\nbar\r\n', 'abc\rxyz\b\b=']
expected_outputs = [u'x=z\u2029', u'foo\u2029bar\u2029', u'foo\u2029bar\u2029', 'x=z']
for i, text in enumerate(test_inputs):
w._insert_plain_text(cursor, text)
cursor.select(cursor.Document)
selection = cursor.selectedText()
self.assertEquals(expected_outputs[i], selection)
# clear all the text
cursor.insertText('')
|
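The expected outputs in the test above encode specific control-character semantics: '\b' moves the cursor back one column without deleting, '\r' returns it to the start of the line so later characters overwrite, '\n' (or '\r\n') ends the line, and Qt reports line breaks as the paragraph separator U+2029. The following is a standalone model of those semantics, written only to make the test vectors checkable outside Qt; it is not the widget's actual implementation.

def render(text):
    # Standalone sketch of the overwrite semantics asserted in the test above.
    lines, out, col, i = [], [], 0, 0
    while i < len(text):
        ch = text[i]
        if ch == '\b':                      # backspace: step left, keep text
            col = max(col - 1, 0)
        elif ch == '\r' and text[i + 1:i + 2] == '\n':
            lines.append(''.join(out)); out = []; col = 0; i += 1
        elif ch == '\r':                    # bare CR: overwrite from column 0
            col = 0
        elif ch == '\n':                    # newline: finish the current line
            lines.append(''.join(out)); out = []; col = 0
        else:                               # printable: overwrite or append
            if col < len(out):
                out[col] = ch
            else:
                out.append(ch)
            col += 1
        i += 1
    return u'\u2029'.join(lines + [''.join(out)])

assert render('xyz\b\b=\n') == u'x=z\u2029'
assert render('abc\rxyz\b\b=') == 'x=z'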
|
30b8e75af44315576ff1ab010bee6668b8f90782
|
add_artists.py
|
add_artists.py
|
#!/Users/julian/.local/share/virtualenvs/great/bin/pypy
"""
* group by artist
* canonical case
* find existing artist
"""
import csv
import subprocess
import sys
from great.models import music
from great.web import engine_from_config
from sqlalchemy import sql
e = engine_from_config()
def copy(text):
subprocess.Popen(["pbcopy"], stdin=subprocess.PIPE).communicate(
text.encode("utf-8"),
)
def canonicalize(artist):
stripped = artist.strip()
for each in "and", "for", "in", "of", "the":
stripped = stripped.replace(" " + each.title() + " ", " " + each + " ")
return stripped
def exists(artist):
return e.execute(
sql.exists(
sql.select([music.artists]).where(music.artists.c.name == artist),
).select(),
).scalar()
with open("/dev/tty") as tty:
for line in sys.stdin:
artist = canonicalize(line[:-1].decode("utf-8"))
if not exists(artist):
copy(artist)
print repr(artist)
add = tty.readline().strip().decode("utf-8") or artist
print "Adding:", repr(add)
e.execute(music.artists.insert().values(name=add))
|
Add a temporary semi-interactive way to add artists.
|
Add a temporary semi-interactive way to add artists.
|
Python
|
mit
|
Julian/Great,Julian/Great,Julian/Great
|
Add a temporary semi-interactive way to add artists.
|
#!/Users/julian/.local/share/virtualenvs/great/bin/pypy
"""
* group by artist
* canonical case
* find existing artist
"""
import csv
import subprocess
import sys
from great.models import music
from great.web import engine_from_config
from sqlalchemy import sql
e = engine_from_config()
def copy(text):
subprocess.Popen(["pbcopy"], stdin=subprocess.PIPE).communicate(
text.encode("utf-8"),
)
def canonicalize(artist):
stripped = artist.strip()
for each in "and", "for", "in", "of", "the":
stripped = stripped.replace(" " + each.title() + " ", " " + each + " ")
return stripped
def exists(artist):
return e.execute(
sql.exists(
sql.select([music.artists]).where(music.artists.c.name == artist),
).select(),
).scalar()
with open("/dev/tty") as tty:
for line in sys.stdin:
artist = canonicalize(line[:-1].decode("utf-8"))
if not exists(artist):
copy(artist)
print repr(artist)
add = tty.readline().strip().decode("utf-8") or artist
print "Adding:", repr(add)
e.execute(music.artists.insert().values(name=add))
|
<commit_before><commit_msg>Add a temporary semi-interactive way to add artists.<commit_after>
|
#!/Users/julian/.local/share/virtualenvs/great/bin/pypy
"""
* group by artist
* canonical case
* find existing artist
"""
import csv
import subprocess
import sys
from great.models import music
from great.web import engine_from_config
from sqlalchemy import sql
e = engine_from_config()
def copy(text):
subprocess.Popen(["pbcopy"], stdin=subprocess.PIPE).communicate(
text.encode("utf-8"),
)
def canonicalize(artist):
stripped = artist.strip()
for each in "and", "for", "in", "of", "the":
stripped = stripped.replace(" " + each.title() + " ", " " + each + " ")
return stripped
def exists(artist):
return e.execute(
sql.exists(
sql.select([music.artists]).where(music.artists.c.name == artist),
).select(),
).scalar()
with open("/dev/tty") as tty:
for line in sys.stdin:
artist = canonicalize(line[:-1].decode("utf-8"))
if not exists(artist):
copy(artist)
print repr(artist)
add = tty.readline().strip().decode("utf-8") or artist
print "Adding:", repr(add)
e.execute(music.artists.insert().values(name=add))
|
Add a temporary semi-interactive way to add artists.#!/Users/julian/.local/share/virtualenvs/great/bin/pypy
"""
* group by artist
* canonical case
* find existing artist
"""
import csv
import subprocess
import sys
from great.models import music
from great.web import engine_from_config
from sqlalchemy import sql
e = engine_from_config()
def copy(text):
subprocess.Popen(["pbcopy"], stdin=subprocess.PIPE).communicate(
text.encode("utf-8"),
)
def canonicalize(artist):
stripped = artist.strip()
for each in "and", "for", "in", "of", "the":
stripped = stripped.replace(" " + each.title() + " ", " " + each + " ")
return stripped
def exists(artist):
return e.execute(
sql.exists(
sql.select([music.artists]).where(music.artists.c.name == artist),
).select(),
).scalar()
with open("/dev/tty") as tty:
for line in sys.stdin:
artist = canonicalize(line[:-1].decode("utf-8"))
if not exists(artist):
copy(artist)
print repr(artist)
add = tty.readline().strip().decode("utf-8") or artist
print "Adding:", repr(add)
e.execute(music.artists.insert().values(name=add))
|
<commit_before><commit_msg>Add a temporary semi-interactive way to add artists.<commit_after>#!/Users/julian/.local/share/virtualenvs/great/bin/pypy
"""
* group by artist
* canonical case
* find existing artist
"""
import csv
import subprocess
import sys
from great.models import music
from great.web import engine_from_config
from sqlalchemy import sql
e = engine_from_config()
def copy(text):
subprocess.Popen(["pbcopy"], stdin=subprocess.PIPE).communicate(
text.encode("utf-8"),
)
def canonicalize(artist):
stripped = artist.strip()
for each in "and", "for", "in", "of", "the":
stripped = stripped.replace(" " + each.title() + " ", " " + each + " ")
return stripped
def exists(artist):
return e.execute(
sql.exists(
sql.select([music.artists]).where(music.artists.c.name == artist),
).select(),
).scalar()
with open("/dev/tty") as tty:
for line in sys.stdin:
artist = canonicalize(line[:-1].decode("utf-8"))
if not exists(artist):
copy(artist)
print repr(artist)
add = tty.readline().strip().decode("utf-8") or artist
print "Adding:", repr(add)
e.execute(music.artists.insert().values(name=add))
|
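One detail of canonicalize above is easy to miss: the stop words are lowercased only when surrounded by spaces, so a title-cased article at the very start of a name survives the replacement. A quick illustration with made-up artist names, run against canonicalize() as defined above:

# Hypothetical inputs; outputs follow from the space-delimited replacement.
print(canonicalize("  The Battle Of Evermore "))  # The Battle of Evermore
print(canonicalize("Panic At The Disco"))         # Panic At the Disco ("At" is not in the list)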
|
2b83c83ceb9215bda88872a5a39f8b6006788ca5
|
count_labelsets.py
|
count_labelsets.py
|
"""Create multilabel data set with normalized spelling.
The input consists of a directory of text files containing the dataset in
historic spelling.
The data set consists of:
<sentence id>\t<sentence>\tEmotie_Liefde (embodied emotions labels separated by
_)
<sentence id>\t<sentence>\tNone ('None' if no words were tagged)
Usage: python normalize_dataset.py <input dir> <output dir>
"""
import argparse
import codecs
import os
from collections import Counter
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('input_dir', help='the name of the directory '
'containing text files that should be normalized.')
args = parser.parse_args()
input_dir = args.input_dir
labelsets = Counter()
len_labelset = Counter()
print 'texts'
text_files = [fi for fi in os.listdir(input_dir) if fi.endswith('.txt')]
for text_file in text_files:
print text_file
in_file = os.path.join(input_dir, text_file)
with codecs.open(in_file, 'rb', 'utf-8') as f:
lines = f.readlines()
for line in lines:
parts = line.split('\t')
labels = parts[2].strip()
labelsets[labels] += 1
# count labelset lengths
if labels != 'None':
length = len(labels.split('_'))
len_labelset[str(length).zfill(3)] += 1
print '\n# different labelsets\t{}'.format(str(len(labelsets)))
# print lengths
print '\n# labels\tfrequency'
for le, freq in len_labelset.most_common():
print '{}\t{}'.format(le, freq)
# print labelsets
print '\nLabelset\tfrequency'
for ls, freq in labelsets.most_common():
print '{}\t{}'.format(ls, freq)
|
Add script to count label set statistics
|
Add script to count label set statistics
Added a script that outputs statistics about label sets in the data.
|
Python
|
apache-2.0
|
NLeSC/embodied-emotions-scripts,NLeSC/embodied-emotions-scripts
|
Add script to count label set statistics
Added a script that outputs statistics about label sets in the data.
|
"""Create multilabel data set with normalized spelling.
The input consists of a directory of text files containing the dataset in
historic spelling.
The data set consists of:
<sentence id>\t<sentence>\tEmotie_Liefde (embodied emotions labels separated by
_)
<sentence id>\t<sentence>\tNone ('None' if no words were tagged)
Usage: python normalize_dataset.py <input dir> <output dir>
"""
import argparse
import codecs
import os
from collections import Counter
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('input_dir', help='the name of the directory '
'containing text files that should be normalized.')
args = parser.parse_args()
input_dir = args.input_dir
labelsets = Counter()
len_labelset = Counter()
print 'texts'
text_files = [fi for fi in os.listdir(input_dir) if fi.endswith('.txt')]
for text_file in text_files:
print text_file
in_file = os.path.join(input_dir, text_file)
with codecs.open(in_file, 'rb', 'utf-8') as f:
lines = f.readlines()
for line in lines:
parts = line.split('\t')
labels = parts[2].strip()
labelsets[labels] += 1
# count labelset lengths
if labels != 'None':
length = len(labels.split('_'))
len_labelset[str(length).zfill(3)] += 1
print '\n# different labelsets\t{}'.format(str(len(labelsets)))
# print lengths
print '\n# labels\tfrequency'
for le, freq in len_labelset.most_common():
print '{}\t{}'.format(le, freq)
# print labelsets
print '\nLabelset\tfrequency'
for ls, freq in labelsets.most_common():
print '{}\t{}'.format(ls, freq)
|
<commit_before><commit_msg>Add script to count label set statistics
Added a script that outputs statistics about label sets in the data.<commit_after>
|
"""Create multilabel data set with normalized spelling.
The input consists of a directory of text files containing the dataset in
historic spelling.
The data set consists of:
<sentence id>\t<sentence>\tEmotie_Liefde (embodied emotions labels separated by
_)
<sentence id>\t<sentence>\tNone ('None' if no words were tagged)
Usage: python normalize_dataset.py <input dir> <output dir>
"""
import argparse
import codecs
import os
from collections import Counter
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('input_dir', help='the name of the directory '
'containing text files that should be normalized.')
args = parser.parse_args()
input_dir = args.input_dir
labelsets = Counter()
len_labelset = Counter()
print 'texts'
text_files = [fi for fi in os.listdir(input_dir) if fi.endswith('.txt')]
for text_file in text_files:
print text_file
in_file = os.path.join(input_dir, text_file)
with codecs.open(in_file, 'rb', 'utf-8') as f:
lines = f.readlines()
for line in lines:
parts = line.split('\t')
labels = parts[2].strip()
labelsets[labels] += 1
# count labelset lengths
if labels != 'None':
length = len(labels.split('_'))
len_labelset[str(length).zfill(3)] += 1
print '\n# different labelsets\t{}'.format(str(len(labelsets)))
# print lengths
print '\n# labels\tfrequency'
for le, freq in len_labelset.most_common():
print '{}\t{}'.format(le, freq)
# print labelsets
print '\nLabelset\tfrequency'
for ls, freq in labelsets.most_common():
print '{}\t{}'.format(ls, freq)
|
Add script to count label set statistics
Added a script that outputs statistics about label sets in the data."""Create multilabel data set with normalized spelling.
The input consists of a directory of text files containing the dataset in
historic spelling.
The data set consists of:
<sentence id>\t<sentence>\tEmotie_Liefde (embodied emotions labels separated by
_)
<sentence id>\t<sentence>\tNone ('None' if no words were tagged)
Usage: python normalize_dataset.py <input dir> <output dir>
"""
import argparse
import codecs
import os
from collections import Counter
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('input_dir', help='the name of the directory '
'containing text files that should be normalized.')
args = parser.parse_args()
input_dir = args.input_dir
labelsets = Counter()
len_labelset = Counter()
print 'texts'
text_files = [fi for fi in os.listdir(input_dir) if fi.endswith('.txt')]
for text_file in text_files:
print text_file
in_file = os.path.join(input_dir, text_file)
with codecs.open(in_file, 'rb', 'utf-8') as f:
lines = f.readlines()
for line in lines:
parts = line.split('\t')
labels = parts[2].strip()
labelsets[labels] += 1
# count labelset lengths
if labels != 'None':
length = len(labels.split('_'))
len_labelset[str(length).zfill(3)] += 1
print '\n# different labelsets\t{}'.format(str(len(labelsets)))
# print lengths
print '\n# labels\tfrequency'
for le, freq in len_labelset.most_common():
print '{}\t{}'.format(le, freq)
# print labelsets
print '\nLabelset\tfrequency'
for ls, freq in labelsets.most_common():
print '{}\t{}'.format(ls, freq)
|
<commit_before><commit_msg>Add script to count label set statistics
Added a script that outputs statistics about label sets in the data.<commit_after>"""Create multilabel data set with normalized spelling.
The input consists of a directory of text files containing the dataset in
historic spelling.
The data set consists of:
<sentence id>\t<sentence>\tEmotie_Liefde (embodied emotions labels separated by
_)
<sentence id>\t<sentence>\tNone ('None' if no words were tagged)
Usage: python normalize_dataset.py <input dir> <output dir>
"""
import argparse
import codecs
import os
from collections import Counter
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('input_dir', help='the name of the directory '
'containing text files that should be normalized.')
args = parser.parse_args()
input_dir = args.input_dir
labelsets = Counter()
len_labelset = Counter()
print 'texts'
text_files = [fi for fi in os.listdir(input_dir) if fi.endswith('.txt')]
for text_file in text_files:
print text_file
in_file = os.path.join(input_dir, text_file)
with codecs.open(in_file, 'rb', 'utf-8') as f:
lines = f.readlines()
for line in lines:
parts = line.split('\t')
labels = parts[2].strip()
labelsets[labels] += 1
# count labelset lengths
if labels != 'None':
length = len(labels.split('_'))
len_labelset[str(length).zfill(3)] += 1
print '\n# different labelsets\t{}'.format(str(len(labelsets)))
# print lengths
print '\n# labels\tfrequency'
for le, freq in len_labelset.most_common():
print '{}\t{}'.format(le, freq)
# print labelsets
print '\nLabelset\tfrequency'
for ls, freq in labelsets.most_common():
print '{}\t{}'.format(ls, freq)
|
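To make the two counters concrete, here is a hypothetical pair of label columns run through the same logic as the loop above; the labels are invented, the counting matches the script.

from collections import Counter

labelsets, len_labelset = Counter(), Counter()
for labels in ['Emotie_Liefde', 'None']:   # made-up third-column values
    labelsets[labels] += 1
    if labels != 'None':
        # two labels joined by '_' give the zero-padded key '002'
        len_labelset[str(len(labels.split('_'))).zfill(3)] += 1
print(labelsets)      # Counter({'Emotie_Liefde': 1, 'None': 1})
print(len_labelset)   # Counter({'002': 1})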
|
678a27d21f9d8166c320180aebc91a0d2abe528d
|
tests/test_tagreplacement.py
|
tests/test_tagreplacement.py
|
#
import pytest
import sdsc
#True
# tagtype = str(tagsreplaced.group(1))
# tokens = int(tagsreplaced.group(2))
@pytest.mark.parametrize("text,result",
(
# 0 - normal text, no op
('noop',(False, None, 1)),
# 1 - regular tag replacement
('##@cooltag-1##',(True, 'cooltag', 1)),
# 2 - regular tag replacement
('##@cooltag-2##',(True, 'cooltag', 2)),
# 3 - tricky case that should never happen since the text should always be
# a single token only -- still, we should find something in there
('Michael Caine approves of this ##@cooltag-1##',(True, 'cooltag', 1)),
# 4 - tricky case with a dash in the replaced tag -- should not happen and
# we should not find anything in there
('##@cool-tag-1##',(False, None, 1)),
# 5 - replacement for a tag like <cooltag> </cooltag> (i.e. no content)
# this behavior is ... controversial: also see
# tests/cases/sentencelength.xml +29
('##@cooltag-0##',(True, 'cooltag', 0)),
)
)
def test_findtagreplacement(text,result):
"""checks whether placeholders for special tags are found"""
assert sdsc.findtagreplacement(text) == result
|
Add test cases for tag replacement finder
|
Add test cases for tag replacement finder
|
Python
|
lgpl-2.1
|
sknorr/suse-doc-style-checker,sknorr/suse-doc-style-checker,sknorr/suse-doc-style-checker
|
Add test cases for tag replacement finder
|
#
import pytest
import sdsc
#True
# tagtype = str(tagsreplaced.group(1))
# tokens = int(tagsreplaced.group(2))
@pytest.mark.parametrize("text,result",
(
# 0 - normal text, no op
('noop',(False, None, 1)),
# 1 - regular tag replacement
('##@cooltag-1##',(True, 'cooltag', 1)),
# 2 - regular tag replacement
('##@cooltag-2##',(True, 'cooltag', 2)),
# 3 - tricky case that should never happen since the text should always be
# a single token only -- still, we should find something in there
('Michael Caine approves of this ##@cooltag-1##',(True, 'cooltag', 1)),
# 4 - tricky case with a dash in the replaced tag -- should not happen and
# we should not find anything in there
('##@cool-tag-1##',(False, None, 1)),
# 5 - replacement for a tag like <cooltag> </cooltag> (i.e. no content)
# this behavior is ... controversial: also see
# tests/cases/sentencelength.xml +29
('##@cooltag-0##',(True, 'cooltag', 0)),
)
)
def test_findtagreplacement(text,result):
"""checks whether placeholders for special tags are found"""
assert sdsc.findtagreplacement(text) == result
|
<commit_before><commit_msg>Add test cases for tag replacement finder<commit_after>
|
#
import pytest
import sdsc
#True
# tagtype = str(tagsreplaced.group(1))
# tokens = int(tagsreplaced.group(2))
@pytest.mark.parametrize("text,result",
(
# 0 - normal text, no op
('noop',(False, None, 1)),
# 1 - regular tag replacement
('##@cooltag-1##',(True, 'cooltag', 1)),
# 2 - regular tag replacement
('##@cooltag-2##',(True, 'cooltag', 2)),
# 3 - tricky case that should never happen since the text should always be
# a single token only -- still, we should find something in there
('Michael Caine approves of this ##@cooltag-1##',(True, 'cooltag', 1)),
# 4 - tricky case with a dash in the replaced tag -- should not happen and
# we should not find anything in there
('##@cool-tag-1##',(False, None, 1)),
# 5 - replacement for a tag like <cooltag> </cooltag> (i.e. no content)
# this behavior is ... controversial: also see
# tests/cases/sentencelength.xml +29
('##@cooltag-0##',(True, 'cooltag', 0)),
)
)
def test_findtagreplacement(text,result):
"""checks whether placeholders for special tags are found"""
assert sdsc.findtagreplacement(text) == result
|
Add test cases for tag replacement finder#
import pytest
import sdsc
#True
# tagtype = str(tagsreplaced.group(1))
# tokens = int(tagsreplaced.group(2))
@pytest.mark.parametrize("text,result",
(
# 0 - normal text, no op
('noop',(False, None, 1)),
# 1 - regular tag replacement
('##@cooltag-1##',(True, 'cooltag', 1)),
# 2 - regular tag replacement
('##@cooltag-2##',(True, 'cooltag', 2)),
# 3 - tricky case that should never happen since the text should always be
# a single token only -- still, we should find something in there
('Michael Caine approves of this ##@cooltag-1##',(True, 'cooltag', 1)),
# 4 - tricky case with a dash in the replaced tag -- should not happen and
# we should not find anything in there
('##@cool-tag-1##',(False, None, 1)),
# 5 - replacement for a tag like <cooltag> </cooltag> (i.e. no content)
# this behavior is ... controversial: also see
# tests/cases/sentencelength.xml +29
('##@cooltag-0##',(True, 'cooltag', 0)),
)
)
def test_findtagreplacement(text,result):
"""checks whether placeholders for special tags are found"""
assert sdsc.findtagreplacement(text) == result
|
<commit_before><commit_msg>Add test cases for tag replacement finder<commit_after>#
import pytest
import sdsc
#True
# tagtype = str(tagsreplaced.group(1))
# tokens = int(tagsreplaced.group(2))
@pytest.mark.parametrize("text,result",
(
# 0 - normal text, no op
('noop',(False, None, 1)),
# 1 - regular tag replacement
('##@cooltag-1##',(True, 'cooltag', 1)),
# 2 - regular tag replacement
('##@cooltag-2##',(True, 'cooltag', 2)),
# 3 - tricky case that should never happen since the text should always be
# a single token only -- still, we should find something in there
('Michael Caine approves of this ##@cooltag-1##',(True, 'cooltag', 1)),
# 4 - tricky case with a dash in the replaced tag -- should not happen and
# we should not find anything in there
('##@cool-tag-1##',(False, None, 1)),
# 5 - replacement for a tag like <cooltag> </cooltag> (i.e. no content)
# this behavior is ... controversial: also see
# tests/cases/sentencelength.xml +29
('##@cooltag-0##',(True, 'cooltag', 0)),
)
)
def test_findtagreplacement(text,result):
"""checks whether placeholders for special tags are found"""
assert sdsc.findtagreplacement(text) == result
|
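The parametrized cases above pin down the placeholder grammar: ##@ followed by a tag name without dashes, a dash, a token count, then ##, with (False, None, 1) as the no-match default. A regex-based sketch that satisfies all six cases; it may or may not match sdsc's real implementation.

import re

def findtagreplacement(text):
    # Sketch only: \w+ forbids dashes inside the tag name, so
    # '##@cool-tag-1##' falls through to the default, as case 4 requires.
    match = re.search(r'##@(\w+)-(\d+)##', text)
    if match:
        return (True, match.group(1), int(match.group(2)))
    return (False, None, 1)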
|
031a595900595bea181b2dc51c34eff44dab56a1
|
tests/treebrd/test_schema.py
|
tests/treebrd/test_schema.py
|
from unittest import TestCase
from rapt.treebrd.errors import RelationReferenceError
from rapt.treebrd.schema import Schema
class TestSchema(TestCase):
def test_contains_when_empty(self):
self.assertFalse(Schema({}).contains('Relation'))
def test_contains_when_false(self):
self.assertFalse(Schema({'AnotherRelation': []}).contains('Relation'))
def test_contains_when_true(self):
self.assertTrue(Schema({'Relation': []}).contains('Relation'))
def test_to_dict(self):
expected = {'alpha': ['a1'], 'beta': ['b1']}
actual = Schema(expected).to_dict()
self.assertNotEquals(id(expected), id(actual))
self.assertEqual(expected, actual)
def test_get_attributes(self):
raw = {'alpha': ['a1'], 'beta': ['b1']}
expected = ['a1']
actual = Schema(raw).get_attributes('alpha')
self.assertNotEquals(id(expected), id(raw['alpha']))
self.assertEqual(expected, actual)
def test_add(self):
schema = Schema({'alpha': ['a1']})
schema.add('beta', ['b1'])
self.assertTrue(schema.contains('beta'))
self.assertEqual(['b1'], schema.get_attributes('beta'))
def test_exception_when_name_conflicts(self):
schema = Schema({'alpha': ['a1']})
self.assertRaises(RelationReferenceError, schema.add, 'alpha', [])
|
Add a test class for Schema.
|
Add a test class for Schema.
|
Python
|
mit
|
pyrapt/rapt
|
Add a test class for Schema.
|
from unittest import TestCase
from rapt.treebrd.errors import RelationReferenceError
from rapt.treebrd.schema import Schema
class TestSchema(TestCase):
def test_contains_when_empty(self):
self.assertFalse(Schema({}).contains('Relation'))
def test_contains_when_false(self):
self.assertFalse(Schema({'AnotherRelation': []}).contains('Relation'))
def test_contains_when_true(self):
self.assertTrue(Schema({'Relation': []}).contains('Relation'))
def test_to_dict(self):
expected = {'alpha': ['a1'], 'beta': ['b1']}
actual = Schema(expected).to_dict()
self.assertNotEquals(id(expected), id(actual))
self.assertEqual(expected, actual)
def test_get_attributes(self):
raw = {'alpha': ['a1'], 'beta': ['b1']}
expected = ['a1']
actual = Schema(raw).get_attributes('alpha')
self.assertNotEquals(id(expected), id(raw['alpha']))
self.assertEqual(expected, actual)
def test_add(self):
schema = Schema({'alpha': ['a1']})
schema.add('beta', ['b1'])
self.assertTrue(schema.contains('beta'))
self.assertEqual(['b1'], schema.get_attributes('beta'))
def test_exception_when_name_conflicts(self):
schema = Schema({'alpha': ['a1']})
self.assertRaises(RelationReferenceError, schema.add, 'alpha', [])
|
<commit_before><commit_msg>Add a test class for Schema.<commit_after>
|
from unittest import TestCase
from rapt.treebrd.errors import RelationReferenceError
from rapt.treebrd.schema import Schema
class TestSchema(TestCase):
def test_contains_when_empty(self):
self.assertFalse(Schema({}).contains('Relation'))
def test_contains_when_false(self):
self.assertFalse(Schema({'AnotherRelation': []}).contains('Relation'))
def test_contains_when_true(self):
self.assertTrue(Schema({'Relation': []}).contains('Relation'))
def test_to_dict(self):
expected = {'alpha': ['a1'], 'beta': ['b1']}
actual = Schema(expected).to_dict()
self.assertNotEquals(id(expected), id(actual))
self.assertEqual(expected, actual)
def test_get_attributes(self):
raw = {'alpha': ['a1'], 'beta': ['b1']}
expected = ['a1']
actual = Schema(raw).get_attributes('alpha')
self.assertNotEquals(id(expected), id(raw['alpha']))
self.assertEqual(expected, actual)
def test_add(self):
schema = Schema({'alpha': ['a1']})
schema.add('beta', ['b1'])
self.assertTrue(schema.contains('beta'))
self.assertEqual(['b1'], schema.get_attributes('beta'))
def test_exception_when_name_conflicts(self):
schema = Schema({'alpha': ['a1']})
self.assertRaises(RelationReferenceError, schema.add, 'alpha', [])
|
Add a test class for Schema.from unittest import TestCase
from rapt.treebrd.errors import RelationReferenceError
from rapt.treebrd.schema import Schema
class TestSchema(TestCase):
def test_contains_when_empty(self):
self.assertFalse(Schema({}).contains('Relation'))
def test_contains_when_false(self):
self.assertFalse(Schema({'AnotherRelation': []}).contains('Relation'))
def test_contains_when_true(self):
self.assertTrue(Schema({'Relation': []}).contains('Relation'))
def test_to_dict(self):
expected = {'alpha': ['a1'], 'beta': ['b1']}
actual = Schema(expected).to_dict()
self.assertNotEquals(id(expected), id(actual))
self.assertEqual(expected, actual)
def test_get_attributes(self):
raw = {'alpha': ['a1'], 'beta': ['b1']}
expected = ['a1']
actual = Schema(raw).get_attributes('alpha')
self.assertNotEquals(id(expected), id(raw['alpha']))
self.assertEqual(expected, actual)
def test_add(self):
schema = Schema({'alpha': ['a1']})
schema.add('beta', ['b1'])
self.assertTrue(schema.contains('beta'))
self.assertEqual(['b1'], schema.get_attributes('beta'))
def test_exception_when_name_conflicts(self):
schema = Schema({'alpha': ['a1']})
self.assertRaises(RelationReferenceError, schema.add, 'alpha', [])
|
<commit_before><commit_msg>Add a test class for Schema.<commit_after>from unittest import TestCase
from rapt.treebrd.errors import RelationReferenceError
from rapt.treebrd.schema import Schema
class TestSchema(TestCase):
def test_contains_when_empty(self):
self.assertFalse(Schema({}).contains('Relation'))
def test_contains_when_false(self):
self.assertFalse(Schema({'AnotherRelation': []}).contains('Relation'))
def test_contains_when_true(self):
self.assertTrue(Schema({'Relation': []}).contains('Relation'))
def test_to_dict(self):
expected = {'alpha': ['a1'], 'beta': ['b1']}
actual = Schema(expected).to_dict()
self.assertNotEquals(id(expected), id(actual))
self.assertEqual(expected, actual)
def test_get_attributes(self):
raw = {'alpha': ['a1'], 'beta': ['b1']}
expected = ['a1']
actual = Schema(raw).get_attributes('alpha')
self.assertNotEquals(id(expected), id(raw['alpha']))
self.assertEqual(expected, actual)
def test_add(self):
schema = Schema({'alpha': ['a1']})
schema.add('beta', ['b1'])
self.assertTrue(schema.contains('beta'))
self.assertEqual(['b1'], schema.get_attributes('beta'))
def test_exception_when_name_conflicts(self):
schema = Schema({'alpha': ['a1']})
self.assertRaises(RelationReferenceError, schema.add, 'alpha', [])
|
|
22ddf4ed423eabf86ff3aa3b7dcb183b96f700c7
|
HadithHouseWebsite/hadiths/migrations/0005_create_fulltextsearch_index.py
|
HadithHouseWebsite/hadiths/migrations/0005_create_fulltextsearch_index.py
|
from django.db import migrations
from django.db.transaction import atomic
from hadiths.initial_data import *
from hadiths.models import Person, Hadith, HadithTag, Chain, ChainPersonRel, Book, HadithTagRel
class Migration(migrations.Migration):
dependencies = [
('hadiths', '0004_add_first_hadiths'),
]
operations = [
migrations.RunSQL('''
CREATE INDEX hadiths_text_idx ON hadiths USING GIN (TO_TSVECTOR('english', text));
CREATE INDEX hadiths_simpletext_idx ON hadiths USING GIN (TO_TSVECTOR('english', simple_text));
''')
]
|
Create full text indexes on 'text' and 'simple_text'
|
chore(text-fields): Create full text indexes on 'text' and 'simple_text'
#228
|
Python
|
mit
|
hadithhouse/hadithhouse,hadithhouse/hadithhouse,hadithhouse/hadithhouse,rafidka/hadithhouse,rafidka/hadithhouse,rafidka/hadithhouse,hadithhouse/hadithhouse,hadithhouse/hadithhouse,rafidka/hadithhouse,hadithhouse/hadithhouse,rafidka/hadithhouse,rafidka/hadithhouse
|
chore(text-fields): Create full text indexes on 'text' and 'simple_text'
#228
|
from django.db import migrations
from django.db.transaction import atomic
from hadiths.initial_data import *
from hadiths.models import Person, Hadith, HadithTag, Chain, ChainPersonRel, Book, HadithTagRel
class Migration(migrations.Migration):
dependencies = [
('hadiths', '0004_add_first_hadiths'),
]
operations = [
migrations.RunSQL('''
CREATE INDEX hadiths_text_idx ON hadiths USING GIN (TO_TSVECTOR('english', text));
CREATE INDEX hadiths_simpletext_idx ON hadiths USING GIN (TO_TSVECTOR('english', simple_text));
''')
]
|
<commit_before><commit_msg>chore(text-fields): Create full text indexes on 'text' and 'simple_text'
#228<commit_after>
|
from django.db import migrations
from django.db.transaction import atomic
from hadiths.initial_data import *
from hadiths.models import Person, Hadith, HadithTag, Chain, ChainPersonRel, Book, HadithTagRel
class Migration(migrations.Migration):
dependencies = [
('hadiths', '0004_add_first_hadiths'),
]
operations = [
migrations.RunSQL('''
CREATE INDEX hadiths_text_idx ON hadiths USING GIN (TO_TSVECTOR('english', text));
CREATE INDEX hadiths_simpletext_idx ON hadiths USING GIN (TO_TSVECTOR('english', simple_text));
''')
]
|
chore(text-fields): Create full text indexes on 'text' and 'simple_text'
#228from django.db import migrations
from django.db.transaction import atomic
from hadiths.initial_data import *
from hadiths.models import Person, Hadith, HadithTag, Chain, ChainPersonRel, Book, HadithTagRel
class Migration(migrations.Migration):
dependencies = [
('hadiths', '0004_add_first_hadiths'),
]
operations = [
migrations.RunSQL('''
CREATE INDEX hadiths_text_idx ON hadiths USING GIN (TO_TSVECTOR('english', text));
CREATE INDEX hadiths_simpletext_idx ON hadiths USING GIN (TO_TSVECTOR('english', simple_text));
''')
]
|
<commit_before><commit_msg>chore(text-fields): Create full text indexes on 'text' and 'simple_text'
#228<commit_after>from django.db import migrations
from django.db.transaction import atomic
from hadiths.initial_data import *
from hadiths.models import Person, Hadith, HadithTag, Chain, ChainPersonRel, Book, HadithTagRel
class Migration(migrations.Migration):
dependencies = [
('hadiths', '0004_add_first_hadiths'),
]
operations = [
migrations.RunSQL('''
CREATE INDEX hadiths_text_idx ON hadiths USING GIN (TO_TSVECTOR('english', text));
CREATE INDEX hadiths_simpletext_idx ON hadiths USING GIN (TO_TSVECTOR('english', simple_text));
''')
]
|
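PostgreSQL only uses an expression index like the two above when a query repeats the indexed expression, so lookups have to wrap the column in TO_TSVECTOR('english', ...) as well. A hypothetical Django-side search sketch: the table and column names come from the migration, while the function name and search term are invented.

from django.db import connection

def search_hadith_ids(term):
    # Repeats TO_TSVECTOR('english', text) verbatim so the planner can use
    # hadiths_text_idx; swap in simple_text to hit hadiths_simpletext_idx.
    with connection.cursor() as cursor:
        cursor.execute(
            "SELECT id FROM hadiths "
            "WHERE TO_TSVECTOR('english', text) @@ PLAINTO_TSQUERY('english', %s)",
            [term],
        )
        return [row[0] for row in cursor.fetchall()]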
|
789bb6a7454f029e9f101689ae604469412dae6e
|
tools/convert_mat_to_json.py
|
tools/convert_mat_to_json.py
|
# Script to convert proprietary .mat files into json
import sys
import os
import glob
import json
import numpy as np
from scipy.io import loadmat
def main(mat_dir, out_dir):
""" Script to convert all .mat files in mat_dir into corresponding json files
in out_dir
Any Matlab arrays are converted to lists of floats
.json files have the same basename as the .mat files
"""
# Find all .mat files in mat_dir
mat_files = glob.glob(os.path.join(mat_dir, '*.mat'))
# Iterate through each .mat file
for mat_file in mat_files:
json_dict = {}
mat_dict = loadmat(mat_file, squeeze_me=True)
# Iterate through all entries of mat_dict
# For each entry, convert data type if necessary
for k in mat_dict.keys():
if isinstance(mat_dict[k], np.ndarray):
json_dict[k] = mat_dict[k].tolist()
elif isinstance(mat_dict[k], unicode):
json_dict[k] = mat_dict[k]
elif isinstance(mat_dict[k], str):
json_dict[k] = mat_dict[k]
elif isinstance(mat_dict[k], int):
json_dict[k] = mat_dict[k]
elif isinstance(mat_dict[k], float):
json_dict[k] = mat_dict[k]
elif isinstance(mat_dict[k], list):
json_dict[k] = mat_dict[k]
else:
print('Did not convert key {} of type {}'.format(k, type(k)))
# Write converted dict to json
# Check that output directory exists, if not create it
if not os.path.isdir(out_dir):
os.makedirs(out_dir)
fn = os.path.join(out_dir, os.path.splitext(os.path.basename(mat_file))[0]) + '.json'
with open(fn, 'w') as f:
json.dump(json_dict, f)
print('Wrote data in {} to JSON in {}'.format(mat_file, fn))
if __name__ == '__main__':
main(sys.argv[1], sys.argv[2])
|
Add script to convert .mat files to JSON
|
Add script to convert .mat files to JSON
* This allows us to avoid having proprietary .mat files in our test data
|
Python
|
apache-2.0
|
voicesauce/opensauce-python,voicesauce/opensauce-python,voicesauce/opensauce-python
|
Add script to convert .mat files to JSON
* This allows us to avoid having proprietary .mat files in our test data
|
# Script to convert proprietary .mat files into json
import sys
import os
import glob
import json
import numpy as np
from scipy.io import loadmat
def main(mat_dir, out_dir):
""" Script to convert all .mat files in mat_dir into corresponding json files
in out_dir
Any Matlab arrays are converted to lists of floats
.json files have the same basename as the .mat files
"""
# Find all .mat files in mat_dir
mat_files = glob.glob(os.path.join(mat_dir, '*.mat'))
# Iterate through each .mat file
for mat_file in mat_files:
json_dict = {}
mat_dict = loadmat(mat_file, squeeze_me=True)
# Iterate through all entries of mat_dict
# For each entry, convert data type if necessary
for k in mat_dict.keys():
if isinstance(mat_dict[k], np.ndarray):
json_dict[k] = mat_dict[k].tolist()
elif isinstance(mat_dict[k], unicode):
json_dict[k] = mat_dict[k]
elif isinstance(mat_dict[k], str):
json_dict[k] = mat_dict[k]
elif isinstance(mat_dict[k], int):
json_dict[k] = mat_dict[k]
elif isinstance(mat_dict[k], float):
json_dict[k] = mat_dict[k]
elif isinstance(mat_dict[k], list):
json_dict[k] = mat_dict[k]
else:
print('Did not convert key {} of type {}'.format(k, type(k)))
# Write converted dict to json
# Check that output directory exists, if not create it
if not os.path.isdir(out_dir):
os.makedirs(out_dir)
fn = os.path.join(out_dir, os.path.splitext(os.path.basename(mat_file))[0]) + '.json'
with open(fn, 'w') as f:
json.dump(json_dict, f)
print('Wrote data in {} to JSON in {}'.format(mat_file, fn))
if __name__ == '__main__':
main(sys.argv[1], sys.argv[2])
|
<commit_before><commit_msg>Add script to convert .mat files to JSON
* This allows us to avoid having proprietary .mat files in our test data<commit_after>
|
# Script to convert proprietary .mat files into json
import sys
import os
import glob
import json
import numpy as np
from scipy.io import loadmat
def main(mat_dir, out_dir):
""" Script to convert all .mat files in mat_dir into corresponding json files
in out_dir
Any Matlab arrays are converted to lists of floats
.json files have the same basename as the .mat files
"""
# Find all .mat files in mat_dir
mat_files = glob.glob(os.path.join(mat_dir, '*.mat'))
# Iterate through each .mat file
for mat_file in mat_files:
json_dict = {}
mat_dict = loadmat(mat_file, squeeze_me=True)
# Iterate through all entries of mat_dict
# For each entry, convert data type if necessary
for k in mat_dict.keys():
if isinstance(mat_dict[k], np.ndarray):
json_dict[k] = mat_dict[k].tolist()
elif isinstance(mat_dict[k], unicode):
json_dict[k] = mat_dict[k]
elif isinstance(mat_dict[k], str):
json_dict[k] = mat_dict[k]
elif isinstance(mat_dict[k], int):
json_dict[k] = mat_dict[k]
elif isinstance(mat_dict[k], float):
json_dict[k] = mat_dict[k]
elif isinstance(mat_dict[k], list):
json_dict[k] = mat_dict[k]
else:
print('Did not convert key {} of type {}'.format(k, type(mat_dict[k])))
# Write converted dict to json
# Check that output directory exists, if not create it
if not os.path.isdir(out_dir):
os.makedirs(out_dir)
fn = os.path.join(out_dir, os.path.splitext(os.path.basename(mat_file))[0]) + '.json'
with open(fn, 'w') as f:
json.dump(json_dict, f)
print('Wrote data in {} to JSON in {}'.format(mat_file, fn))
if __name__ == '__main__':
main(sys.argv[1], sys.argv[2])
|
Add script to convert .mat files to JSON
* This allows us to avoid having proprietary .mat files in our test data# Script to convert proprietary .mat files into json
import sys
import os
import glob
import json
import numpy as np
from scipy.io import loadmat
def main(mat_dir, out_dir):
""" Script to convert all .mat files in mat_dir into corresponding json files
in out_dir
Any Matlab arrays are converted to lists of floats
.json files have the same basename as the .mat files
"""
# Find all .mat files in mat_dir
mat_files = glob.glob(os.path.join(mat_dir, '*.mat'))
# Iterate through each .mat file
for mat_file in mat_files:
json_dict = {}
mat_dict = loadmat(mat_file, squeeze_me=True)
# Iterate through all entries of mat_dict
# For each entry, convert data type if necessary
for k in mat_dict.keys():
if isinstance(mat_dict[k], np.ndarray):
json_dict[k] = mat_dict[k].tolist()
elif isinstance(mat_dict[k], unicode):
json_dict[k] = mat_dict[k]
elif isinstance(mat_dict[k], str):
json_dict[k] = mat_dict[k]
elif isinstance(mat_dict[k], int):
json_dict[k] = mat_dict[k]
elif isinstance(mat_dict[k], float):
json_dict[k] = mat_dict[k]
elif isinstance(mat_dict[k], list):
json_dict[k] = mat_dict[k]
else:
print('Did not convert key {} of type {}'.format(k, type(mat_dict[k])))
# Write converted dict to json
# Check that output directory exists, if not create it
if not os.path.isdir(out_dir):
os.makedirs(out_dir)
fn = os.path.join(out_dir, os.path.splitext(os.path.basename(mat_file))[0]) + '.json'
with open(fn, 'w') as f:
json.dump(json_dict, f)
print('Wrote data in {} to JSON in {}'.format(mat_file, fn))
if __name__ == '__main__':
main(sys.argv[1], sys.argv[2])
|
<commit_before><commit_msg>Add script to convert .mat files to JSON
* This allows us to avoid having proprietary .mat files in our test data<commit_after># Script to convert proprietary .mat files into json
import sys
import os
import glob
import json
import numpy as np
from scipy.io import loadmat
def main(mat_dir, out_dir):
""" Script to convert all .mat files in mat_dir into corresponding json files
in out_dir
Any Matlab arrays are converted to lists of floats
.json files have the same basename as the .mat files
"""
# Find all .mat files in mat_dir
mat_files = glob.glob(os.path.join(mat_dir, '*.mat'))
# Iterate through each .mat file
for mat_file in mat_files:
json_dict = {}
mat_dict = loadmat(mat_file, squeeze_me=True)
# Iterate through all entries of mat_dict
# For each entry, convert data type if necessary
for k in mat_dict.keys():
if isinstance(mat_dict[k], np.ndarray):
json_dict[k] = mat_dict[k].tolist()
elif isinstance(mat_dict[k], unicode):
json_dict[k] = mat_dict[k]
elif isinstance(mat_dict[k], str):
json_dict[k] = mat_dict[k]
elif isinstance(mat_dict[k], int):
json_dict[k] = mat_dict[k]
elif isinstance(mat_dict[k], float):
json_dict[k] = mat_dict[k]
elif isinstance(mat_dict[k], list):
json_dict[k] = mat_dict[k]
else:
print('Did not convert key {} of type {}'.format(k, type(mat_dict[k])))
# Write converted dict to json
# Check that output directory exists, if not create it
if not os.path.isdir(out_dir):
os.makedirs(out_dir)
fn = os.path.join(out_dir, os.path.splitext(os.path.basename(mat_file))[0]) + '.json'
with open(fn, 'w') as f:
json.dump(json_dict, f)
print('Wrote data in {} to JSON in {}'.format(mat_file, fn))
if __name__ == '__main__':
main(sys.argv[1], sys.argv[2])
|
|
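The isinstance chain above can be collapsed into one recursive helper, which also covers numpy scalar types (np.int64 and friends) that the chain misses. A minimal Python 3 sketch; `to_jsonable` is an illustrative name, not part of the script above.

import json
import numpy as np

def to_jsonable(value):
    # arrays become (nested) lists of native Python scalars
    if isinstance(value, np.ndarray):
        return value.tolist()
    # numpy scalars (np.int64, np.float64, ...) become native scalars
    if isinstance(value, np.generic):
        return value.item()
    # str, int, float, list are already JSON-serializable
    return value

if __name__ == '__main__':
    sample = {'f0': np.arange(3.0), 'count': np.int64(7), 'label': 'vowel'}
    print(json.dumps({k: to_jsonable(v) for k, v in sample.items()}))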
7a09ee3ebf72bb2644f40b9ded208b266cfb6ff1
|
show_samples_cifar_conditional.py
|
show_samples_cifar_conditional.py
|
from pylearn2.utils import serial
import sys
_, model_path = sys.argv
model = serial.load(model_path)
space = model.generator.get_output_space()
from pylearn2.config import yaml_parse
from pylearn2.format.target_format import OneHotFormatter
from pylearn2.gui.patch_viewer import PatchViewer
import numpy as np
dataset = yaml_parse.load(model.dataset_yaml_src)
grid_shape = None
# Number of choices for one-hot values
rows = model.generator.condition_space.get_total_dimension()
# Samples per condition
sample_cols = 5
# Generate conditional information
formatter = OneHotFormatter(rows,
dtype=model.generator.condition_space.dtype)
conditional = formatter.format(np.concatenate([np.repeat(i, sample_cols) for i in range(rows)]),
mode='concatenate')
# For some reason format_as from VectorSpace is not working right
topo_samples = model.generator.sample(conditional).eval()
samples = dataset.get_design_matrix(topo_samples)
dataset.axes = ['b', 0, 1, 'c']
dataset.view_converter.axes = ['b', 0, 1, 'c']
topo_samples = dataset.get_topological_view(samples)
pv = PatchViewer(grid_shape=(rows, sample_cols + 1), patch_shape=(32,32),
is_color=True)
scale = np.abs(samples).max()
X = dataset.X
topo = dataset.get_topological_view()
index = 0
for i in xrange(samples.shape[0]):
topo_sample = topo_samples[i, :, :, :]
print topo_sample.min(), topo_sample.max()
pv.add_patch(topo_sample / scale, rescale=False)
if (i +1) % sample_cols == 0:
sample = samples[i, :]
dists = np.square(X - sample).sum(axis=1)
j = np.argmin(dists)
match = topo[j, :]
print match.min(), match.max()
pv.add_patch(match / scale, rescale=False, activation=1)
pv.show()
|
Add model visualizer for CIFAR conditional
|
Add model visualizer for CIFAR conditional
|
Python
|
bsd-3-clause
|
hans/adversarial
|
Add model visualizer for CIFAR conditional
|
from pylearn2.utils import serial
import sys
_, model_path = sys.argv
model = serial.load(model_path)
space = model.generator.get_output_space()
from pylearn2.config import yaml_parse
from pylearn2.format.target_format import OneHotFormatter
from pylearn2.gui.patch_viewer import PatchViewer
import numpy as np
dataset = yaml_parse.load(model.dataset_yaml_src)
grid_shape = None
# Number of choices for one-hot values
rows = model.generator.condition_space.get_total_dimension()
# Samples per condition
sample_cols = 5
# Generate conditional information
formatter = OneHotFormatter(rows,
dtype=model.generator.condition_space.dtype)
conditional = formatter.format(np.concatenate([np.repeat(i, sample_cols) for i in range(rows)]),
mode='concatenate')
# For some reason format_as from VectorSpace is not working right
topo_samples = model.generator.sample(conditional).eval()
samples = dataset.get_design_matrix(topo_samples)
dataset.axes = ['b', 0, 1, 'c']
dataset.view_converter.axes = ['b', 0, 1, 'c']
topo_samples = dataset.get_topological_view(samples)
pv = PatchViewer(grid_shape=(rows, sample_cols + 1), patch_shape=(32,32),
is_color=True)
scale = np.abs(samples).max()
X = dataset.X
topo = dataset.get_topological_view()
index = 0
for i in xrange(samples.shape[0]):
topo_sample = topo_samples[i, :, :, :]
print topo_sample.min(), topo_sample.max()
pv.add_patch(topo_sample / scale, rescale=False)
if (i +1) % sample_cols == 0:
sample = samples[i, :]
dists = np.square(X - sample).sum(axis=1)
j = np.argmin(dists)
match = topo[j, :]
print match.min(), match.max()
pv.add_patch(match / scale, rescale=False, activation=1)
pv.show()
|
<commit_before><commit_msg>Add model visualizer for CIFAR conditional<commit_after>
|
from pylearn2.utils import serial
import sys
_, model_path = sys.argv
model = serial.load(model_path)
space = model.generator.get_output_space()
from pylearn2.config import yaml_parse
from pylearn2.format.target_format import OneHotFormatter
from pylearn2.gui.patch_viewer import PatchViewer
import numpy as np
dataset = yaml_parse.load(model.dataset_yaml_src)
grid_shape = None
# Number of choices for one-hot values
rows = model.generator.condition_space.get_total_dimension()
# Samples per condition
sample_cols = 5
# Generate conditional information
formatter = OneHotFormatter(rows,
dtype=model.generator.condition_space.dtype)
conditional = formatter.format(np.concatenate([np.repeat(i, sample_cols) for i in range(rows)]),
mode='concatenate')
# For some reason format_as from VectorSpace is not working right
topo_samples = model.generator.sample(conditional).eval()
samples = dataset.get_design_matrix(topo_samples)
dataset.axes = ['b', 0, 1, 'c']
dataset.view_converter.axes = ['b', 0, 1, 'c']
topo_samples = dataset.get_topological_view(samples)
pv = PatchViewer(grid_shape=(rows, sample_cols + 1), patch_shape=(32,32),
is_color=True)
scale = np.abs(samples).max()
X = dataset.X
topo = dataset.get_topological_view()
index = 0
for i in xrange(samples.shape[0]):
topo_sample = topo_samples[i, :, :, :]
print topo_sample.min(), topo_sample.max()
pv.add_patch(topo_sample / scale, rescale=False)
if (i +1) % sample_cols == 0:
sample = samples[i, :]
dists = np.square(X - sample).sum(axis=1)
j = np.argmin(dists)
match = topo[j, :]
print match.min(), match.max()
pv.add_patch(match / scale, rescale=False, activation=1)
pv.show()
|
Add model visualizer for CIFAR conditionalfrom pylearn2.utils import serial
import sys
_, model_path = sys.argv
model = serial.load(model_path)
space = model.generator.get_output_space()
from pylearn2.config import yaml_parse
from pylearn2.format.target_format import OneHotFormatter
from pylearn2.gui.patch_viewer import PatchViewer
import numpy as np
dataset = yaml_parse.load(model.dataset_yaml_src)
grid_shape = None
# Number of choices for one-hot values
rows = model.generator.condition_space.get_total_dimension()
# Samples per condition
sample_cols = 5
# Generate conditional information
formatter = OneHotFormatter(rows,
dtype=model.generator.condition_space.dtype)
conditional = formatter.format(np.concatenate([np.repeat(i, sample_cols) for i in range(rows)]),
mode='concatenate')
# For some reason format_as from VectorSpace is not working right
topo_samples = model.generator.sample(conditional).eval()
samples = dataset.get_design_matrix(topo_samples)
dataset.axes = ['b', 0, 1, 'c']
dataset.view_converter.axes = ['b', 0, 1, 'c']
topo_samples = dataset.get_topological_view(samples)
pv = PatchViewer(grid_shape=(rows, sample_cols + 1), patch_shape=(32,32),
is_color=True)
scale = np.abs(samples).max()
X = dataset.X
topo = dataset.get_topological_view()
index = 0
for i in xrange(samples.shape[0]):
topo_sample = topo_samples[i, :, :, :]
print topo_sample.min(), topo_sample.max()
pv.add_patch(topo_sample / scale, rescale=False)
if (i +1) % sample_cols == 0:
sample = samples[i, :]
dists = np.square(X - sample).sum(axis=1)
j = np.argmin(dists)
match = topo[j, :]
print match.min(), match.max()
pv.add_patch(match / scale, rescale=False, activation=1)
pv.show()
|
<commit_before><commit_msg>Add model visualizer for CIFAR conditional<commit_after>from pylearn2.utils import serial
import sys
_, model_path = sys.argv
model = serial.load(model_path)
space = model.generator.get_output_space()
from pylearn2.config import yaml_parse
from pylearn2.format.target_format import OneHotFormatter
from pylearn2.gui.patch_viewer import PatchViewer
import numpy as np
dataset = yaml_parse.load(model.dataset_yaml_src)
grid_shape = None
# Number of choices for one-hot values
rows = model.generator.condition_space.get_total_dimension()
# Samples per condition
sample_cols = 5
# Generate conditional information
formatter = OneHotFormatter(rows,
dtype=model.generator.condition_space.dtype)
conditional = formatter.format(np.concatenate([np.repeat(i, sample_cols) for i in range(rows)]),
mode='concatenate')
# For some reason format_as from VectorSpace is not working right
topo_samples = model.generator.sample(conditional).eval()
samples = dataset.get_design_matrix(topo_samples)
dataset.axes = ['b', 0, 1, 'c']
dataset.view_converter.axes = ['b', 0, 1, 'c']
topo_samples = dataset.get_topological_view(samples)
pv = PatchViewer(grid_shape=(rows, sample_cols + 1), patch_shape=(32,32),
is_color=True)
scale = np.abs(samples).max()
X = dataset.X
topo = dataset.get_topological_view()
index = 0
for i in xrange(samples.shape[0]):
topo_sample = topo_samples[i, :, :, :]
print topo_sample.min(), topo_sample.max()
pv.add_patch(topo_sample / scale, rescale=False)
if (i +1) % sample_cols == 0:
sample = samples[i, :]
dists = np.square(X - sample).sum(axis=1)
j = np.argmin(dists)
match = topo[j, :]
print match.min(), match.max()
pv.add_patch(match / scale, rescale=False, activation=1)
pv.show()
|
|
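The conditional vector built with OneHotFormatter above has a simple numpy equivalent, handy for checking the expected shapes without pylearn2. A sketch; `rows` and `sample_cols` mirror the grid dimensions in the script.

import numpy as np

rows, sample_cols = 10, 5  # 10 CIFAR classes, 5 samples per class
labels = np.concatenate([np.repeat(i, sample_cols) for i in range(rows)])
one_hot = np.eye(rows, dtype='float32')[labels]

assert one_hot.shape == (rows * sample_cols, rows)
assert (one_hot.argmax(axis=1) == labels).all()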
909ac33b64275b436209d77f9eba791e086cdb0e
|
notify_levure_app_of_save.py
|
notify_levure_app_of_save.py
|
import sublime
import sublime_plugin
import re
class LevureAppNotify(sublime_plugin.EventListener):
def on_post_save(self, view):
# 1. Get script only stack name. line 1: script "Name" [done]
# 2. Get project key from project settings
# 3. Send notification over socket with project key and script name
# 4. Get response from LiveCode IDE
if view.settings().get('syntax') == 'Packages/LiveCode/LiveCode.sublime-syntax':
stack_name = None
# \A matches beginning of file
region = view.find(r'\Ascript "([-a-zA-Z0-9_\s\?!]+)"', 0, sublime.IGNORECASE)
if region.a >= 0:
stack_name = re.search(r'"([-a-zA-Z0-9_\s\?!]+)"', view.substr(region)).group(1)
print('stack name and filename', stack_name, view.file_name())
|
Add python file that is triggered when saving a file
|
[WIP] Add python file that is triggered when saving a file
|
Python
|
mit
|
trevordevore/livecode-sublimetext
|
[WIP] Add python file that is triggered when saving a file
|
import sublime
import sublime_plugin
import re
class LevureAppNotify(sublime_plugin.EventListener):
def on_post_save(self, view):
# 1. Get script only stack name. line 1: script "Name" [done]
# 2. Get project key from project settings
# 3. Send notification over socket with project key and script name
# 4. Get response from LiveCode IDE
if view.settings().get('syntax') == 'Packages/LiveCode/LiveCode.sublime-syntax':
stack_name = None
# \A matches beginning of file
region = view.find(r'\Ascript "([-a-zA-Z0-9_\s\?!]+)"', 0, sublime.IGNORECASE)
if region.a >= 0:
stack_name = re.search(r'"([-a-zA-Z0-9_\s\?!]+)"', view.substr(region)).group(1)
print('stack name and filename', stack_name, view.file_name())
|
<commit_before><commit_msg>[WIP] Add python file that is triggered when saving a file<commit_after>
|
import sublime
import sublime_plugin
import re
class LevureAppNotify(sublime_plugin.EventListener):
def on_post_save(self, view):
# 1. Get script only stack name. line 1: script "Name" [done]
# 2. Get project key from project settings
# 3. Send notification over socket with project key and script name
# 4. Get response from LiveCode IDE
if view.settings().get('syntax') == 'Packages/LiveCode/LiveCode.sublime-syntax':
stack_name = None
# \A matches beginning of file
region = view.find(r'\Ascript "([-a-zA-Z0-9_\s\?!]+)"', 0, sublime.IGNORECASE)
if region.a >= 0:
stack_name = re.search(r'"([-a-zA-Z0-9_\s\?!]+)"', view.substr(region)).group(1)
print('stack name and filename', stack_name, view.file_name())
|
[WIP] Add python file that is triggered when saving a fileimport sublime
import sublime_plugin
import re
class LevureAppNotify(sublime_plugin.EventListener):
def on_post_save(self, view):
# 1. Get script only stack name. line 1: script "Name" [done]
# 2. Get project key from project settings
# 3. Send notification over socket with project key and script name
# 4. Get response from LiveCode IDE
if view.settings().get('syntax') == 'Packages/LiveCode/LiveCode.sublime-syntax':
stack_name = None
# \A matches beginning of file
region = view.find(r'\Ascript "([-a-zA-Z0-9_\s\?!]+)"', 0, sublime.IGNORECASE)
if region.a >= 0:
stack_name = re.search(r'"([-a-zA-Z0-9_\s\?!]+)"', view.substr(region)).group(1)
print('stack name and filename', stack_name, view.file_name())
|
<commit_before><commit_msg>[WIP] Add python file that is triggered when saving a file<commit_after>import sublime
import sublime_plugin
import re
class LevureAppNotify(sublime_plugin.EventListener):
def on_post_save(self, view):
# 1. Get script only stack name. line 1: script "Name" [done]
# 2. Get project key from project settings
# 3. Send notification over socket with project key and script name
# 4. Get response from LiveCode IDE
if view.settings().get('syntax') == 'Packages/LiveCode/LiveCode.sublime-syntax':
stack_name = None
# \A matches beginning of file
region = view.find(r'\Ascript "([-a-zA-Z0-9_\s\?!]+)"', 0, sublime.IGNORECASE)
if region.a >= 0:
stack_name = re.search(r'"([-a-zA-Z0-9_\s\?!]+)"', view.substr(region)).group(1)
print('stack name and filename', stack_name, view.file_name())
|
|
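The stack-name extraction in the plugin can be exercised outside Sublime Text. A standalone sketch, using a raw string so the regex escapes are explicit; the pattern is the same one the listener builds inline.

import re

STACK_RE = re.compile(r'\Ascript "([-a-zA-Z0-9_\s\?!]+)"', re.IGNORECASE)

def stack_name(source):
    # returns the script-only stack name from line 1, or None
    match = STACK_RE.match(source)
    return match.group(1) if match else None

assert stack_name('script "My Stack"\non mouseUp\n') == 'My Stack'
assert stack_name('on mouseUp\n') is None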
21219c2ba2e6dbd82c8c9d226de96dafb5c9bb4e
|
Communication/mavtester.py
|
Communication/mavtester.py
|
#!/usr/bin/env python
'''
test mavlink messages
Do not forget to specify the baudrate (default 115200)
'''
import sys, struct, time, os
from curses import ascii
from pymavlink import mavutil
from argparse import ArgumentParser
parser = ArgumentParser(description=__doc__)
parser.add_argument("--baudrate", type=int,
help="master port baud rate", default=115200)
parser.add_argument("--device", required=True, help="serial device")
parser.add_argument("--source-system", dest='SOURCE_SYSTEM', type=int,
default=255, help='MAVLink source system for this GCS')
args = parser.parse_args()
def wait_heartbeat(m):
'''wait for a heartbeat so we know the target system IDs'''
print("Waiting for APM heartbeat")
msg = m.recv_match(type='HEARTBEAT', blocking=True)
print("Heartbeat from APM (system %u component %u)" % (m.target_system, m.target_component))
# create a mavlink serial instance
master = mavutil.mavlink_connection(args.device, baud=args.baudrate, source_system=args.SOURCE_SYSTEM)
# wait for the heartbeat msg to find the system ID
while True:
wait_heartbeat(master)
|
Add a script to test mavlink connection (read heartbeat)
|
Add a script to test mavlink connection (read heartbeat)
|
Python
|
mit
|
baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite
|
Add a script to test mavlink connection (read heartbeat)
|
#!/usr/bin/env python
'''
test mavlink messages
Do not forget to specify the baudrate (default 115200)
'''
import sys, struct, time, os
from curses import ascii
from pymavlink import mavutil
from argparse import ArgumentParser
parser = ArgumentParser(description=__doc__)
parser.add_argument("--baudrate", type=int,
help="master port baud rate", default=115200)
parser.add_argument("--device", required=True, help="serial device")
parser.add_argument("--source-system", dest='SOURCE_SYSTEM', type=int,
default=255, help='MAVLink source system for this GCS')
args = parser.parse_args()
def wait_heartbeat(m):
'''wait for a heartbeat so we know the target system IDs'''
print("Waiting for APM heartbeat")
msg = m.recv_match(type='HEARTBEAT', blocking=True)
print("Heartbeat from APM (system %u component %u)" % (m.target_system, m.target_component))
# create a mavlink serial instance
master = mavutil.mavlink_connection(args.device, baud=args.baudrate, source_system=args.SOURCE_SYSTEM)
# wait for the heartbeat msg to find the system ID
while True:
wait_heartbeat(master)
|
<commit_before><commit_msg>Add a script to test mavlink connection (read heartbeat)<commit_after>
|
#!/usr/bin/env python
'''
test mavlink messages
Do not forget to specify the baudrate (default 115200)
'''
import sys, struct, time, os
from curses import ascii
from pymavlink import mavutil
from argparse import ArgumentParser
parser = ArgumentParser(description=__doc__)
parser.add_argument("--baudrate", type=int,
help="master port baud rate", default=115200)
parser.add_argument("--device", required=True, help="serial device")
parser.add_argument("--source-system", dest='SOURCE_SYSTEM', type=int,
default=255, help='MAVLink source system for this GCS')
args = parser.parse_args()
def wait_heartbeat(m):
'''wait for a heartbeat so we know the target system IDs'''
print("Waiting for APM heartbeat")
msg = m.recv_match(type='HEARTBEAT', blocking=True)
print("Heartbeat from APM (system %u component %u)" % (m.target_system, m.target_component))
# create a mavlink serial instance
master = mavutil.mavlink_connection(args.device, baud=args.baudrate, source_system=args.SOURCE_SYSTEM)
# wait for the heartbeat msg to find the system ID
while True:
wait_heartbeat(master)
|
Add a script to test mavlink connection (read heartbeat)#!/usr/bin/env python
'''
test mavlink messages
Do not forget to specify the baudrate (default 115200)
'''
import sys, struct, time, os
from curses import ascii
from pymavlink import mavutil
from argparse import ArgumentParser
parser = ArgumentParser(description=__doc__)
parser.add_argument("--baudrate", type=int,
help="master port baud rate", default=115200)
parser.add_argument("--device", required=True, help="serial device")
parser.add_argument("--source-system", dest='SOURCE_SYSTEM', type=int,
default=255, help='MAVLink source system for this GCS')
args = parser.parse_args()
def wait_heartbeat(m):
'''wait for a heartbeat so we know the target system IDs'''
print("Waiting for APM heartbeat")
msg = m.recv_match(type='HEARTBEAT', blocking=True)
print("Heartbeat from APM (system %u component %u)" % (m.target_system, m.target_component))
# create a mavlink serial instance
master = mavutil.mavlink_connection(args.device, baud=args.baudrate, source_system=args.SOURCE_SYSTEM)
# wait for the heartbeat msg to find the system ID
while True:
wait_heartbeat(master)
|
<commit_before><commit_msg>Add a script to test mavlink connection (read heartbeat)<commit_after>#!/usr/bin/env python
'''
test mavlink messages
Do not forget to specify the baudrate (default 115200)
'''
import sys, struct, time, os
from curses import ascii
from pymavlink import mavutil
from argparse import ArgumentParser
parser = ArgumentParser(description=__doc__)
parser.add_argument("--baudrate", type=int,
help="master port baud rate", default=115200)
parser.add_argument("--device", required=True, help="serial device")
parser.add_argument("--source-system", dest='SOURCE_SYSTEM', type=int,
default=255, help='MAVLink source system for this GCS')
args = parser.parse_args()
def wait_heartbeat(m):
'''wait for a heartbeat so we know the target system IDs'''
print("Waiting for APM heartbeat")
msg = m.recv_match(type='HEARTBEAT', blocking=True)
print("Heartbeat from APM (system %u component %u)" % (m.target_system, m.target_component))
# create a mavlink serial instance
master = mavutil.mavlink_connection(args.device, baud=args.baudrate, source_system=args.SOURCE_SYSTEM)
# wait for the heartbeat msg to find the system ID
while True:
wait_heartbeat(master)
|
|
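recv_match also accepts a timeout (in seconds) and returns None on expiry, which avoids blocking forever when no autopilot is attached. A hedged variant of the wait loop; the device string is a placeholder and the snippet needs real hardware to produce output.

from pymavlink import mavutil

def wait_heartbeat(master, timeout=10):
    '''wait for a heartbeat, but give up after `timeout` seconds'''
    msg = master.recv_match(type='HEARTBEAT', blocking=True, timeout=timeout)
    if msg is None:
        raise TimeoutError('no HEARTBEAT received within %s seconds' % timeout)
    return master.target_system, master.target_component

if __name__ == '__main__':
    master = mavutil.mavlink_connection('/dev/ttyUSB0', baud=115200)
    print('Heartbeat from system %u component %u' % wait_heartbeat(master))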
a0c75c6bff9934bff4c99d60297d0f3580e27612
|
misc/get_transformer_objects.py
|
misc/get_transformer_objects.py
|
# -*- encoding: utf-8
"""
Snippet.
AWLC: When I'm trying to diagnose failures on the transformer queue,
I save the queue contents with sqs_freezeray [1], and then I want to
go through them to analyse the failures.
Our transformer messages contain pointers to S3, not the records themselves.
This snippet gets messages from the frozen SQS output, extracts the
S3 pointers, and yields the contents of the resulting objects.
Copy + paste this into a Jupyter notebook/script to use.
Usage:
>>> for obj in get_transformer_objects():
... print(obj)
{"sourceId": "123", "sourceName": "sierra", ...}
{"sourceId": "456", "sourceName": "sierra", ...}
{"sourceId": "789", "sourceName": "sierra", ...}
[1]: https://github.com/wellcometrust/dockerfiles/tree/master/sqs_freezeray
"""
def get_transformer_objects(key=None):
import json
import boto3
s3 = boto3.client('s3')
if key is None:
resp = s3.list_objects_v2(
Bucket='wellcomecollection-platform-infra',
Prefix='sqs'
)
possible_keys = [r['Key'] for r in resp['Contents']]
key = max(possible_keys)
if not key.startswith('sqs/'):
key = f'sqs/{key}'
data = s3.get_object(
Bucket='wellcomecollection-platform-infra',
Key=key
)['Body'].read()
jl = json.loads
for line in data.splitlines():
s3key = jl(jl(jl(line)['Body'])['Message'])['s3key']
s3obj = s3.get_object(
Bucket='wellcomecollection-vhs-sourcedata',
Key=s3key
)
yield s3obj['Body'].read()
|
Add my script for handling transformer failures
|
Add my script for handling transformer failures
|
Python
|
mit
|
wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api
|
Add my script for handling transformer failures
|
# -*- encoding: utf-8
"""
Snippet.
AWLC: When I'm trying to diagnose failures on the transformer queue,
I save the queue contents with sqs_freezeray [1], and then I want to
go through them to analyse the failures.
Our transformer messages contain pointers to S3, not the records themselves.
This snippet gets messages from the frozen SQS output, extracts the
S3 pointers, and yields the contents of the resulting objects.
Copy + paste this into a Jupyter notebook/script to use.
Usage:
>>> for obj in get_transformer_objects():
... print(obj)
{"sourceId": "123", "sourceName": "sierra", ...}
{"sourceId": "456", "sourceName": "sierra", ...}
{"sourceId": "789", "sourceName": "sierra", ...}
[1]: https://github.com/wellcometrust/dockerfiles/tree/master/sqs_freezeray
"""
def get_transformer_objects(key=None):
import json
import boto3
s3 = boto3.client('s3')
if key is None:
resp = s3.list_objects_v2(
Bucket='wellcomecollection-platform-infra',
Prefix='sqs'
)
possible_keys = [r['Key'] for r in resp['Contents']]
key = max(possible_keys)
if not key.startswith('sqs/'):
key = f'sqs/{key}'
data = s3.get_object(
Bucket='wellcomecollection-platform-infra',
Key=key
)['Body'].read()
jl = json.loads
for line in data.splitlines():
s3key = jl(jl(jl(line)['Body'])['Message'])['s3key']
s3obj = s3.get_object(
Bucket='wellcomecollection-vhs-sourcedata',
Key=s3key
)
yield s3obj['Body'].read()
|
<commit_before><commit_msg>Add my script for handling transformer failures<commit_after>
|
# -*- encoding: utf-8
"""
Snippet.
AWLC: When I'm trying to diagnose failures on the transformer queue,
I save the queue contents with sqs_freezeray [1], and then I want to
go through them to analyse the failures.
Our transformer messages contain pointers to S3, not the records themselves.
This snippet gets messages from the frozen SQS output, extracts the
S3 pointers, and yields the contents of the resulting objects.
Copy + paste this into a Jupyter notebook/script to use.
Usage:
>>> for obj in get_transformer_objects():
... print(obj)
{"sourceId": "123", "sourceName": "sierra", ...}
{"sourceId": "456", "sourceName": "sierra", ...}
{"sourceId": "789", "sourceName": "sierra", ...}
[1]: https://github.com/wellcometrust/dockerfiles/tree/master/sqs_freezeray
"""
def get_transformer_objects(key=None):
import json
import boto3
s3 = boto3.client('s3')
if key is None:
resp = s3.list_objects_v2(
Bucket='wellcomecollection-platform-infra',
Prefix='sqs'
)
possible_keys = [r['Key'] for r in resp['Contents']]
key = max(possible_keys)
if not key.startswith('sqs/'):
key = f'sqs/{key}'
data = s3.get_object(
Bucket='wellcomecollection-platform-infra',
Key=key
)['Body'].read()
jl = json.loads
for line in data.splitlines():
s3key = jl(jl(jl(line)['Body'])['Message'])['s3key']
s3obj = s3.get_object(
Bucket='wellcomecollection-vhs-sourcedata',
Key=s3key
)
yield s3obj['Body'].read()
|
Add my script for handling transformer failures# -*- encoding: utf-8
"""
Snippet.
AWLC: When I'm trying to diagnose failures on the transformer queue,
I save the queue contents with sqs_freezeray [1], and then I want to
go through them to analyse the failures.
Our transformer messages contain pointers to S3, not the records themselves.
This snippet gets messages from the frozen SQS output, extracts the
S3 pointers, and yields the contents of the resulting objects.
Copy + paste this into a Jupyter notebook/script to use.
Usage:
>>> for obj in get_transformer_objects():
... print(obj)
{"sourceId": "123", "sourceName": "sierra", ...}
{"sourceId": "456", "sourceName": "sierra", ...}
{"sourceId": "789", "sourceName": "sierra", ...}
[1]: https://github.com/wellcometrust/dockerfiles/tree/master/sqs_freezeray
"""
def get_transformer_objects(key=None):
import json
import boto3
s3 = boto3.client('s3')
if key is None:
resp = s3.list_objects_v2(
Bucket='wellcomecollection-platform-infra',
Prefix='sqs'
)
possible_keys = [r['Key'] for r in resp['Contents']]
key = max(possible_keys)
if not key.startswith('sqs/'):
key = f'sqs/{key}'
data = s3.get_object(
Bucket='wellcomecollection-platform-infra',
Key=key
)['Body'].read()
jl = json.loads
for line in data.splitlines():
s3key = jl(jl(jl(line)['Body'])['Message'])['s3key']
s3obj = s3.get_object(
Bucket='wellcomecollection-vhs-sourcedata',
Key=s3key
)
yield s3obj['Body'].read()
|
<commit_before><commit_msg>Add my script for handling transformer failures<commit_after># -*- encoding: utf-8
"""
Snippet.
AWLC: When I'm trying to diagnose failures on the transformer queue,
I save the queue contents with sqs_freezeray [1], and then I want to
go through them to analyse the failures.
Our transformer messages contain pointers to S3, not the records themselves.
This snippet gets messages from the frozen SQS output, extracts the
S3 pointers, and yields the contents of the resulting objects.
Copy + paste this into a Jupyter notebook/script to use.
Usage:
>>> for obj in get_transformer_objects():
... print(obj)
{"sourceId": "123", "sourceName": "sierra", ...}
{"sourceId": "456", "sourceName": "sierra", ...}
{"sourceId": "789", "sourceName": "sierra", ...}
[1]: https://github.com/wellcometrust/dockerfiles/tree/master/sqs_freezeray
"""
def get_transformer_objects(key=None):
import json
import boto3
s3 = boto3.client('s3')
if key is None:
resp = s3.list_objects_v2(
Bucket='wellcomecollection-platform-infra',
Prefix='sqs'
)
possible_keys = [r['Key'] for r in resp['Contents']]
key = max(possible_keys)
if not key.startswith('sqs/'):
key = f'sqs/{key}'
data = s3.get_object(
Bucket='wellcomecollection-platform-infra',
Key=key
)['Body'].read()
jl = json.loads
for line in data.splitlines():
s3key = jl(jl(jl(line)['Body'])['Message'])['s3key']
s3obj = s3.get_object(
Bucket='wellcomecollection-vhs-sourcedata',
Key=s3key
)
yield s3obj['Body'].read()
|
|
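The triple json.loads chain unwraps three envelopes: the frozen SQS record, the SNS notification inside its Body, and the platform message inside that. A self-contained sketch with a fabricated line showing the nesting:

import json

inner = json.dumps({'s3key': 'sierra/00/record.json'})
sns_envelope = json.dumps({'Message': inner})
line = json.dumps({'Body': sns_envelope})

s3key = json.loads(json.loads(json.loads(line)['Body'])['Message'])['s3key']
assert s3key == 'sierra/00/record.json'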
bc8926d62c05549127e0fd9713e5a16f3d7565f7
|
tests/projects/test_views.py
|
tests/projects/test_views.py
|
import pytest
from django.core.urlresolvers import reverse
@pytest.mark.django_db
def test_hide_private_projects(client, user, project_factory):
public = project_factory()
private = project_factory(is_public=False)
private.participants.add(user)
client.login(username=user, password='password')
url = reverse('project-list')
response = client.get(url)
assert response.status_code == 200
project_list = response.context['project_list']
assert public in project_list
assert private not in project_list
|
Add test to ensure private projects are not shown in the project list
|
Add test to ensure private projects are not shown in the project list
|
Python
|
agpl-3.0
|
liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin
|
Add test to ensure private projects are not shown in the project list
|
import pytest
from django.core.urlresolvers import reverse
@pytest.mark.django_db
def test_hide_private_projects(client, user, project_factory):
public = project_factory()
private = project_factory(is_public=False)
private.participants.add(user)
client.login(username=user, password='password')
url = reverse('project-list')
response = client.get(url)
assert response.status_code == 200
project_list = response.context['project_list']
assert public in project_list
assert private not in project_list
|
<commit_before><commit_msg>Add test to ensure private projects are not shown in the project list<commit_after>
|
import pytest
from django.core.urlresolvers import reverse
@pytest.mark.django_db
def test_hide_private_projects(client, user, project_factory):
public = project_factory()
private = project_factory(is_public=False)
private.participants.add(user)
client.login(username=user, password='password')
url = reverse('project-list')
response = client.get(url)
assert response.status_code == 200
project_list = response.context['project_list']
assert public in project_list
assert private not in project_list
|
Add test to ensure private projects are not shown in the project listimport pytest
from django.core.urlresolvers import reverse
@pytest.mark.django_db
def test_hide_private_projects(client, user, project_factory):
public = project_factory()
private = project_factory(is_public=False)
private.participants.add(user)
client.login(username=user, password='password')
url = reverse('project-list')
response = client.get(url)
assert response.status_code == 200
project_list = response.context['project_list']
assert public in project_list
assert private not in project_list
|
<commit_before><commit_msg>Add test to ensure private projects are not shown in the project list<commit_after>import pytest
from django.core.urlresolvers import reverse
@pytest.mark.django_db
def test_hide_private_projects(client, user, project_factory):
public = project_factory()
private = project_factory(is_public=False)
private.participants.add(user)
client.login(username=user, password='password')
url = reverse('project-list')
response = client.get(url)
assert response.status_code == 200
project_list = response.context['project_list']
assert public in project_list
assert private not in project_list
|
|
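A natural companion check, sketched under the same assumptions (the `client` and `project_factory` fixtures come from the repository's conftest): an anonymous visitor should not see private projects either.

import pytest
from django.core.urlresolvers import reverse

@pytest.mark.django_db
def test_hide_private_projects_from_anonymous(client, project_factory):
    public = project_factory()
    private = project_factory(is_public=False)
    response = client.get(reverse('project-list'))
    assert response.status_code == 200
    assert public in response.context['project_list']
    assert private not in response.context['project_list']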
2622c2ac4ecfc499f947d749b76ea1538148d4bb
|
heat/db/sqlalchemy/migrate_repo/versions/013_owner_id_uuid.py
|
heat/db/sqlalchemy/migrate_repo/versions/013_owner_id_uuid.py
|
from sqlalchemy import *
from migrate import *
from heat.common import utils
def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
stack = Table('stack', meta, autoload=True)
dialect = migrate_engine.url.get_dialect().name
if not dialect.startswith('sqlite'):
fkeys = list(stack.c.owner_id.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
ForeignKeyConstraint(columns=[stack.c.owner_id],
refcolumns=[stack.c.id],
name=fkey_name).drop()
stack.c.owner_id.alter(String(36), nullable=True)
fkeys = list(stack.c.owner_id.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
ForeignKeyConstraint(columns=[stack.c.owner_id],
refcolumns=[stack.c.id],
name=fkey_name).create()
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
dialect = migrate_engine.url.get_dialect().name
if dialect.startswith('sqlite'):
return
stack = Table('stack', meta, autoload=True)
# the stack_id FK-recreate block below references the event table
event = Table('event', meta, autoload=True)
fkeys = list(stack.c.owner_id.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
ForeignKeyConstraint(columns=[stack.c.owner_id],
refcolumns=[stack.c.id],
name=fkey_name).drop()
stack.c.owner_id.alter(Integer, nullable=True)
fkeys = list(event.c.stack_id.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
ForeignKeyConstraint(columns=[event.c.stack_id],
refcolumns=[stack.c.id],
name=fkey_name).create()
fkeys = list(stack.c.owner_id.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
ForeignKeyConstraint(columns=[stack.c.owner_id],
refcolumns=[stack.c.id],
name=fkey_name).create()
|
Allow stack.owner_id to store a uuid.
|
Allow stack.owner_id to store a uuid.
Fixes bug 1078854
Change-Id: I3dae1502968853d563ba52bc1d6656c48d5d18ba
|
Python
|
apache-2.0
|
JioCloud/heat,ntt-sic/heat,openstack/heat,citrix-openstack-build/heat,rickerc/heat_audit,steveb/heat,noironetworks/heat,steveb/heat,NeCTAR-RC/heat,dragorosson/heat,noironetworks/heat,dims/heat,cryptickp/heat,srznew/heat,cwolferh/heat-scratch,dims/heat,pratikmallya/heat,JioCloud/heat,gonzolino/heat,redhat-openstack/heat,varunarya10/heat,takeshineshiro/heat,maestro-hybrid-cloud/heat,takeshineshiro/heat,redhat-openstack/heat,openstack/heat,rickerc/heat_audit,rh-s/heat,miguelgrinberg/heat,jasondunsmore/heat,varunarya10/heat,rdo-management/heat,pshchelo/heat,Triv90/Heat,dragorosson/heat,rdo-management/heat,jasondunsmore/heat,NeCTAR-RC/heat,Triv90/Heat,Triv90/Heat,maestro-hybrid-cloud/heat,citrix-openstack-build/heat,cwolferh/heat-scratch,pratikmallya/heat,rh-s/heat,miguelgrinberg/heat,srznew/heat,ntt-sic/heat,gonzolino/heat,cryptickp/heat,pshchelo/heat
|
Allow stack.owner_id to store a uuid.
Fixes bug 1078854
Change-Id: I3dae1502968853d563ba52bc1d6656c48d5d18ba
|
from sqlalchemy import *
from migrate import *
from heat.common import utils
def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
stack = Table('stack', meta, autoload=True)
dialect = migrate_engine.url.get_dialect().name
if not dialect.startswith('sqlite'):
fkeys = list(stack.c.owner_id.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
ForeignKeyConstraint(columns=[stack.c.owner_id],
refcolumns=[stack.c.id],
name=fkey_name).drop()
stack.c.owner_id.alter(String(36), nullable=True)
fkeys = list(stack.c.owner_id.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
ForeignKeyConstraint(columns=[stack.c.owner_id],
refcolumns=[stack.c.id],
name=fkey_name).create()
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
dialect = migrate_engine.url.get_dialect().name
if dialect.startswith('sqlite'):
return
stack = Table('stack', meta, autoload=True)
# the stack_id FK-recreate block below references the event table
event = Table('event', meta, autoload=True)
fkeys = list(stack.c.owner_id.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
ForeignKeyConstraint(columns=[stack.c.owner_id],
refcolumns=[stack.c.id],
name=fkey_name).drop()
stack.c.owner_id.alter(Integer, nullable=True)
fkeys = list(event.c.stack_id.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
ForeignKeyConstraint(columns=[event.c.stack_id],
refcolumns=[stack.c.id],
name=fkey_name).create()
fkeys = list(stack.c.owner_id.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
ForeignKeyConstraint(columns=[stack.c.owner_id],
refcolumns=[stack.c.id],
name=fkey_name).create()
|
<commit_before><commit_msg>Allow stack.owner_id to store a uuid.
Fixes bug 1078854
Change-Id: I3dae1502968853d563ba52bc1d6656c48d5d18ba<commit_after>
|
from sqlalchemy import *
from migrate import *
from heat.common import utils
def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
stack = Table('stack', meta, autoload=True)
dialect = migrate_engine.url.get_dialect().name
if not dialect.startswith('sqlite'):
fkeys = list(stack.c.owner_id.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
ForeignKeyConstraint(columns=[stack.c.owner_id],
refcolumns=[stack.c.id],
name=fkey_name).drop()
stack.c.owner_id.alter(String(36), nullable=True)
fkeys = list(stack.c.owner_id.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
ForeignKeyConstraint(columns=[stack.c.owner_id],
refcolumns=[stack.c.id],
name=fkey_name).create()
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
dialect = migrate_engine.url.get_dialect().name
if dialect.startswith('sqlite'):
return
stack = Table('stack', meta, autoload=True)
# the stack_id FK-recreate block below references the event table
event = Table('event', meta, autoload=True)
fkeys = list(stack.c.owner_id.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
ForeignKeyConstraint(columns=[stack.c.owner_id],
refcolumns=[stack.c.id],
name=fkey_name).drop()
stack.c.owner_id.alter(Integer, nullable=True)
fkeys = list(event.c.stack_id.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
ForeignKeyConstraint(columns=[event.c.stack_id],
refcolumns=[stack.c.id],
name=fkey_name).create()
fkeys = list(stack.c.owner_id.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
ForeignKeyConstraint(columns=[stack.c.owner_id],
refcolumns=[stack.c.id],
name=fkey_name).create()
|
Allow stack.owner_id to store a uuid.
Fixes bug 1078854
Change-Id: I3dae1502968853d563ba52bc1d6656c48d5d18bafrom sqlalchemy import *
from migrate import *
from heat.common import utils
def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
stack = Table('stack', meta, autoload=True)
dialect = migrate_engine.url.get_dialect().name
if not dialect.startswith('sqlite'):
fkeys = list(stack.c.owner_id.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
ForeignKeyConstraint(columns=[stack.c.owner_id],
refcolumns=[stack.c.id],
name=fkey_name).drop()
stack.c.owner_id.alter(String(36), nullable=True)
fkeys = list(stack.c.owner_id.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
ForeignKeyConstraint(columns=[stack.c.owner_id],
refcolumns=[stack.c.id],
name=fkey_name).create()
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
dialect = migrate_engine.url.get_dialect().name
if dialect.startswith('sqlite'):
return
stack = Table('stack', meta, autoload=True)
# the stack_id FK-recreate block below references the event table
event = Table('event', meta, autoload=True)
fkeys = list(stack.c.owner_id.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
ForeignKeyConstraint(columns=[stack.c.owner_id],
refcolumns=[stack.c.id],
name=fkey_name).drop()
stack.c.owner_id.alter(Integer, nullable=True)
fkeys = list(event.c.stack_id.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
ForeignKeyConstraint(columns=[event.c.stack_id],
refcolumns=[stack.c.id],
name=fkey_name).create()
fkeys = list(stack.c.owner_id.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
ForeignKeyConstraint(columns=[stack.c.owner_id],
refcolumns=[stack.c.id],
name=fkey_name).create()
|
<commit_before><commit_msg>Allow stack.owner_id to store a uuid.
Fixes bug 1078854
Change-Id: I3dae1502968853d563ba52bc1d6656c48d5d18ba<commit_after>from sqlalchemy import *
from migrate import *
from heat.common import utils
def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
stack = Table('stack', meta, autoload=True)
dialect = migrate_engine.url.get_dialect().name
if not dialect.startswith('sqlite'):
fkeys = list(stack.c.owner_id.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
ForeignKeyConstraint(columns=[stack.c.owner_id],
refcolumns=[stack.c.id],
name=fkey_name).drop()
stack.c.owner_id.alter(String(36), nullable=True)
fkeys = list(stack.c.owner_id.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
ForeignKeyConstraint(columns=[stack.c.owner_id],
refcolumns=[stack.c.id],
name=fkey_name).create()
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
dialect = migrate_engine.url.get_dialect().name
if dialect.startswith('sqlite'):
return
stack = Table('stack', meta, autoload=True)
# the stack_id FK-recreate block below references the event table
event = Table('event', meta, autoload=True)
fkeys = list(stack.c.owner_id.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
ForeignKeyConstraint(columns=[stack.c.owner_id],
refcolumns=[stack.c.id],
name=fkey_name).drop()
stack.c.owner_id.alter(Integer, nullable=True)
fkeys = list(event.c.stack_id.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
ForeignKeyConstraint(columns=[event.c.stack_id],
refcolumns=[stack.c.id],
name=fkey_name).create()
fkeys = list(stack.c.owner_id.foreign_keys)
if fkeys:
fkey_name = fkeys[0].constraint.name
ForeignKeyConstraint(columns=[stack.c.owner_id],
refcolumns=[stack.c.id],
name=fkey_name).create()
|
|
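The drop/alter/recreate sequence repeated above is the usual sqlalchemy-migrate pattern for retyping a column that carries a foreign key. A compact helper form of the same steps (illustrative only; the column objects come from an autoloaded, bound table, as in the migration):

from migrate import ForeignKeyConstraint

def retype_fk_column(column, refcolumn, new_type, **alter_kwargs):
    # drop the FK if one exists, retype the column, then recreate the FK
    fkeys = list(column.foreign_keys)
    fkey_name = fkeys[0].constraint.name if fkeys else None
    if fkey_name:
        ForeignKeyConstraint(columns=[column], refcolumns=[refcolumn],
                             name=fkey_name).drop()
    column.alter(new_type, **alter_kwargs)
    if fkey_name:
        ForeignKeyConstraint(columns=[column], refcolumns=[refcolumn],
                             name=fkey_name).create()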
45667354abd56a3ec47cf9978959f5a00e3d46a9
|
heat_integrationtests/functional/test_conditional_exposure.py
|
heat_integrationtests/functional/test_conditional_exposure.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heatclient import exc
import keystoneclient
from heat_integrationtests.common import test
class ConditionalExposureTestBase(test.HeatIntegrationTest):
def setUp(self):
super(ConditionalExposureTestBase, self).setUp()
self.client = self.orchestration_client
def _delete(self, stack_name):
stacks = self.client.stacks.list()
for s in stacks:
if s.stack_name == stack_name:
self._stack_delete(s.identifier)
break
class ServiceBasedExposureTest(ConditionalExposureTestBase):
# NOTE(pas-ha) if we ever decide to install Sahara on Heat
# functional gate, this must be changed to other not-installed
# but in principle supported service
unavailable_service = 'Sahara'
unavailable_template = """
heat_template_version: 2015-10-15
resources:
not_available:
type: OS::Sahara::NodeGroupTemplate
properties:
plugin_name: fake
hadoop_version: 0.1
flavor: m1.large
node_processes: []
"""
def setUp(self):
super(ServiceBasedExposureTest, self).setUp()
# check that Sahara endpoint is available
if self._is_sahara_deployed():
self.skipTest("Sahara is actually deployed, "
"can not run negative tests on "
"Sahara resources availability.")
def _is_sahara_deployed(self):
keystone = self.identity_client
try:
keystone.service_catalog.url_for(
attr='region',
filter_value=self.conf.region,
service_type='data-processing',
endpoint_type='publicURL')
except keystoneclient.exceptions.EndpointNotFound:
return False
return True
def test_unavailable_resources_not_listed(self):
resources = self.client.resource_types.list()
self.assertFalse(any(self.unavailable_service in r.resource_type
for r in resources))
def test_unavailable_resources_not_created(self):
stack_name = self._stack_rand_name()
self.addCleanup(self._delete, stack_name)
ex = self.assertRaises(exc.HTTPBadRequest,
self.client.stacks.create,
stack_name=stack_name,
template=self.unavailable_template)
self.assertIn('ResourceTypeUnavailable', ex.message)
self.assertIn('OS::Sahara::NodeGroupTemplate', ex.message)
|
Add functional test for resource exposure
|
Add functional test for resource exposure
These simple tests check that Sahara resources can not be created
and are not listed in the resource-type-list.
If we ever decide to install Sahara on the Heat functional tests gate,
tests should be changed to use other not installed but in principle
supported service and resources, as these tests will be skipped in this
case.
Related blueprint keystone-based-resource-availability
Change-Id: I6e692587f49b6c34c5e99b8d26bb6e60b7ce7af5
|
Python
|
apache-2.0
|
steveb/heat,cryptickp/heat,srznew/heat,cwolferh/heat-scratch,jasondunsmore/heat,dragorosson/heat,srznew/heat,jasondunsmore/heat,miguelgrinberg/heat,pratikmallya/heat,pratikmallya/heat,miguelgrinberg/heat,gonzolino/heat,takeshineshiro/heat,cwolferh/heat-scratch,dragorosson/heat,gonzolino/heat,cryptickp/heat,noironetworks/heat,dims/heat,noironetworks/heat,steveb/heat,openstack/heat,takeshineshiro/heat,maestro-hybrid-cloud/heat,maestro-hybrid-cloud/heat,dims/heat,openstack/heat
|
Add functional test for resource exposure
These simple tests check that Sahara resources can not be created
and are not listed in the resource-type-list.
If we ever decide to install Sahara on the Heat functional tests gate,
tests should be changed to use other not installed but in principle
supported service and resources, as these tests will be skipped in this
case.
Related blueprint keystone-based-resource-availability
Change-Id: I6e692587f49b6c34c5e99b8d26bb6e60b7ce7af5
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heatclient import exc
import keystoneclient
from heat_integrationtests.common import test
class ConditionalExposureTestBase(test.HeatIntegrationTest):
def setUp(self):
super(ConditionalExposureTestBase, self).setUp()
self.client = self.orchestration_client
def _delete(self, stack_name):
stacks = self.client.stacks.list()
for s in stacks:
if s.stack_name == stack_name:
self._stack_delete(s.identifier)
break
class ServiceBasedExposureTest(ConditionalExposureTestBase):
# NOTE(pas-ha) if we ever decide to install Sahara on Heat
# functional gate, this must be changed to other not-installed
# but in principle supported service
unavailable_service = 'Sahara'
unavailable_template = """
heat_template_version: 2015-10-15
resources:
not_available:
type: OS::Sahara::NodeGroupTemplate
properties:
plugin_name: fake
hadoop_version: 0.1
flavor: m1.large
node_processes: []
"""
def setUp(self):
super(ServiceBasedExposureTest, self).setUp()
# check that Sahara endpoint is available
if self._is_sahara_deployed():
self.skipTest("Sahara is actually deployed, "
"can not run negative tests on "
"Sahara resources availability.")
def _is_sahara_deployed(self):
keystone = self.identity_client
try:
keystone.service_catalog.url_for(
attr='region',
filter_value=self.conf.region,
service_type='data-processing',
endpoint_type='publicURL')
except keystoneclient.exceptions.EndpointNotFound:
return False
return True
def test_unavailable_resources_not_listed(self):
resources = self.client.resource_types.list()
self.assertFalse(any(self.unavailable_service in r.resource_type
for r in resources))
def test_unavailable_resources_not_created(self):
stack_name = self._stack_rand_name()
self.addCleanup(self._delete, stack_name)
ex = self.assertRaises(exc.HTTPBadRequest,
self.client.stacks.create,
stack_name=stack_name,
template=self.unavailable_template)
self.assertIn('ResourceTypeUnavailable', ex.message)
self.assertIn('OS::Sahara::NodeGroupTemplate', ex.message)
|
<commit_before><commit_msg>Add functional test for resource exposure
These simple tests check that Sahara resources can not be created
and are not listed in the resource-type-list.
If we ever decide to install Sahara on the Heat functional tests gate,
tests should be changed to use other not installed but in principle
supported service and resources, as these tests will be skipped in this
case.
Related blueprint keystone-based-resource-availability
Change-Id: I6e692587f49b6c34c5e99b8d26bb6e60b7ce7af5<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heatclient import exc
import keystoneclient
from heat_integrationtests.common import test
class ConditionalExposureTestBase(test.HeatIntegrationTest):
def setUp(self):
super(ConditionalExposureTestBase, self).setUp()
self.client = self.orchestration_client
def _delete(self, stack_name):
stacks = self.client.stacks.list()
for s in stacks:
if s.stack_name == stack_name:
self._stack_delete(s.identifier)
break
class ServiceBasedExposureTest(ConditionalExposureTestBase):
# NOTE(pas-ha) if we ever decide to install Sahara on Heat
# functional gate, this must be changed to other not-installed
# but in principle supported service
unavailable_service = 'Sahara'
unavailable_template = """
heat_template_version: 2015-10-15
resources:
not_available:
type: OS::Sahara::NodeGroupTemplate
properties:
plugin_name: fake
hadoop_version: 0.1
flavor: m1.large
node_processes: []
"""
def setUp(self):
super(ServiceBasedExposureTest, self).setUp()
# check that Sahara endpoint is available
if self._is_sahara_deployed():
self.skipTest("Sahara is actually deployed, "
"can not run negative tests on "
"Sahara resources availability.")
def _is_sahara_deployed(self):
keystone = self.identity_client
try:
keystone.service_catalog.url_for(
attr='region',
filter_value=self.conf.region,
service_type='data-processing',
endpoint_type='publicURL')
except keystoneclient.exceptions.EndpointNotFound:
return False
return True
def test_unavailable_resources_not_listed(self):
resources = self.client.resource_types.list()
self.assertFalse(any(self.unavailable_service in r.resource_type
for r in resources))
def test_unavailable_resources_not_created(self):
stack_name = self._stack_rand_name()
self.addCleanup(self._delete, stack_name)
ex = self.assertRaises(exc.HTTPBadRequest,
self.client.stacks.create,
stack_name=stack_name,
template=self.unavailable_template)
self.assertIn('ResourceTypeUnavailable', ex.message)
self.assertIn('OS::Sahara::NodeGroupTemplate', ex.message)
|
Add functional test for resource exposure
These simple tests check that Sahara resources can not be created
and are not listed in the resource-type-list.
If we ever decide to install Sahara on the Heat functional tests gate,
tests should be changed to use other not installed but in principle
supported service and resources, as these tests will be skipped in this
case.
Related blueprint keystone-based-resource-availability
Change-Id: I6e692587f49b6c34c5e99b8d26bb6e60b7ce7af5# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heatclient import exc
import keystoneclient
from heat_integrationtests.common import test
class ConditionalExposureTestBase(test.HeatIntegrationTest):
def setUp(self):
super(ConditionalExposureTestBase, self).setUp()
self.client = self.orchestration_client
def _delete(self, stack_name):
stacks = self.client.stacks.list()
for s in stacks:
if s.stack_name == stack_name:
self._stack_delete(s.identifier)
break
class ServiceBasedExposureTest(ConditionalExposureTestBase):
# NOTE(pas-ha) if we ever decide to install Sahara on Heat
# functional gate, this must be changed to other not-installed
# but in principle supported service
unavailable_service = 'Sahara'
unavailable_template = """
heat_template_version: 2015-10-15
resources:
not_available:
type: OS::Sahara::NodeGroupTemplate
properties:
plugin_name: fake
hadoop_version: 0.1
flavor: m1.large
node_processes: []
"""
def setUp(self):
super(ServiceBasedExposureTest, self).setUp()
# check that Sahara endpoint is available
if self._is_sahara_deployed():
self.skipTest("Sahara is actually deployed, "
"can not run negative tests on "
"Sahara resources availability.")
def _is_sahara_deployed(self):
keystone = self.identity_client
try:
keystone.service_catalog.url_for(
attr='region',
filter_value=self.conf.region,
service_type='data-processing',
endpoint_type='publicURL')
except keystoneclient.exceptions.EndpointNotFound:
return False
return True
def test_unavailable_resources_not_listed(self):
resources = self.client.resource_types.list()
self.assertFalse(any(self.unavailable_service in r.resource_type
for r in resources))
def test_unavailable_resources_not_created(self):
stack_name = self._stack_rand_name()
self.addCleanup(self._delete, stack_name)
ex = self.assertRaises(exc.HTTPBadRequest,
self.client.stacks.create,
stack_name=stack_name,
template=self.unavailable_template)
self.assertIn('ResourceTypeUnavailable', ex.message)
self.assertIn('OS::Sahara::NodeGroupTemplate', ex.message)
|
<commit_before><commit_msg>Add functional test for resource exposure
These simple tests check that Sahara resources can not be created
and are not listed in the resource-type-list.
If we ever decide to install Sahara on the Heat functional tests gate,
tests should be changed to use other not installed but in principle
supported service and resources, as these tests will be skipped in this
case.
Related blueprint keystone-based-resource-availability
Change-Id: I6e692587f49b6c34c5e99b8d26bb6e60b7ce7af5<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heatclient import exc
import keystoneclient
from heat_integrationtests.common import test
class ConditionalExposureTestBase(test.HeatIntegrationTest):
def setUp(self):
super(ConditionalExposureTestBase, self).setUp()
self.client = self.orchestration_client
def _delete(self, stack_name):
stacks = self.client.stacks.list()
for s in stacks:
if s.stack_name == stack_name:
self._stack_delete(s.identifier)
break
class ServiceBasedExposureTest(ConditionalExposureTestBase):
# NOTE(pas-ha) if we ever decide to install Sahara on Heat
# functional gate, this must be changed to other not-installed
# but in principle supported service
unavailable_service = 'Sahara'
unavailable_template = """
heat_template_version: 2015-10-15
resources:
not_available:
type: OS::Sahara::NodeGroupTemplate
properties:
plugin_name: fake
hadoop_version: 0.1
flavor: m1.large
node_processes: []
"""
def setUp(self):
super(ServiceBasedExposureTest, self).setUp()
# check that Sahara endpoint is available
if self._is_sahara_deployed():
self.skipTest("Sahara is actually deployed, "
"can not run negative tests on "
"Sahara resources availability.")
def _is_sahara_deployed(self):
keystone = self.identity_client
try:
keystone.service_catalog.url_for(
attr='region',
filter_value=self.conf.region,
service_type='data-processing',
endpoint_type='publicURL')
except keystoneclient.exceptions.EndpointNotFound:
return False
return True
def test_unavailable_resources_not_listed(self):
resources = self.client.resource_types.list()
self.assertFalse(any(self.unavailable_service in r.resource_type
for r in resources))
def test_unavailable_resources_not_created(self):
stack_name = self._stack_rand_name()
self.addCleanup(self._delete, stack_name)
ex = self.assertRaises(exc.HTTPBadRequest,
self.client.stacks.create,
stack_name=stack_name,
template=self.unavailable_template)
self.assertIn('ResourceTypeUnavailable', ex.message)
self.assertIn('OS::Sahara::NodeGroupTemplate', ex.message)
|
|
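The availability check in the record above reduces to a single keystone catalog lookup; a minimal standalone sketch of the same probe follows. The Client() construction and credential parameter names are illustrative assumptions, while the service_catalog.url_for() call mirrors the record.

# Sketch of the Sahara-endpoint probe, outside the test harness.
# The Client() arguments are placeholders, not taken from the record.
import keystoneclient.exceptions
from keystoneclient.v2_0 import client as ks_client

def sahara_deployed(auth_url, username, password, tenant_name, region):
    keystone = ks_client.Client(auth_url=auth_url, username=username,
                                password=password, tenant_name=tenant_name)
    try:
        keystone.service_catalog.url_for(
            attr='region',
            filter_value=region,
            service_type='data-processing',
            endpoint_type='publicURL')
    except keystoneclient.exceptions.EndpointNotFound:
        return False
    return True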
0dee40cb737121b0fcc141597c04bd3b39d8fbca
|
tests/unit/test_blake.py
|
tests/unit/test_blake.py
|
# Import nacl libs
import libnacl.blake
import libnacl.utils
# Import python libs
import unittest
class TestBlake(unittest.TestCase):
'''
Test blake hash functions
'''
def test_keyless_blake(self):
msg1 = b'Are you suggesting coconuts migrate?'
msg2 = b'Not at all, they could be carried.'
chash1 = libnacl.crypto_generichash(msg1)
chash2 = libnacl.crypto_generichash(msg2)
self.assertNotEqual(msg1, chash1)
self.assertNotEqual(msg2, chash2)
self.assertNotEqual(chash2, chash1)
def test_key_blake(self):
msg1 = b'Are you suggesting coconuts migrate?'
msg2 = b'Not at all, they could be carried.'
key1 = libnacl.utils.rand_nonce()
key2 = libnacl.utils.rand_nonce()
khash1_1 = libnacl.blake.Blake2b(msg1, key1).digest()
khash1_1_2 = libnacl.blake.Blake2b(msg1, key1).digest()
khash1_2 = libnacl.blake.Blake2b(msg1, key2).digest()
khash2_1 = libnacl.blake.Blake2b(msg2, key1).digest()
khash2_2 = libnacl.blake.Blake2b(msg2, key2).digest()
self.assertNotEqual(msg1, khash1_1)
self.assertNotEqual(msg1, khash1_2)
self.assertNotEqual(msg2, khash2_1)
self.assertNotEqual(msg2, khash2_2)
self.assertNotEqual(khash1_1, khash1_2)
self.assertNotEqual(khash2_1, khash2_2)
self.assertNotEqual(khash1_1, khash2_1)
self.assertNotEqual(khash1_2, khash2_2)
self.assertEqual(khash1_1, khash1_1_2)
|
Add tests for high level blake hash class
|
Add tests for high level blake hash class
|
Python
|
apache-2.0
|
RaetProtocol/libnacl,mindw/libnacl,cachedout/libnacl,saltstack/libnacl,johnttan/libnacl,coinkite/libnacl
|
Add tests for high level blake hash class
|
# Import nacl libs
import libnacl.blake
import libnacl.utils
# Import python libs
import unittest
class TestBlake(unittest.TestCase):
'''
Test blake hash functions
'''
def test_keyless_blake(self):
msg1 = b'Are you suggesting coconuts migrate?'
msg2 = b'Not at all, they could be carried.'
chash1 = libnacl.crypto_generichash(msg1)
chash2 = libnacl.crypto_generichash(msg2)
self.assertNotEqual(msg1, chash1)
self.assertNotEqual(msg2, chash2)
self.assertNotEqual(chash2, chash1)
def test_key_blake(self):
msg1 = b'Are you suggesting coconuts migrate?'
msg2 = b'Not at all, they could be carried.'
key1 = libnacl.utils.rand_nonce()
key2 = libnacl.utils.rand_nonce()
khash1_1 = libnacl.blake.Blake2b(msg1, key1).digest()
khash1_1_2 = libnacl.blake.Blake2b(msg1, key1).digest()
khash1_2 = libnacl.blake.Blake2b(msg1, key2).digest()
khash2_1 = libnacl.blake.Blake2b(msg2, key1).digest()
khash2_2 = libnacl.blake.Blake2b(msg2, key2).digest()
self.assertNotEqual(msg1, khash1_1)
self.assertNotEqual(msg1, khash1_2)
self.assertNotEqual(msg2, khash2_1)
self.assertNotEqual(msg2, khash2_2)
self.assertNotEqual(khash1_1, khash1_2)
self.assertNotEqual(khash2_1, khash2_2)
self.assertNotEqual(khash1_1, khash2_1)
self.assertNotEqual(khash1_2, khash2_2)
self.assertEqual(khash1_1, khash1_1_2)
|
<commit_before><commit_msg>Add tests for high level blake hash class<commit_after>
|
# Import nacl libs
import libnacl.blake
# Import python libs
import unittest
class TestBlake(unittest.TestCase):
'''
Test sign functions
'''
def test_keyless_blake(self):
msg1 = b'Are you suggesting coconuts migrate?'
msg2 = b'Not at all, they could be carried.'
chash1 = libnacl.crypto_generichash(msg1)
chash2 = libnacl.crypto_generichash(msg2)
self.assertNotEqual(msg1, chash1)
self.assertNotEqual(msg2, chash2)
self.assertNotEqual(chash2, chash1)
def test_key_blake(self):
msg1 = b'Are you suggesting coconuts migrate?'
msg2 = b'Not at all, they could be carried.'
key1 = libnacl.utils.rand_nonce()
key2 = libnacl.utils.rand_nonce()
khash1_1 = libnacl.blake.Blake2b(msg1, key1).digest()
khash1_1_2 = libnacl.blake.Blake2b(msg1, key1).digest()
khash1_2 = libnacl.blake.Blake2b(msg1, key2).digest()
khash2_1 = libnacl.blake.Blake2b(msg2, key1).digest()
khash2_2 = libnacl.blake.Blake2b(msg2, key2).digest()
self.assertNotEqual(msg1, khash1_1)
self.assertNotEqual(msg1, khash1_2)
self.assertNotEqual(msg2, khash2_1)
self.assertNotEqual(msg2, khash2_2)
self.assertNotEqual(khash1_1, khash1_2)
self.assertNotEqual(khash2_1, khash2_2)
self.assertNotEqual(khash1_1, khash2_1)
self.assertNotEqual(khash1_2, khash2_2)
self.assertEqual(khash1_1, khash1_1_2)
|
Add tests for high level blake hash class# Import nacl libs
import libnacl.blake
import libnacl.utils
# Import python libs
import unittest
class TestBlake(unittest.TestCase):
'''
Test blake hash functions
'''
def test_keyless_blake(self):
msg1 = b'Are you suggesting coconuts migrate?'
msg2 = b'Not at all, they could be carried.'
chash1 = libnacl.crypto_generichash(msg1)
chash2 = libnacl.crypto_generichash(msg2)
self.assertNotEqual(msg1, chash1)
self.assertNotEqual(msg2, chash2)
self.assertNotEqual(chash2, chash1)
def test_key_blake(self):
msg1 = b'Are you suggesting coconuts migrate?'
msg2 = b'Not at all, they could be carried.'
key1 = libnacl.utils.rand_nonce()
key2 = libnacl.utils.rand_nonce()
khash1_1 = libnacl.blake.Blake2b(msg1, key1).digest()
khash1_1_2 = libnacl.blake.Blake2b(msg1, key1).digest()
khash1_2 = libnacl.blake.Blake2b(msg1, key2).digest()
khash2_1 = libnacl.blake.Blake2b(msg2, key1).digest()
khash2_2 = libnacl.blake.Blake2b(msg2, key2).digest()
self.assertNotEqual(msg1, khash1_1)
self.assertNotEqual(msg1, khash1_2)
self.assertNotEqual(msg2, khash2_1)
self.assertNotEqual(msg2, khash2_2)
self.assertNotEqual(khash1_1, khash1_2)
self.assertNotEqual(khash2_1, khash2_2)
self.assertNotEqual(khash1_1, khash2_1)
self.assertNotEqual(khash1_2, khash2_2)
self.assertEqual(khash1_1, khash1_1_2)
|
<commit_before><commit_msg>Add tests for high level blake hash class<commit_after># Import nacl libs
import libnacl.blake
import libnacl.utils
# Import python libs
import unittest
class TestBlake(unittest.TestCase):
'''
Test blake hash functions
'''
def test_keyless_blake(self):
msg1 = b'Are you suggesting coconuts migrate?'
msg2 = b'Not at all, they could be carried.'
chash1 = libnacl.crypto_generichash(msg1)
chash2 = libnacl.crypto_generichash(msg2)
self.assertNotEqual(msg1, chash1)
self.assertNotEqual(msg2, chash2)
self.assertNotEqual(chash2, chash1)
def test_key_blake(self):
msg1 = b'Are you suggesting coconuts migrate?'
msg2 = b'Not at all, they could be carried.'
key1 = libnacl.utils.rand_nonce()
key2 = libnacl.utils.rand_nonce()
khash1_1 = libnacl.blake.Blake2b(msg1, key1).digest()
khash1_1_2 = libnacl.blake.Blake2b(msg1, key1).digest()
khash1_2 = libnacl.blake.Blake2b(msg1, key2).digest()
khash2_1 = libnacl.blake.Blake2b(msg2, key1).digest()
khash2_2 = libnacl.blake.Blake2b(msg2, key2).digest()
self.assertNotEqual(msg1, khash1_1)
self.assertNotEqual(msg1, khash1_2)
self.assertNotEqual(msg2, khash2_1)
self.assertNotEqual(msg2, khash2_2)
self.assertNotEqual(khash1_1, khash1_2)
self.assertNotEqual(khash2_1, khash2_2)
self.assertNotEqual(khash1_1, khash2_1)
self.assertNotEqual(khash1_2, khash2_2)
self.assertEqual(khash1_1, khash1_1_2)
|
|
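A quick sketch of the property test_key_blake asserts above: keyed Blake2b digests are deterministic per (message, key) pair and diverge across keys. Only calls already present in the record are used.

import libnacl.blake
import libnacl.utils

msg = b'Are you suggesting coconuts migrate?'
key = libnacl.utils.rand_nonce()
d1 = libnacl.blake.Blake2b(msg, key).digest()
d2 = libnacl.blake.Blake2b(msg, key).digest()
d3 = libnacl.blake.Blake2b(msg, libnacl.utils.rand_nonce()).digest()
assert d1 == d2  # same message, same key -> same digest
assert d1 != d3  # same message, different key -> different digest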
bbe1c2aed3e6da1bd95f856e37c85e4562d6c1f0
|
crmapp/urls.py
|
crmapp/urls.py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from marketing.views import HomePage
from accounts.views import AccountList
from accounts.urls import account_urls
from contacts.urls import contact_urls
urlpatterns = patterns('',
# Marketing pages
url(r'^$', HomePage.as_view(), name="home"),
# Subscriber related URLs
url(r'^signup/$',
'crmapp.subscribers.views.subscriber_new', name='sub_new'
),
# Admin URL
(r'^admin/', include(admin.site.urls)),
# Login/Logout URLs
(r'^login/$',
'django.contrib.auth.views.login', {'template_name': 'login.html'}
),
(r'^logout/$',
'django.contrib.auth.views.logout', {'next_page': '/login/'}
),
# Account related URLs
url(r'^account/new/$',
'crmapp.accounts.views.account_cru', name='account_new'
),
url(r'^account/list/$',
AccountList.as_view(), name='account_list'
),
url(r'^account/(?P<uuid>[\w-]+)/', include(account_urls)),
# Contact related URLs
url(r'^contact/(?P<uuid>[\w-]+)/', include(contact_urls)),
# Communication related URLs
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from marketing.views import HomePage
from accounts.views import AccountList
from accounts.urls import account_urls
from contacts.urls import contact_urls
urlpatterns = patterns('',
# Marketing pages
url(r'^$', HomePage.as_view(), name="home"),
# Subscriber related URLs
url(r'^signup/$',
'crmapp.subscribers.views.subscriber_new', name='sub_new'
),
# Admin URL
(r'^admin/', include(admin.site.urls)),
# Login/Logout URLs
(r'^login/$',
'django.contrib.auth.views.login', {'template_name': 'login.html'}
),
(r'^logout/$',
'django.contrib.auth.views.logout', {'next_page': '/login/'}
),
# Account related URLs
url(r'^account/new/$',
'crmapp.accounts.views.account_cru', name='account_new'
),
url(r'^account/list/$',
AccountList.as_view(), name='account_list'
),
url(r'^account/(?P<uuid>[\w-]+)/', include(account_urls)),
# Contact related URLs
url(r'^contact/new/$',
'crmapp.contacts.views.contact_cru', name='contact_new'
),
url(r'^contact/(?P<uuid>[\w-]+)/', include(contact_urls)),
# Communication related URLs
)
|
Create the Contacts App - Part II > New Contact - Create URL
|
Create the Contacts App - Part II > New Contact - Create URL
|
Python
|
mit
|
tabdon/crmeasyapp,deenaariff/Django,tabdon/crmeasyapp
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from marketing.views import HomePage
from accounts.views import AccountList
from accounts.urls import account_urls
from contacts.urls import contact_urls
urlpatterns = patterns('',
# Marketing pages
url(r'^$', HomePage.as_view(), name="home"),
# Subscriber related URLs
url(r'^signup/$',
'crmapp.subscribers.views.subscriber_new', name='sub_new'
),
# Admin URL
(r'^admin/', include(admin.site.urls)),
# Login/Logout URLs
(r'^login/$',
'django.contrib.auth.views.login', {'template_name': 'login.html'}
),
(r'^logout/$',
'django.contrib.auth.views.logout', {'next_page': '/login/'}
),
# Account related URLs
url(r'^account/new/$',
'crmapp.accounts.views.account_cru', name='account_new'
),
url(r'^account/list/$',
AccountList.as_view(), name='account_list'
),
url(r'^account/(?P<uuid>[\w-]+)/', include(account_urls)),
# Contact related URLs
url(r'^contact/(?P<uuid>[\w-]+)/', include(contact_urls)),
# Communication related URLs
)Create the Contacts App - Part II > New Contact - Create URL
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from marketing.views import HomePage
from accounts.views import AccountList
from accounts.urls import account_urls
from contacts.urls import contact_urls
urlpatterns = patterns('',
# Marketing pages
url(r'^$', HomePage.as_view(), name="home"),
# Subscriber related URLs
url(r'^signup/$',
'crmapp.subscribers.views.subscriber_new', name='sub_new'
),
# Admin URL
(r'^admin/', include(admin.site.urls)),
# Login/Logout URLs
(r'^login/$',
'django.contrib.auth.views.login', {'template_name': 'login.html'}
),
(r'^logout/$',
'django.contrib.auth.views.logout', {'next_page': '/login/'}
),
# Account related URLs
url(r'^account/new/$',
'crmapp.accounts.views.account_cru', name='account_new'
),
url(r'^account/list/$',
AccountList.as_view(), name='account_list'
),
url(r'^account/(?P<uuid>[\w-]+)/', include(account_urls)),
# Contact related URLs
url(r'^contact/new/$',
'crmapp.contacts.views.contact_cru', name='contact_new'
),
url(r'^contact/(?P<uuid>[\w-]+)/', include(contact_urls)),
# Communication related URLs
)
|
<commit_before>from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from marketing.views import HomePage
from accounts.views import AccountList
from accounts.urls import account_urls
from contacts.urls import contact_urls
urlpatterns = patterns('',
# Marketing pages
url(r'^$', HomePage.as_view(), name="home"),
# Subscriber related URLs
url(r'^signup/$',
'crmapp.subscribers.views.subscriber_new', name='sub_new'
),
# Admin URL
(r'^admin/', include(admin.site.urls)),
# Login/Logout URLs
(r'^login/$',
'django.contrib.auth.views.login', {'template_name': 'login.html'}
),
(r'^logout/$',
'django.contrib.auth.views.logout', {'next_page': '/login/'}
),
# Account related URLs
url(r'^account/new/$',
'crmapp.accounts.views.account_cru', name='account_new'
),
url(r'^account/list/$',
AccountList.as_view(), name='account_list'
),
url(r'^account/(?P<uuid>[\w-]+)/', include(account_urls)),
# Contact related URLS
url(r'^contact/(?P<uuid>[\w-]+)/', include(contact_urls)),
# Communication related URLs
)<commit_msg>Create the Contacts App - Part II > New Contact - Create URL<commit_after>
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from marketing.views import HomePage
from accounts.views import AccountList
from accounts.urls import account_urls
from contacts.urls import contact_urls
urlpatterns = patterns('',
# Marketing pages
url(r'^$', HomePage.as_view(), name="home"),
# Subscriber related URLs
url(r'^signup/$',
'crmapp.subscribers.views.subscriber_new', name='sub_new'
),
# Admin URL
(r'^admin/', include(admin.site.urls)),
# Login/Logout URLs
(r'^login/$',
'django.contrib.auth.views.login', {'template_name': 'login.html'}
),
(r'^logout/$',
'django.contrib.auth.views.logout', {'next_page': '/login/'}
),
# Account related URLs
url(r'^account/new/$',
'crmapp.accounts.views.account_cru', name='account_new'
),
url(r'^account/list/$',
AccountList.as_view(), name='account_list'
),
url(r'^account/(?P<uuid>[\w-]+)/', include(account_urls)),
# Contact related URLs
url(r'^contact/new/$',
'crmapp.contacts.views.contact_cru', name='contact_new'
),
url(r'^contact/(?P<uuid>[\w-]+)/', include(contact_urls)),
# Communication related URLs
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from marketing.views import HomePage
from accounts.views import AccountList
from accounts.urls import account_urls
from contacts.urls import contact_urls
urlpatterns = patterns('',
# Marketing pages
url(r'^$', HomePage.as_view(), name="home"),
# Subscriber related URLs
url(r'^signup/$',
'crmapp.subscribers.views.subscriber_new', name='sub_new'
),
# Admin URL
(r'^admin/', include(admin.site.urls)),
# Login/Logout URLs
(r'^login/$',
'django.contrib.auth.views.login', {'template_name': 'login.html'}
),
(r'^logout/$',
'django.contrib.auth.views.logout', {'next_page': '/login/'}
),
# Account related URLs
url(r'^account/new/$',
'crmapp.accounts.views.account_cru', name='account_new'
),
url(r'^account/list/$',
AccountList.as_view(), name='account_list'
),
url(r'^account/(?P<uuid>[\w-]+)/', include(account_urls)),
# Contact related URLS
url(r'^contact/(?P<uuid>[\w-]+)/', include(contact_urls)),
# Communication related URLs
)Create the Contacts App - Part II > New Contact - Create URLfrom django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from marketing.views import HomePage
from accounts.views import AccountList
from accounts.urls import account_urls
from contacts.urls import contact_urls
urlpatterns = patterns('',
# Marketing pages
url(r'^$', HomePage.as_view(), name="home"),
# Subscriber related URLs
url(r'^signup/$',
'crmapp.subscribers.views.subscriber_new', name='sub_new'
),
# Admin URL
(r'^admin/', include(admin.site.urls)),
# Login/Logout URLs
(r'^login/$',
'django.contrib.auth.views.login', {'template_name': 'login.html'}
),
(r'^logout/$',
'django.contrib.auth.views.logout', {'next_page': '/login/'}
),
# Account related URLs
url(r'^account/new/$',
'crmapp.accounts.views.account_cru', name='account_new'
),
url(r'^account/list/$',
AccountList.as_view(), name='account_list'
),
url(r'^account/(?P<uuid>[\w-]+)/', include(account_urls)),
# Contact related URLS
url(r'^contact/new/$',
'crmapp.contacts.views.contact_cru', name='contact_new'
),
url(r'^contact/(?P<uuid>[\w-]+)/', include(contact_urls)),
# Communication related URLs
)
|
<commit_before>from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from marketing.views import HomePage
from accounts.views import AccountList
from accounts.urls import account_urls
from contacts.urls import contact_urls
urlpatterns = patterns('',
# Marketing pages
url(r'^$', HomePage.as_view(), name="home"),
# Subscriber related URLs
url(r'^signup/$',
'crmapp.subscribers.views.subscriber_new', name='sub_new'
),
# Admin URL
(r'^admin/', include(admin.site.urls)),
# Login/Logout URLs
(r'^login/$',
'django.contrib.auth.views.login', {'template_name': 'login.html'}
),
(r'^logout/$',
'django.contrib.auth.views.logout', {'next_page': '/login/'}
),
# Account related URLs
url(r'^account/new/$',
'crmapp.accounts.views.account_cru', name='account_new'
),
url(r'^account/list/$',
AccountList.as_view(), name='account_list'
),
url(r'^account/(?P<uuid>[\w-]+)/', include(account_urls)),
# Contact related URLS
url(r'^contact/(?P<uuid>[\w-]+)/', include(contact_urls)),
# Communication related URLs
)<commit_msg>Create the Contacts App - Part II > New Contact - Create URL<commit_after>from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from marketing.views import HomePage
from accounts.views import AccountList
from accounts.urls import account_urls
from contacts.urls import contact_urls
urlpatterns = patterns('',
# Marketing pages
url(r'^$', HomePage.as_view(), name="home"),
# Subscriber related URLs
url(r'^signup/$',
'crmapp.subscribers.views.subscriber_new', name='sub_new'
),
# Admin URL
(r'^admin/', include(admin.site.urls)),
# Login/Logout URLs
(r'^login/$',
'django.contrib.auth.views.login', {'template_name': 'login.html'}
),
(r'^logout/$',
'django.contrib.auth.views.logout', {'next_page': '/login/'}
),
# Account related URLs
url(r'^account/new/$',
'crmapp.accounts.views.account_cru', name='account_new'
),
url(r'^account/list/$',
AccountList.as_view(), name='account_list'
),
url(r'^account/(?P<uuid>[\w-]+)/', include(account_urls)),
# Contact related URLS
url(r'^contact/new/$',
'crmapp.contacts.views.contact_cru', name='contact_new'
),
url(r'^contact/(?P<uuid>[\w-]+)/', include(contact_urls)),
# Communication related URLs
)
|
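Since the commit only adds a named URL pattern, here is a short sketch of how that name resolves elsewhere in a Django 1.x project, matching the patterns()-era API used above; the resulting paths follow from the regexes in the URLconf.

from django.core.urlresolvers import reverse  # 1.x-era import path

new_contact_url = reverse('contact_new')    # -> '/contact/new/'
account_list_url = reverse('account_list')  # -> '/account/list/'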
8ccf9d141a7524d1450a883c9a7a853d7863e423
|
us_ignite/dummy/locations.py
|
us_ignite/dummy/locations.py
|
from __future__ import division
import math
import random
# Geographic centre of the USA:
longitude = float(-98.6419404)
latitude = float(39.8281418)
def get_location(radius=90):
lng_min = longitude - radius / abs(math.cos(math.radians(latitude)) * 69)
lng_max = longitude + radius / abs(math.cos(math.radians(latitude)) * 69)
lat_min = latitude - (radius / 69)
lat_max = latitude + (radius / 69)
lng = random.triangular(lng_min, lng_max)
lat = random.triangular(lat_min, lat_max)
return [lng, lat]
|
Add component to generate random lng/lat points.
|
Add component to generate random lng/lat points.
|
Python
|
bsd-3-clause
|
us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite
|
Add component to generate random lng/lat points.
|
from __future__ import division
import math
import random
# Geographic centre of the USA:
longitude = float(-98.6419404)
latitude = float(39.8281418)
def get_location(radius=90):
lng_min = longitude - radius / abs(math.cos(math.radians(latitude)) * 69)
lng_max = longitude + radius / abs(math.cos(math.radians(latitude)) * 69)
lat_min = latitude - (radius / 69)
lat_max = latitude + (radius / 69)
lng = random.triangular(lng_min, lng_max)
lat = random.triangular(lat_min, lat_max)
return [lng, lat]
|
<commit_before><commit_msg>Add component to generate random lng/lat points.<commit_after>
|
from __future__ import division
import math
import random
# Geographic centre of the USA:
longitude = float(39.8281418)
latitude = float(-98.6419404)
def get_location(radius=90):
lng_min = longitude - radius / abs(math.cos(math.radians(latitude)) * 69)
lng_max = longitude + radius / abs(math.cos(math.radians(latitude)) * 69)
lat_min = latitude - (radius / 69)
lat_max = latitude + (radius / 69)
lng = random.triangular(lng_min, lng_max)
lat = random.triangular(lat_min, lat_max)
return [lng, lat]
|
Add component to generate random lng/lat points.from __future__ import division
import math
import random
# Geographic centre of the USA:
longitude = float(-98.6419404)
latitude = float(39.8281418)
def get_location(radius=90):
lng_min = longitude - radius / abs(math.cos(math.radians(latitude)) * 69)
lng_max = longitude + radius / abs(math.cos(math.radians(latitude)) * 69)
lat_min = latitude - (radius / 69)
lat_max = latitude + (radius / 69)
lng = random.triangular(lng_min, lng_max)
lat = random.triangular(lat_min, lat_max)
return [lng, lat]
|
<commit_before><commit_msg>Add component to generate random lng/lat points.<commit_after>from __future__ import division
import math
import random
# Geographic centre of the USA:
longitude = float(-98.6419404)
latitude = float(39.8281418)
def get_location(radius=90):
lng_min = longitude - radius / abs(math.cos(math.radians(latitude)) * 69)
lng_max = longitude + radius / abs(math.cos(math.radians(latitude)) * 69)
lat_min = latitude - (radius / 69)
lat_max = latitude + (radius / 69)
lng = random.triangular(lng_min, lng_max)
lat = random.triangular(lat_min, lat_max)
return [lng, lat]
|
|
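A usage sketch for the helper above. Note that random.triangular(low, high) peaks at the midpoint, so samples cluster near the centre rather than spreading uniformly across the box; the import path is assumed from the record's new_file field.

import random
from us_ignite.dummy.locations import get_location  # path assumed

random.seed(1)  # repeatable output for the example
for _ in range(3):
    lng, lat = get_location(radius=90)
    print('lng=%.4f lat=%.4f' % (lng, lat))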
aa9e4547fafb204af2815d3af97b5b716ba1164e
|
mysite/search/tests.py
|
mysite/search/tests.py
|
import django.test
from search.models import Project
class NonJavascriptSearch(django.test.TestCase):
fixtures = ['bugs-for-two-projects.json']
def testSearch(self):
response = self.client.get('/search/')
for n in range(1, 11):
self.assertContains(response, 'Title #%d' % n)
self.assertContains(response, 'Description #%d' % n)
|
Add a trivial empty-search test
|
Add a trivial empty-search test
|
Python
|
agpl-3.0
|
campbe13/openhatch,openhatch/oh-mainline,willingc/oh-mainline,sudheesh001/oh-mainline,mzdaniel/oh-mainline,onceuponatimeforever/oh-mainline,moijes12/oh-mainline,vipul-sharma20/oh-mainline,moijes12/oh-mainline,waseem18/oh-mainline,ojengwa/oh-mainline,eeshangarg/oh-mainline,heeraj123/oh-mainline,onceuponatimeforever/oh-mainline,onceuponatimeforever/oh-mainline,ehashman/oh-mainline,SnappleCap/oh-mainline,sudheesh001/oh-mainline,campbe13/openhatch,eeshangarg/oh-mainline,heeraj123/oh-mainline,campbe13/openhatch,SnappleCap/oh-mainline,openhatch/oh-mainline,willingc/oh-mainline,eeshangarg/oh-mainline,ehashman/oh-mainline,SnappleCap/oh-mainline,mzdaniel/oh-mainline,Changaco/oh-mainline,willingc/oh-mainline,heeraj123/oh-mainline,moijes12/oh-mainline,ehashman/oh-mainline,mzdaniel/oh-mainline,ehashman/oh-mainline,mzdaniel/oh-mainline,eeshangarg/oh-mainline,willingc/oh-mainline,jledbetter/openhatch,onceuponatimeforever/oh-mainline,jledbetter/openhatch,nirmeshk/oh-mainline,heeraj123/oh-mainline,nirmeshk/oh-mainline,heeraj123/oh-mainline,ojengwa/oh-mainline,sudheesh001/oh-mainline,jledbetter/openhatch,waseem18/oh-mainline,waseem18/oh-mainline,SnappleCap/oh-mainline,jledbetter/openhatch,vipul-sharma20/oh-mainline,nirmeshk/oh-mainline,mzdaniel/oh-mainline,openhatch/oh-mainline,vipul-sharma20/oh-mainline,moijes12/oh-mainline,jledbetter/openhatch,campbe13/openhatch,sudheesh001/oh-mainline,SnappleCap/oh-mainline,willingc/oh-mainline,mzdaniel/oh-mainline,openhatch/oh-mainline,ojengwa/oh-mainline,waseem18/oh-mainline,openhatch/oh-mainline,vipul-sharma20/oh-mainline,Changaco/oh-mainline,ehashman/oh-mainline,sudheesh001/oh-mainline,onceuponatimeforever/oh-mainline,Changaco/oh-mainline,ojengwa/oh-mainline,eeshangarg/oh-mainline,vipul-sharma20/oh-mainline,waseem18/oh-mainline,nirmeshk/oh-mainline,mzdaniel/oh-mainline,nirmeshk/oh-mainline,ojengwa/oh-mainline,campbe13/openhatch,Changaco/oh-mainline,moijes12/oh-mainline,Changaco/oh-mainline
|
Add a trivial empty-search test
|
import django.test
from search.models import Project
class NonJavascriptSearch(django.test.TestCase):
fixtures = ['bugs-for-two-projects.json']
def testSearch(self):
response = self.client.get('/search/')
for n in range(1, 11):
self.assertContains(response, 'Title #%d' % n)
self.assertContains(response, 'Description #%d' % n)
|
<commit_before><commit_msg>Add a trivial empty-search test<commit_after>
|
import django.test
from search.models import Project
class NonJavascriptSearch(django.test.TestCase):
fixtures = ['bugs-for-two-projects.json']
def testSearch(self):
response = self.client.get('/search/')
for n in range(1, 11):
self.assertContains(response, 'Title #%d' % n)
self.assertContains(response, 'Description #%d' % n)
|
Add a trivial empty-search testimport django.test
from search.models import Project
class NonJavascriptSearch(django.test.TestCase):
fixtures = ['bugs-for-two-projects.json']
def testSearch(self):
response = self.client.get('/search/')
for n in range(1, 11):
self.assertContains(response, 'Title #%d' % n)
self.assertContains(response, 'Description #%d' % n)
|
<commit_before><commit_msg>Add a trivial empty-search test<commit_after>import django.test
from search.models import Project
class NonJavascriptSearch(django.test.TestCase):
fixtures = ['bugs-for-two-projects.json']
def testSearch(self):
response = self.client.get('/search/')
for n in range(1, 11):
self.assertContains(response, 'Title #%d' % n)
self.assertContains(response, 'Description #%d' % n)
|
|
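The record's test is fixture-driven; a companion status-code smoke check in the same style could look like this (fixture name reused from the record).

import django.test

class EmptySearchSmokeTest(django.test.TestCase):
    fixtures = ['bugs-for-two-projects.json']

    def test_search_page_loads(self):
        # an empty query should still render the search page
        response = self.client.get('/search/')
        self.assertEqual(response.status_code, 200)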
272d81e2efaf94cc60ba5ecf95882fd5182c5cc8
|
tests/sentry/web/frontend/test_organization_audit_log.py
|
tests/sentry/web/frontend/test_organization_audit_log.py
|
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.models import AuditLogEntry, AuditLogEntryEvent
from sentry.testutils import TestCase
class OrganizationAuditLogTest(TestCase):
def test_renders_with_context(self):
organization = self.create_organization(name='foo', owner=self.user)
team = self.create_team(organization=organization)
project = self.create_project(team=team)
path = reverse('sentry-organization-audit-log', args=[organization.slug])
AuditLogEntry.objects.create(
organization=organization,
actor=self.user,
event=AuditLogEntryEvent.ORG_ADD,
)
self.login_as(self.user)
resp = self.client.get(path)
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'sentry/organization-audit-log.html')
assert resp.context['organization'] == organization
assert len(resp.context['audit_log_queryset']) == 1
|
Add test for audit log
|
Add test for audit log
|
Python
|
bsd-3-clause
|
JTCunning/sentry,gg7/sentry,ngonzalvez/sentry,korealerts1/sentry,kevinlondon/sentry,looker/sentry,nicholasserra/sentry,1tush/sentry,looker/sentry,vperron/sentry,llonchj/sentry,beeftornado/sentry,Kryz/sentry,llonchj/sentry,wujuguang/sentry,BuildingLink/sentry,fotinakis/sentry,fuziontech/sentry,songyi199111/sentry,JamesMura/sentry,jean/sentry,BuildingLink/sentry,jokey2k/sentry,felixbuenemann/sentry,ifduyue/sentry,jokey2k/sentry,JackDanger/sentry,daevaorn/sentry,BuildingLink/sentry,beeftornado/sentry,alexm92/sentry,JamesMura/sentry,gencer/sentry,kevinastone/sentry,pauloschilling/sentry,TedaLIEz/sentry,korealerts1/sentry,gencer/sentry,TedaLIEz/sentry,felixbuenemann/sentry,felixbuenemann/sentry,ewdurbin/sentry,BuildingLink/sentry,Kryz/sentry,wong2/sentry,drcapulet/sentry,daevaorn/sentry,mvaled/sentry,boneyao/sentry,fotinakis/sentry,zenefits/sentry,argonemyth/sentry,ifduyue/sentry,llonchj/sentry,kevinastone/sentry,argonemyth/sentry,ifduyue/sentry,BayanGroup/sentry,TedaLIEz/sentry,zenefits/sentry,boneyao/sentry,songyi199111/sentry,zenefits/sentry,zenefits/sentry,looker/sentry,JTCunning/sentry,gencer/sentry,imankulov/sentry,mvaled/sentry,daevaorn/sentry,songyi199111/sentry,fuziontech/sentry,mitsuhiko/sentry,JamesMura/sentry,JTCunning/sentry,imankulov/sentry,korealerts1/sentry,JackDanger/sentry,imankulov/sentry,wong2/sentry,JamesMura/sentry,ngonzalvez/sentry,gencer/sentry,daevaorn/sentry,Natim/sentry,jean/sentry,ifduyue/sentry,Natim/sentry,fuziontech/sentry,BuildingLink/sentry,fotinakis/sentry,kevinlondon/sentry,BayanGroup/sentry,vperron/sentry,drcapulet/sentry,beeftornado/sentry,looker/sentry,gg7/sentry,hongliang5623/sentry,camilonova/sentry,kevinlondon/sentry,vperron/sentry,argonemyth/sentry,jokey2k/sentry,ifduyue/sentry,hongliang5623/sentry,Natim/sentry,ewdurbin/sentry,zenefits/sentry,mitsuhiko/sentry,jean/sentry,1tush/sentry,pauloschilling/sentry,looker/sentry,wujuguang/sentry,gencer/sentry,ngonzalvez/sentry,camilonova/sentry,nicholasserra/sentry,camilonova/sentry,mvaled/sentry,gg7/sentry,Kryz/sentry,pauloschilling/sentry,fotinakis/sentry,mvaled/sentry,wong2/sentry,JackDanger/sentry,jean/sentry,alexm92/sentry,JamesMura/sentry,nicholasserra/sentry,jean/sentry,mvaled/sentry,ewdurbin/sentry,mvaled/sentry,hongliang5623/sentry,boneyao/sentry,alexm92/sentry,1tush/sentry,BayanGroup/sentry,drcapulet/sentry,wujuguang/sentry,kevinastone/sentry
|
Add test for audit log
|
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.models import AuditLogEntry, AuditLogEntryEvent
from sentry.testutils import TestCase
class OrganizationAuditLogTest(TestCase):
def test_renders_with_context(self):
organization = self.create_organization(name='foo', owner=self.user)
team = self.create_team(organization=organization)
project = self.create_project(team=team)
path = reverse('sentry-organization-audit-log', args=[organization.slug])
AuditLogEntry.objects.create(
organization=organization,
actor=self.user,
event=AuditLogEntryEvent.ORG_ADD,
)
self.login_as(self.user)
resp = self.client.get(path)
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'sentry/organization-audit-log.html')
assert resp.context['organization'] == organization
assert len(resp.context['audit_log_queryset']) == 1
|
<commit_before><commit_msg>Add test for audit log<commit_after>
|
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.models import AuditLogEntry, AuditLogEntryEvent
from sentry.testutils import TestCase
class OrganizationAuditLogTest(TestCase):
def test_renders_with_context(self):
organization = self.create_organization(name='foo', owner=self.user)
team = self.create_team(organization=organization)
project = self.create_project(team=team)
path = reverse('sentry-organization-audit-log', args=[organization.slug])
AuditLogEntry.objects.create(
organization=organization,
actor=self.user,
event=AuditLogEntryEvent.ORG_ADD,
)
self.login_as(self.user)
resp = self.client.get(path)
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'sentry/organization-audit-log.html')
assert resp.context['organization'] == organization
assert len(resp.context['audit_log_queryset']) == 1
|
Add test for audit logfrom __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.models import AuditLogEntry, AuditLogEntryEvent
from sentry.testutils import TestCase
class OrganizationAuditLogTest(TestCase):
def test_renders_with_context(self):
organization = self.create_organization(name='foo', owner=self.user)
team = self.create_team(organization=organization)
project = self.create_project(team=team)
path = reverse('sentry-organization-audit-log', args=[organization.slug])
AuditLogEntry.objects.create(
organization=organization,
actor=self.user,
event=AuditLogEntryEvent.ORG_ADD,
)
self.login_as(self.user)
resp = self.client.get(path)
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'sentry/organization-audit-log.html')
assert resp.context['organization'] == organization
assert len(resp.context['audit_log_queryset']) == 1
|
<commit_before><commit_msg>Add test for audit log<commit_after>from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.models import AuditLogEntry, AuditLogEntryEvent
from sentry.testutils import TestCase
class OrganizationAuditLogTest(TestCase):
def test_renders_with_context(self):
organization = self.create_organization(name='foo', owner=self.user)
team = self.create_team(organization=organization)
project = self.create_project(team=team)
path = reverse('sentry-organization-audit-log', args=[organization.slug])
AuditLogEntry.objects.create(
organization=organization,
actor=self.user,
event=AuditLogEntryEvent.ORG_ADD,
)
self.login_as(self.user)
resp = self.client.get(path)
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'sentry/organization-audit-log.html')
assert resp.context['organization'] == organization
assert len(resp.context['audit_log_queryset']) == 1
|
|
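For context, a sketch of the queryset the view under test presumably exposes as audit_log_queryset; the organization filter follows the models used in the record, while the ordering field is an assumption.

from sentry.models import AuditLogEntry

def audit_log_queryset(organization):
    # '-datetime' ordering is assumed, not taken from the record
    return AuditLogEntry.objects.filter(
        organization=organization).order_by('-datetime')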
ea5bcdb8864fe326fcaa66f43313311d954ed759
|
arx/sources/test/s3.py
|
arx/sources/test/s3.py
|
import pytest
from ...decorators import InvalidScheme
from ..s3 import S3, S3Jar, S3Tar, Invalid
def test_s3():
src = S3('s3://bucket/key')
assert src.authority == 'bucket'
assert src.path == '/key'
assert src.fragment is None
with pytest.raises(Invalid):
src = S3('s3://bucket/key#pieces')
with pytest.raises(InvalidScheme):
src = S3('tar+s3://bucket/key')
with pytest.raises(InvalidScheme):
src = S3('jar+s3://bucket/key')
def test_tar():
src = S3Tar('tar+s3://bucket/key.tbz')
assert src.scheme == 'tar+s3'
assert src.authority == 'bucket'
assert src.path == '/key.tbz'
with pytest.raises(InvalidScheme):
src = S3Tar('https://aol.com/aol.tgz')
def test_jar():
src = S3Jar('jar+s3://bucket/key.jar')
assert src.scheme == 'jar+s3'
assert src.authority == 'bucket'
assert src.path == '/key.jar'
assert src.fragment is None
with pytest.raises(Invalid):
S3Jar('jar+s3://bucket/key.jar#web.xml')
with pytest.raises(InvalidScheme):
src = S3Jar('tar+s3://bucket/key')
with pytest.raises(InvalidScheme):
src = S3Jar('https://aol.com/web.jar')
|
Test S3 parsing and validation
|
Test S3 parsing and validation
|
Python
|
mit
|
drcloud/arx
|
Test S3 parsing and validation
|
import pytest
from ...decorators import InvalidScheme
from ..s3 import S3, S3Jar, S3Tar, Invalid
def test_s3():
src = S3('s3://bucket/key')
assert src.authority == 'bucket'
assert src.path == '/key'
assert src.fragment is None
with pytest.raises(Invalid):
src = S3('s3://bucket/key#pieces')
with pytest.raises(InvalidScheme):
src = S3('tar+s3://bucket/key')
with pytest.raises(InvalidScheme):
src = S3('jar+s3://bucket/key')
def test_tar():
src = S3Tar('tar+s3://bucket/key.tbz')
assert src.scheme == 'tar+s3'
assert src.authority == 'bucket'
assert src.path == '/key.tbz'
with pytest.raises(InvalidScheme):
src = S3Tar('https://aol.com/aol.tgz')
def test_jar():
src = S3Jar('jar+s3://bucket/key.jar')
assert src.scheme == 'jar+s3'
assert src.authority == 'bucket'
assert src.path == '/key.jar'
assert src.fragment is None
with pytest.raises(Invalid):
S3Jar('jar+s3://bucket/key.jar#web.xml')
with pytest.raises(InvalidScheme):
src = S3Jar('tar+s3://bucket/key')
with pytest.raises(InvalidScheme):
src = S3Jar('https://aol.com/web.jar')
|
<commit_before><commit_msg>Test S3 parsing and validation<commit_after>
|
import pytest
from ...decorators import InvalidScheme
from ..s3 import S3, S3Jar, S3Tar, Invalid
def test_http():
src = S3('s3://bucket/key')
assert src.authority == 'bucket'
assert src.path == '/key'
assert src.fragment is None
with pytest.raises(Invalid):
src = S3('s3://bucket/key#pieces')
with pytest.raises(InvalidScheme):
src = S3('tar+s3://bucket/key')
with pytest.raises(InvalidScheme):
src = S3('jar+s3://bucket/key')
def test_tar():
src = S3Tar('tar+s3://bucket/key.tbz')
assert src.scheme == 'tar+s3'
assert src.authority == 'bucket'
assert src.path == '/key.tbz'
with pytest.raises(InvalidScheme):
src = S3Tar('https://aol.com/aol.tgz')
def test_jar():
src = S3Jar('jar+s3://bucket/key.jar')
assert src.scheme == 'jar+s3'
assert src.authority == 'bucket'
assert src.path == '/key.jar'
assert src.fragment is None
with pytest.raises(Invalid):
S3Jar('jar+s3://bucket/key.jar#web.xml')
with pytest.raises(InvalidScheme):
src = S3Jar('tar+s3://bucket/key')
with pytest.raises(InvalidScheme):
src = S3Jar('https://aol.com/web.jar')
|
Test S3 parsing and validationimport pytest
from ...decorators import InvalidScheme
from ..s3 import S3, S3Jar, S3Tar, Invalid
def test_s3():
src = S3('s3://bucket/key')
assert src.authority == 'bucket'
assert src.path == '/key'
assert src.fragment is None
with pytest.raises(Invalid):
src = S3('s3://bucket/key#pieces')
with pytest.raises(InvalidScheme):
src = S3('tar+s3://bucket/key')
with pytest.raises(InvalidScheme):
src = S3('jar+s3://bucket/key')
def test_tar():
src = S3Tar('tar+s3://bucket/key.tbz')
assert src.scheme == 'tar+s3'
assert src.authority == 'bucket'
assert src.path == '/key.tbz'
with pytest.raises(InvalidScheme):
src = S3Tar('https://aol.com/aol.tgz')
def test_jar():
src = S3Jar('jar+s3://bucket/key.jar')
assert src.scheme == 'jar+s3'
assert src.authority == 'bucket'
assert src.path == '/key.jar'
assert src.fragment is None
with pytest.raises(Invalid):
S3Jar('jar+s3://bucket/key.jar#web.xml')
with pytest.raises(InvalidScheme):
src = S3Jar('tar+s3://bucket/key')
with pytest.raises(InvalidScheme):
src = S3Jar('https://aol.com/web.jar')
|
<commit_before><commit_msg>Test S3 parsing and validation<commit_after>import pytest
from ...decorators import InvalidScheme
from ..s3 import S3, S3Jar, S3Tar, Invalid
def test_s3():
src = S3('s3://bucket/key')
assert src.authority == 'bucket'
assert src.path == '/key'
assert src.fragment is None
with pytest.raises(Invalid):
src = S3('s3://bucket/key#pieces')
with pytest.raises(InvalidScheme):
src = S3('tar+s3://bucket/key')
with pytest.raises(InvalidScheme):
src = S3('jar+s3://bucket/key')
def test_tar():
src = S3Tar('tar+s3://bucket/key.tbz')
assert src.scheme == 'tar+s3'
assert src.authority == 'bucket'
assert src.path == '/key.tbz'
with pytest.raises(InvalidScheme):
src = S3Tar('https://aol.com/aol.tgz')
def test_jar():
src = S3Jar('jar+s3://bucket/key.jar')
assert src.scheme == 'jar+s3'
assert src.authority == 'bucket'
assert src.path == '/key.jar'
assert src.fragment is None
with pytest.raises(Invalid):
S3Jar('jar+s3://bucket/key.jar#web.xml')
with pytest.raises(InvalidScheme):
src = S3Jar('tar+s3://bucket/key')
with pytest.raises(InvalidScheme):
src = S3Jar('https://aol.com/web.jar')
|
|
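A usage sketch of the classes the tests above exercise; the absolute import path is assumed from the test module's relative imports.

from arx.sources.s3 import S3, S3Tar  # path assumed from '..s3'

src = S3('s3://bucket/some/key')
assert src.authority == 'bucket'
assert src.path == '/some/key'

tarball = S3Tar('tar+s3://bucket/archive.tbz')
assert tarball.scheme == 'tar+s3'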
466d7fb81e0b14c9f1f7bc573f5dd29373b2db04
|
comics/crawler/crawlers/threepanelsoul.py
|
comics/crawler/crawlers/threepanelsoul.py
|
from comics.crawler.crawlers import BaseComicCrawler
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.parse_feed('http://www.rsspect.com/rss/threeps.xml')
for entry in self.feed.entries:
if self.timestamp_to_date(entry.updated_parsed) == self.pub_date:
self.title = entry.title
pieces = entry.summary.split('"')
for i, piece in enumerate(pieces):
if piece.count('src='):
self.url = pieces[i + 1]
if piece.count('alt='):
self.text = pieces[i + 1]
if self.url and self.text:
return
|
Add 'Three Panel Soul' crawler
|
Add 'Three Panel Soul' crawler
|
Python
|
agpl-3.0
|
jodal/comics,datagutten/comics,datagutten/comics,klette/comics,jodal/comics,jodal/comics,klette/comics,klette/comics,datagutten/comics,jodal/comics,datagutten/comics
|
Add 'Three Panel Soul' crawler
|
from comics.crawler.crawlers import BaseComicCrawler
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.parse_feed('http://www.rsspect.com/rss/threeps.xml')
for entry in self.feed.entries:
if self.timestamp_to_date(entry.updated_parsed) == self.pub_date:
self.title = entry.title
pieces = entry.summary.split('"')
for i, piece in enumerate(pieces):
if piece.count('src='):
self.url = pieces[i + 1]
if piece.count('alt='):
self.text = pieces[i + 1]
if self.url and self.text:
return
|
<commit_before><commit_msg>Add 'Three Panel Soul' crawler<commit_after>
|
from comics.crawler.crawlers import BaseComicCrawler
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.parse_feed('http://www.rsspect.com/rss/threeps.xml')
for entry in self.feed.entries:
if self.timestamp_to_date(entry.updated_parsed) == self.pub_date:
self.title = entry.title
pieces = entry.summary.split('"')
for i, piece in enumerate(pieces):
if piece.count('src='):
self.url = pieces[i + 1]
if piece.count('alt='):
self.text = pieces[i + 1]
if self.url and self.text:
return
|
Add 'Three Panel Soul' crawlerfrom comics.crawler.crawlers import BaseComicCrawler
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.parse_feed('http://www.rsspect.com/rss/threeps.xml')
for entry in self.feed.entries:
if self.timestamp_to_date(entry.updated_parsed) == self.pub_date:
self.title = entry.title
pieces = entry.summary.split('"')
for i, piece in enumerate(pieces):
if piece.count('src='):
self.url = pieces[i + 1]
if piece.count('alt='):
self.text = pieces[i + 1]
if self.url and self.text:
return
|
<commit_before><commit_msg>Add 'Three Panel Soul' crawler<commit_after>from comics.crawler.crawlers import BaseComicCrawler
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.parse_feed('http://www.rsspect.com/rss/threeps.xml')
for entry in self.feed.entries:
if self.timestamp_to_date(entry.updated_parsed) == self.pub_date:
self.title = entry.title
pieces = entry.summary.split('"')
for i, piece in enumerate(pieces):
if piece.count('src='):
self.url = pieces[i + 1]
if piece.count('alt='):
self.text = pieces[i + 1]
if self.url and self.text:
return
|
|
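The crawler's extraction trick — splitting the feed summary on double quotes so each attribute value lands right after the piece naming it — works standalone; a sketch with a made-up summary string:

summary = '<img src="http://example.com/strip.png" alt="punchline" />'
pieces = summary.split('"')
url = text = None
for i, piece in enumerate(pieces):
    if piece.count('src='):
        url = pieces[i + 1]   # the quoted value follows 'src='
    if piece.count('alt='):
        text = pieces[i + 1]
assert url == 'http://example.com/strip.png'
assert text == 'punchline'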
606c94e5ccdd3310dbc450215a3ef140819a4623
|
pennathletics/parse.py
|
pennathletics/parse.py
|
"""Utilities for parsing pages"""
from collections import namedtuple
Home = namedtuple('Home', ['town', 'school'])
def ht_feet_to_inches(ht_str):
"""Take in height in ft-in format, and return inches
>>> ht_feet_to_inches("6-0")
72
"""
feet, inches = ht_str.split("-")
return int(feet) * 12 + int(inches)
def parse_hometown(hometown_str):
"""Take in athlete's hometown and parse it into hometown and previous
school
>>> parse_hometown("Newport Beach, Calif. (Orange Lutheran)")
Home(town="Newport Beach, Calif.", school="Orange Lutheran")
"""
town, school = hometown_str.split("(")
return Home(town[:-1], school[:-1])
|
Add some parsing util funcs
|
Add some parsing util funcs
|
Python
|
mit
|
pennlabs/pennathletics
|
Add some parsing util funcs
|
"""Utilities for parsing pages"""
from collections import namedtuple
Home = namedtuple('Home', ['town', 'school'])
def ht_feet_to_inches(ht_str):
"""Take in height in ft-in format, and return inches
>>> ht_feet_to_inches("6-0")
72
"""
feet, inches = ht_str.split("-")
return int(feet) * 12 + int(inches)
def parse_hometown(hometown_str):
"""Take in athlete's hometown and parse it into hometown and previous
school
>>> parse_hometown("Newport Beach, Calif. (Orange Lutheran)")
Home(town="Newport Beach, Calif.", school="Orange Lutheran")
"""
town, school = hometown_str.split("(")
return Home(town[:-1], school[:-1])
|
<commit_before><commit_msg>Add some parsing util funcs<commit_after>
|
"""Utilities for parsing pages"""
from collections import namedtuple
Home = namedtuple('Home', ['town', 'school'])
def ht_feet_to_inches(ht_str):
"""Take in height in ft-in format, and return inches
>>> ht_feet_to_inches("6-0")
72
"""
feet, inches = ht_str.split("-")
return int(feet) * 12 + int(inches)
def parse_hometown(hometown_str):
"""Take in athlete's hometown and parse it into hometown and previous
school
>>> parse_hometown("Newport Beach, Calif. (Orange Lutheran)")
Home(town="Newport Beach, Calif.", school="Orange Lutheran")
"""
town, school = hometown_str.split("(")
return Home(town[:-1], school[:-1])
|
Add some parsing util funcs"""Utilities for parsing pages"""
from collections import namedtuple
Home = namedtuple('Home', ['town', 'school'])
def ht_feet_to_inches(ht_str):
"""Take in height in ft-in format, and return inches
>>> ht_feet_to_inches("6-0")
72
"""
feet, inches = ht_str.split("-")
return int(feet) * 12 + int(inches)
def parse_hometown(hometown_str):
"""Take in athlete's hometown and parse it into hometown and previous
school
>>> parse_hometown("Newport Beach, Calif. (Orange Lutheran)")
Home(town="Newport Beach, Calif.", school="Orange Lutheran")
"""
town, school = hometown_str.split("(")
return Home(town[:-1], school[:-1])
|
<commit_before><commit_msg>Add some parsing util funcs<commit_after>"""Utilities for parsing pages"""
from collections import namedtuple
Home = namedtuple('Home', ['town', 'school'])
def ht_feet_to_inches(ht_str):
"""Take in height in ft-in format, and return inches
>>> ht_feet_to_inches("6-0")
72
"""
feet, inches = ht_str.split("-")
return int(feet) * 12 + int(inches)
def parse_hometown(hometown_str):
"""Take in athlete's hometown and parse it into hometown and previous
school
>>> parse_hometown("Newport Beach, Calif. (Orange Lutheran)")
Home(town="Newport Beach, Calif.", school="Orange Lutheran")
"""
town, school = hometown_str.split("(")
return Home(town[:-1], school[:-1])
|
|
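Because both helpers above embed doctest examples, they can be verified with the standard-library runner; the module path is taken from the record's new_file field.

import doctest
import pennathletics.parse

failures, attempted = doctest.testmod(pennathletics.parse, verbose=True)
assert failures == 0  # both docstring examples should pass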
ffebad0b3e23e03b650408e3486933317e9675e7
|
pysc2/tests/observer_test.py
|
pysc2/tests/observer_test.py
|
#!/usr/bin/python
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test that two built in bots can be watched by an observer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import absltest
from future.builtins import range # pylint: disable=redefined-builtin
from pysc2 import maps
from pysc2 import run_configs
from pysc2.tests import utils
from s2clientprotocol import common_pb2 as sc_common
from s2clientprotocol import sc2api_pb2 as sc_pb
class TestObserver(utils.TestCase):
def test_observer(self):
run_config = run_configs.get()
map_inst = maps.get("Simple64")
with run_config.start() as controller:
create = sc_pb.RequestCreateGame(local_map=sc_pb.LocalMap(
map_path=map_inst.path, map_data=map_inst.data(run_config)))
create.player_setup.add(
type=sc_pb.Computer, race=sc_common.Random, difficulty=sc_pb.VeryEasy)
create.player_setup.add(
type=sc_pb.Computer, race=sc_common.Random, difficulty=sc_pb.VeryHard)
create.player_setup.add(type=sc_pb.Observer)
controller.create_game(create)
join = sc_pb.RequestJoinGame(
options=sc_pb.InterfaceOptions(), # cheap observations
observed_player_id=0)
controller.join_game(join)
outcome = False
for _ in range(60 * 60): # 60 minutes should be plenty.
controller.step(16)
obs = controller.observe()
if obs.player_result:
print("Outcome after %s steps (%0.1f game minutes):" % (
obs.observation.game_loop, obs.observation.game_loop / (16 * 60)))
for r in obs.player_result:
print("Player %s: %s" % (r.player_id, sc_pb.Result.Name(r.result)))
outcome = True
break
self.assertTrue(outcome)
if __name__ == "__main__":
absltest.main()
|
Add a test that verifies you can observe two built in bots playing against each other.
|
Add a test that verifies you can observe two built in bots playing against each other.
PiperOrigin-RevId: 183666331
|
Python
|
apache-2.0
|
deepmind/pysc2
|
Add a test that verifies you can observe two built in bots playing against each other.
PiperOrigin-RevId: 183666331
|
#!/usr/bin/python
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test that two built in bots can be watched by an observer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import absltest
from future.builtins import range # pylint: disable=redefined-builtin
from pysc2 import maps
from pysc2 import run_configs
from pysc2.tests import utils
from s2clientprotocol import common_pb2 as sc_common
from s2clientprotocol import sc2api_pb2 as sc_pb
class TestObserver(utils.TestCase):
def test_observer(self):
run_config = run_configs.get()
map_inst = maps.get("Simple64")
with run_config.start() as controller:
create = sc_pb.RequestCreateGame(local_map=sc_pb.LocalMap(
map_path=map_inst.path, map_data=map_inst.data(run_config)))
create.player_setup.add(
type=sc_pb.Computer, race=sc_common.Random, difficulty=sc_pb.VeryEasy)
create.player_setup.add(
type=sc_pb.Computer, race=sc_common.Random, difficulty=sc_pb.VeryHard)
create.player_setup.add(type=sc_pb.Observer)
controller.create_game(create)
join = sc_pb.RequestJoinGame(
options=sc_pb.InterfaceOptions(), # cheap observations
observed_player_id=0)
controller.join_game(join)
outcome = False
for _ in range(60 * 60): # 60 minutes should be plenty.
controller.step(16)
obs = controller.observe()
if obs.player_result:
print("Outcome after %s steps (%0.1f game minutes):" % (
obs.observation.game_loop, obs.observation.game_loop / (16 * 60)))
for r in obs.player_result:
print("Player %s: %s" % (r.player_id, sc_pb.Result.Name(r.result)))
outcome = True
break
self.assertTrue(outcome)
if __name__ == "__main__":
absltest.main()
|
<commit_before><commit_msg>Add a test that verifies you can observe two built in bots playing against each other.
PiperOrigin-RevId: 183666331<commit_after>
|
#!/usr/bin/python
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test that two built in bots can be watched by an observer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import absltest
from future.builtins import range # pylint: disable=redefined-builtin
from pysc2 import maps
from pysc2 import run_configs
from pysc2.tests import utils
from s2clientprotocol import common_pb2 as sc_common
from s2clientprotocol import sc2api_pb2 as sc_pb
class TestObserver(utils.TestCase):
def test_observer(self):
run_config = run_configs.get()
map_inst = maps.get("Simple64")
with run_config.start() as controller:
create = sc_pb.RequestCreateGame(local_map=sc_pb.LocalMap(
map_path=map_inst.path, map_data=map_inst.data(run_config)))
create.player_setup.add(
type=sc_pb.Computer, race=sc_common.Random, difficulty=sc_pb.VeryEasy)
create.player_setup.add(
type=sc_pb.Computer, race=sc_common.Random, difficulty=sc_pb.VeryHard)
create.player_setup.add(type=sc_pb.Observer)
controller.create_game(create)
join = sc_pb.RequestJoinGame(
options=sc_pb.InterfaceOptions(), # cheap observations
observed_player_id=0)
controller.join_game(join)
outcome = False
for _ in range(60 * 60): # 60 minutes should be plenty.
controller.step(16)
obs = controller.observe()
if obs.player_result:
print("Outcome after %s steps (%0.1f game minutes):" % (
obs.observation.game_loop, obs.observation.game_loop / (16 * 60)))
for r in obs.player_result:
print("Player %s: %s" % (r.player_id, sc_pb.Result.Name(r.result)))
outcome = True
break
self.assertTrue(outcome)
if __name__ == "__main__":
absltest.main()
|
Add a test that verifies you can observe two built in bots playing against each other.
PiperOrigin-RevId: 183666331#!/usr/bin/python
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test that two built in bots can be watched by an observer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import absltest
from future.builtins import range # pylint: disable=redefined-builtin
from pysc2 import maps
from pysc2 import run_configs
from pysc2.tests import utils
from s2clientprotocol import common_pb2 as sc_common
from s2clientprotocol import sc2api_pb2 as sc_pb
class TestObserver(utils.TestCase):
def test_observer(self):
run_config = run_configs.get()
map_inst = maps.get("Simple64")
with run_config.start() as controller:
create = sc_pb.RequestCreateGame(local_map=sc_pb.LocalMap(
map_path=map_inst.path, map_data=map_inst.data(run_config)))
create.player_setup.add(
type=sc_pb.Computer, race=sc_common.Random, difficulty=sc_pb.VeryEasy)
create.player_setup.add(
type=sc_pb.Computer, race=sc_common.Random, difficulty=sc_pb.VeryHard)
create.player_setup.add(type=sc_pb.Observer)
controller.create_game(create)
join = sc_pb.RequestJoinGame(
options=sc_pb.InterfaceOptions(), # cheap observations
observed_player_id=0)
controller.join_game(join)
outcome = False
for _ in range(60 * 60): # 60 minutes should be plenty.
controller.step(16)
obs = controller.observe()
if obs.player_result:
print("Outcome after %s steps (%0.1f game minutes):" % (
obs.observation.game_loop, obs.observation.game_loop / (16 * 60)))
for r in obs.player_result:
print("Player %s: %s" % (r.player_id, sc_pb.Result.Name(r.result)))
outcome = True
break
self.assertTrue(outcome)
if __name__ == "__main__":
absltest.main()
|
<commit_before><commit_msg>Add a test that verifies you can observe two built in bots playing against each other.
PiperOrigin-RevId: 183666331<commit_after>#!/usr/bin/python
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test that two built in bots can be watched by an observer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import absltest
from future.builtins import range # pylint: disable=redefined-builtin
from pysc2 import maps
from pysc2 import run_configs
from pysc2.tests import utils
from s2clientprotocol import common_pb2 as sc_common
from s2clientprotocol import sc2api_pb2 as sc_pb
class TestObserver(utils.TestCase):
def test_observer(self):
run_config = run_configs.get()
map_inst = maps.get("Simple64")
with run_config.start() as controller:
create = sc_pb.RequestCreateGame(local_map=sc_pb.LocalMap(
map_path=map_inst.path, map_data=map_inst.data(run_config)))
create.player_setup.add(
type=sc_pb.Computer, race=sc_common.Random, difficulty=sc_pb.VeryEasy)
create.player_setup.add(
type=sc_pb.Computer, race=sc_common.Random, difficulty=sc_pb.VeryHard)
create.player_setup.add(type=sc_pb.Observer)
controller.create_game(create)
join = sc_pb.RequestJoinGame(
options=sc_pb.InterfaceOptions(), # cheap observations
observed_player_id=0)
controller.join_game(join)
outcome = False
for _ in range(60 * 60): # 60 minutes should be plenty.
controller.step(16)
obs = controller.observe()
if obs.player_result:
print("Outcome after %s steps (%0.1f game minutes):" % (
obs.observation.game_loop, obs.observation.game_loop / (16 * 60)))
for r in obs.player_result:
print("Player %s: %s" % (r.player_id, sc_pb.Result.Name(r.result)))
outcome = True
break
self.assertTrue(outcome)
if __name__ == "__main__":
absltest.main()
|
|
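A quick note on the loop budget in the test above: controller.step(16) advances 16 game loops per iteration, and the test's own comments assume roughly 16 loops per game second, so range(60 * 60) covers about an hour of game time. A small sketch of that conversion (the loops-per-second rate is taken from the test's comments, not from the SC2 API itself):

LOOPS_PER_STEP = 16            # matches controller.step(16) in the test
LOOPS_PER_GAME_SECOND = 16.0   # rate assumed by the test's own comments

def game_minutes(iterations):
    """Convert iterations of step(16) into elapsed game minutes."""
    loops = iterations * LOOPS_PER_STEP
    return loops / (LOOPS_PER_GAME_SECOND * 60.0)

print(game_minutes(60 * 60))  # 60.0 game minutes for the full loop budget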
bfd8d1126e771702dfe4869923927b8f4fb81ef1
|
openstack/tests/functional/network/v2/test_extension.py
|
openstack/tests/functional/network/v2/test_extension.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from openstack.tests.functional import base
class TestExtension(base.BaseFunctionalTest):
def test_list_and_find(self):
extensions = list(self.conn.network.extensions())
self.assertGreater(len(extensions), 0)
for ext in extensions:
self.assertIsInstance(ext.name, six.string_types)
self.assertIsInstance(ext.namespace, six.string_types)
self.assertIsInstance(ext.alias, six.string_types)
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from openstack.tests.functional import base
class TestExtension(base.BaseFunctionalTest):
def test_list_and_find(self):
extensions = list(self.conn.network.extensions())
self.assertGreater(len(extensions), 0)
for ext in extensions:
self.assertIsInstance(ext.name, six.string_types)
self.assertIsInstance(ext.alias, six.string_types)
|
Remove namespace from network ext test
|
Remove namespace from network ext test
Change-Id: Id9b97d67ac6745fe962a76ccd9c0e4f7cbed4a89
|
Python
|
apache-2.0
|
mtougeron/python-openstacksdk,briancurtin/python-openstacksdk,stackforge/python-openstacksdk,stackforge/python-openstacksdk,dudymas/python-openstacksdk,dtroyer/python-openstacksdk,dtroyer/python-openstacksdk,mtougeron/python-openstacksdk,briancurtin/python-openstacksdk,openstack/python-openstacksdk,openstack/python-openstacksdk,dudymas/python-openstacksdk
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from openstack.tests.functional import base
class TestExtension(base.BaseFunctionalTest):
def test_list_and_find(self):
extensions = list(self.conn.network.extensions())
self.assertGreater(len(extensions), 0)
for ext in extensions:
self.assertIsInstance(ext.name, six.string_types)
self.assertIsInstance(ext.namespace, six.string_types)
self.assertIsInstance(ext.alias, six.string_types)
Remove namespace from network ext test
Change-Id: Id9b97d67ac6745fe962a76ccd9c0e4f7cbed4a89
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from openstack.tests.functional import base
class TestExtension(base.BaseFunctionalTest):
def test_list_and_find(self):
extensions = list(self.conn.network.extensions())
self.assertGreater(len(extensions), 0)
for ext in extensions:
self.assertIsInstance(ext.name, six.string_types)
self.assertIsInstance(ext.alias, six.string_types)
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from openstack.tests.functional import base
class TestExtension(base.BaseFunctionalTest):
def test_list_and_find(self):
extensions = list(self.conn.network.extensions())
self.assertGreater(len(extensions), 0)
for ext in extensions:
self.assertIsInstance(ext.name, six.string_types)
self.assertIsInstance(ext.namespace, six.string_types)
self.assertIsInstance(ext.alias, six.string_types)
<commit_msg>Remove namespace from network ext test
Change-Id: Id9b97d67ac6745fe962a76ccd9c0e4f7cbed4a89<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from openstack.tests.functional import base
class TestExtension(base.BaseFunctionalTest):
def test_list_and_find(self):
extensions = list(self.conn.network.extensions())
self.assertGreater(len(extensions), 0)
for ext in extensions:
self.assertIsInstance(ext.name, six.string_types)
self.assertIsInstance(ext.alias, six.string_types)
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from openstack.tests.functional import base
class TestExtension(base.BaseFunctionalTest):
def test_list_and_find(self):
extensions = list(self.conn.network.extensions())
self.assertGreater(len(extensions), 0)
for ext in extensions:
self.assertIsInstance(ext.name, six.string_types)
self.assertIsInstance(ext.namespace, six.string_types)
self.assertIsInstance(ext.alias, six.string_types)
Remove namespace from network ext test
Change-Id: Id9b97d67ac6745fe962a76ccd9c0e4f7cbed4a89# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from openstack.tests.functional import base
class TestExtension(base.BaseFunctionalTest):
def test_list_and_find(self):
extensions = list(self.conn.network.extensions())
self.assertGreater(len(extensions), 0)
for ext in extensions:
self.assertIsInstance(ext.name, six.string_types)
self.assertIsInstance(ext.alias, six.string_types)
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from openstack.tests.functional import base
class TestExtension(base.BaseFunctionalTest):
def test_list_and_find(self):
extensions = list(self.conn.network.extensions())
self.assertGreater(len(extensions), 0)
for ext in extensions:
self.assertIsInstance(ext.name, six.string_types)
self.assertIsInstance(ext.namespace, six.string_types)
self.assertIsInstance(ext.alias, six.string_types)
<commit_msg>Remove namespace from network ext test
Change-Id: Id9b97d67ac6745fe962a76ccd9c0e4f7cbed4a89<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from openstack.tests.functional import base
class TestExtension(base.BaseFunctionalTest):
def test_list_and_find(self):
extensions = list(self.conn.network.extensions())
self.assertGreater(len(extensions), 0)
for ext in extensions:
self.assertIsInstance(ext.name, six.string_types)
self.assertIsInstance(ext.alias, six.string_types)
|
17a5275c87f90d08ffea12d2300526c5a4f27265
|
tdbwriter.py
|
tdbwriter.py
|
from datetime import datetime
from time import sleep
import random
from tempodb import Client, DataPoint
import tempodb
from os import getenv
API_KEY = getenv('API_KEY')
assert API_KEY, "API_KEY is required"
API_SECRET = getenv('API_SECRET')
assert API_SECRET, "API_SECRET is required"
SERIES_KEY = getenv('SERIES_KEY', 'prng')
API_HOST = getenv('API_HOST', tempodb.client.API_HOST)
API_PORT = int(getenv('API_PORT', tempodb.client.API_PORT))
API_SECURE = getenv('API_SECURE', 'true').lower() in ('1', 'true', 'yes')  # bool() of a non-empty string is always True
client = Client(API_KEY, API_SECRET, API_HOST, API_PORT, API_SECURE)
while True:
client.write_key(SERIES_KEY, [DataPoint(datetime.now(), random.random() * 50.0)])
sleep(1)
|
Write a PRNG datapoint every second
|
Write a PRNG datapoint every second
|
Python
|
mit
|
InPermutation/tdbwriter
|
Write a PRNG datapoint every second
|
from datetime import datetime
from time import sleep
import random
from tempodb import Client, DataPoint
import tempodb
from os import getenv
API_KEY = getenv('API_KEY')
assert API_KEY, "API_KEY is required"
API_SECRET = getenv('API_SECRET')
assert API_SECRET, "API_SECRET is required"
SERIES_KEY = getenv('SERIES_KEY', 'prng')
API_HOST = getenv('API_HOST', tempodb.client.API_HOST)
API_PORT = int(getenv('API_PORT', tempodb.client.API_PORT))
API_SECURE = getenv('API_SECURE', 'true').lower() in ('1', 'true', 'yes')  # bool() of a non-empty string is always True
client = Client(API_KEY, API_SECRET, API_HOST, API_PORT, API_SECURE)
while True:
client.write_key(SERIES_KEY, [DataPoint(datetime.now(), random.random() * 50.0)])
sleep(1)
|
<commit_before><commit_msg>Write a PRNG datapoint every second<commit_after>
|
from datetime import datetime
from time import sleep
import random
from tempodb import Client, DataPoint
import tempodb
from os import getenv
API_KEY = getenv('API_KEY')
assert API_KEY, "API_KEY is required"
API_SECRET = getenv('API_SECRET')
assert API_SECRET, "API_SECRET is required"
SERIES_KEY = getenv('SERIES_KEY', 'prng')
API_HOST = getenv('API_HOST', tempodb.client.API_HOST)
API_PORT = int(getenv('API_PORT', tempodb.client.API_PORT))
API_SECURE = getenv('API_SECURE', 'true').lower() in ('1', 'true', 'yes')  # bool() of a non-empty string is always True
client = Client(API_KEY, API_SECRET, API_HOST, API_PORT, API_SECURE)
while True:
client.write_key(SERIES_KEY, [DataPoint(datetime.now(), random.random() * 50.0)])
sleep(1)
|
Write a PRNG datapoint every secondfrom datetime import datetime
from time import sleep
import random
from tempodb import Client, DataPoint
import tempodb
from os import getenv
API_KEY = getenv('API_KEY')
assert API_KEY, "API_KEY is required"
API_SECRET = getenv('API_SECRET')
assert API_SECRET, "API_SECRET is required"
SERIES_KEY = getenv('SERIES_KEY', 'prng')
API_HOST = getenv('API_HOST', tempodb.client.API_HOST)
API_PORT = int(getenv('API_PORT', tempodb.client.API_PORT))
API_SECURE = getenv('API_SECURE', 'true').lower() in ('1', 'true', 'yes')  # bool() of a non-empty string is always True
client = Client(API_KEY, API_SECRET, API_HOST, API_PORT, API_SECURE)
while True:
client.write_key(SERIES_KEY, [DataPoint(datetime.now(), random.random() * 50.0)])
sleep(1)
|
<commit_before><commit_msg>Write a PRNG datapoint every second<commit_after>from datetime import datetime
from time import sleep
import random
from tempodb import Client, DataPoint
import tempodb
from os import getenv
API_KEY = getenv('API_KEY')
assert API_KEY, "API_KEY is required"
API_SECRET = getenv('API_SECRET')
assert API_SECRET, "API_SECRET is required"
SERIES_KEY = getenv('SERIES_KEY', 'prng')
API_HOST = getenv('API_HOST', tempodb.client.API_HOST)
API_PORT = int(getenv('API_PORT', tempodb.client.API_PORT))
API_SECURE = getenv('API_SECURE', 'true').lower() in ('1', 'true', 'yes')  # bool() of a non-empty string is always True
client = Client(API_KEY, API_SECRET, API_HOST, API_PORT, API_SECURE)
while True:
client.write_key(SERIES_KEY, [DataPoint(datetime.now(), random.random() * 50.0)])
sleep(1)
|
|
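One pitfall worth calling out in the record above: bool() of any non-empty string is True, so the original bool(getenv('API_SECURE', True)) would report True even for API_SECURE=false. A minimal helper for parsing boolean environment variables might look like the sketch below; the name env_bool is illustrative and not part of the tempodb client.

from os import getenv

def env_bool(name, default=True):
    """Parse an environment variable as a boolean.

    '1', 'true', 'yes', or 'on' (case-insensitive) count as True;
    an unset variable falls back to the given default.
    """
    value = getenv(name)
    if value is None:
        return default
    return value.strip().lower() in ('1', 'true', 'yes', 'on')

API_SECURE = env_bool('API_SECURE', True)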
d5eaaa3a6e748ca251c01a77f34f4dc322d9ff56
|
integration/mc-integration.py
|
integration/mc-integration.py
|
#!/usr/bin/env python3
# Usage: mc-integration.py N seed
# N is the number of random points to draw
# Seed is used in the Python RNG to ensure repeatability of results
import random
import sys
# INITIALIZATION-----------------
try:
N = int(sys.argv[1])
except IndexError:
N = 100
try:
seed = int(sys.argv[2])
except IndexError:
seed = 12345
random.seed(seed)
aboveCount = 0
belowCount = 0
# DRAWING-----------------------
def f(x):
return 1 - x*x
for _ in range(N):
x = random.uniform(-1, 1)
y = random.uniform(0, 1)
if y > f(x):
aboveCount += 1
else:
belowCount += 1
# CALCULATION-------------------
totalArea = 2
areaUnder = totalArea * belowCount / N
print('Estimate to the integral from %d points: %f' % (N, areaUnder))
|
Include a basic Monte Carlo integrator of 1 - x^2.
|
Include a basic Monte Carlo integrator of 1 - x^2.
|
Python
|
mpl-2.0
|
DanielBrookRoberge/MonteCarloExamples
|
Include a basic Monte Carlo integrator of 1 - x^2.
|
#!/usr/bin/env python3
# Usage: mc-integration.py N seed
# N is the number of random points to draw
# Seed is used in the Python RNG to ensure repeatability of results
import random
import sys
# INITIALIZATION-----------------
try:
N = int(sys.argv[1])
except IndexError:
N = 100
try:
seed = int(sys.argv[2])
except IndexError:
seed = 12345
random.seed(seed)
aboveCount = 0
belowCount = 0
# DRAWING-----------------------
def f(x):
return 1 - x*x
for _ in range(N):
x = random.uniform(-1, 1)
y = random.uniform(0, 1)
if y > f(x):
aboveCount += 1
else:
belowCount += 1
# CALCULATION-------------------
totalArea = 2
areaUnder = totalArea * belowCount / N
print('Estimate to the integral from %d points: %f' % (N, areaUnder))
|
<commit_before><commit_msg>Include a basic Monte Carlo integrator of 1 - x^2.<commit_after>
|
#!/usr/bin/env python3
# Usage: mc-integration.py N seed
# N is the number of random points to draw
# Seed is used in the Python RNG to ensure repeatability of results
import random
import sys
# INITIALIZATION-----------------
try:
N = int(sys.argv[1])
except IndexError:
N = 100
try:
seed = int(sys.argv[2])
except IndexError:
seed = 12345
random.seed(seed)
aboveCount = 0
belowCount = 0
# DRAWING-----------------------
def f(x):
return 1 - x*x
for _ in range(N):
x = random.uniform(-1, 1)
y = random.uniform(0, 1)
if y > f(x):
aboveCount += 1
else:
belowCount += 1
# CALCULATION-------------------
totalArea = 2
areaUnder = totalArea * belowCount / N
print('Estimate to the integral from %d points: %f' % (N, areaUnder))
|
Include a basic Monte Carlo integrator of 1 - x^2.#!/usr/bin/env python3
# Usage: mc-integration.py N seed
# N is the number of random points to draw
# Seed is used in the Python RNG to ensure repeatability of results
import random
import sys
# INITIALIZATION-----------------
try:
N = int(sys.argv[1])
except IndexError:
N = 100
try:
seed = int(sys.argv[2])
except IndexError:
seed = 12345
random.seed(seed)
aboveCount = 0
belowCount = 0
# DRAWING-----------------------
def f(x):
return 1 - x*x
for _ in range(N):
x = random.uniform(-1, 1)
y = random.uniform(0, 1)
if y > f(x):
aboveCount += 1
else:
belowCount += 1
# CALCULATION-------------------
totalArea = 2
areaUnder = totalArea * belowCount / N
print('Estimate to the integral from %d points: %f' % (N, areaUnder))
|
<commit_before><commit_msg>Include a basic Monte Carlo integrator of 1 - x^2.<commit_after>#!/usr/bin/env python3
# Usage: mc-integration.py N seed
# N is the number of random points to draw
# Seed is used in the Python RNG to ensure repeatability of results
import random
import sys
# INITIALIZATION-----------------
try:
N = int(sys.argv[1])
except IndexError:
N = 100
try:
seed = int(sys.argv[2])
except IndexError:
seed = 12345
random.seed(seed)
aboveCount = 0
belowCount = 0
# DRAWING-----------------------
def f(x):
return 1 - x*x
for _ in range(N):
x = random.uniform(-1, 1)
y = random.uniform(0, 1)
if y > f(x):
aboveCount += 1
else:
belowCount += 1
# CALCULATION-------------------
totalArea = 2
areaUnder = totalArea * belowCount / N
print('Estimate to the integral from %d points: %f' % (N, areaUnder))
|
|
f4fdcc573714787dfecc5e2e713b4137b170102a
|
saleor/payment/gateways/adyen/tests/test_adyen_version.py
|
saleor/payment/gateways/adyen/tests/test_adyen_version.py
|
from Adyen.settings import API_CHECKOUT_VERSION, API_PAYMENT_VERSION
def test_adyen_api_version_not_changed():
    # We shouldn't bump the Adyen API version when we make a patch release.
    # We may bump the Adyen API version in a major or minor release.
    # If we bump the Adyen API version, we should announce it as a breaking change,
    # because Saleor clients may need to update part of their code (e.g. on mobile devices).
assert API_CHECKOUT_VERSION == "v64"
assert API_PAYMENT_VERSION == "v64"
|
Add test for Adyen API version
|
Add test for Adyen API version
|
Python
|
bsd-3-clause
|
mociepka/saleor,mociepka/saleor,mociepka/saleor
|
Add test for Adyen API version
|
from Adyen.settings import API_CHECKOUT_VERSION, API_PAYMENT_VERSION
def test_adyen_api_version_not_changed():
    # We shouldn't bump the Adyen API version when we make a patch release.
    # We may bump the Adyen API version in a major or minor release.
    # If we bump the Adyen API version, we should announce it as a breaking change,
    # because Saleor clients may need to update part of their code (e.g. on mobile devices).
assert API_CHECKOUT_VERSION == "v64"
assert API_PAYMENT_VERSION == "v64"
|
<commit_before><commit_msg>Add test for Adyen API version<commit_after>
|
from Adyen.settings import API_CHECKOUT_VERSION, API_PAYMENT_VERSION
def test_adyen_api_version_not_changed():
    # We shouldn't bump the Adyen API version when we make a patch release.
    # We may bump the Adyen API version in a major or minor release.
    # If we bump the Adyen API version, we should announce it as a breaking change,
    # because Saleor clients may need to update part of their code (e.g. on mobile devices).
assert API_CHECKOUT_VERSION == "v64"
assert API_PAYMENT_VERSION == "v64"
|
Add test for Adyen API versionfrom Adyen.settings import API_CHECKOUT_VERSION, API_PAYMENT_VERSION
def test_adyen_api_version_not_changed():
    # We shouldn't bump the Adyen API version when we make a patch release.
    # We may bump the Adyen API version in a major or minor release.
    # If we bump the Adyen API version, we should announce it as a breaking change,
    # because Saleor clients may need to update part of their code (e.g. on mobile devices).
assert API_CHECKOUT_VERSION == "v64"
assert API_PAYMENT_VERSION == "v64"
|
<commit_before><commit_msg>Add test for Adyen API version<commit_after>from Adyen.settings import API_CHECKOUT_VERSION, API_PAYMENT_VERSION
def test_adyen_api_version_not_changed():
    # We shouldn't bump the Adyen API version when we make a patch release.
    # We may bump the Adyen API version in a major or minor release.
    # If we bump the Adyen API version, we should announce it as a breaking change,
    # because Saleor clients may need to update part of their code (e.g. on mobile devices).
assert API_CHECKOUT_VERSION == "v64"
assert API_PAYMENT_VERSION == "v64"
|
|
ee99b706f4b7fe238c729c704eaf241b60f6ce2b
|
ui/app_list/PRESUBMIT.py
|
ui/app_list/PRESUBMIT.py
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for app_list.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools.
"""
def CheckChangeOnUpload(input_api, output_api):
results = []
results += input_api.canned_checks.CheckPatchFormatted(input_api, output_api)
return results
|
Add git cl format presubmit warning.
|
ui/app_list: Add git cl format presubmit warning.
Upon patch upload 'git cl format' is run on the diff and outputs a
warning if the diff has style violations, saving manual review
time for coding style.
BUG=None
TEST=None
R=xiyuan@chromium.org
Review URL: https://codereview.chromium.org/169003002
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@251724 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
hgl888/chromium-crosswalk,anirudhSK/chromium,axinging/chromium-crosswalk,Just-D/chromium-1,markYoungH/chromium.src,chuan9/chromium-crosswalk,ltilve/chromium,crosswalk-project/chromium-crosswalk-efl,jaruba/chromium.src,hgl888/chromium-crosswalk-efl,patrickm/chromium.src,ltilve/chromium,mohamed--abdel-maksoud/chromium.src,patrickm/chromium.src,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,anirudhSK/chromium,dednal/chromium.src,Fireblend/chromium-crosswalk,patrickm/chromium.src,Jonekee/chromium.src,markYoungH/chromium.src,hgl888/chromium-crosswalk,dushu1203/chromium.src,hgl888/chromium-crosswalk,dushu1203/chromium.src,axinging/chromium-crosswalk,Jonekee/chromium.src,Chilledheart/chromium,Just-D/chromium-1,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk,markYoungH/chromium.src,Pluto-tv/chromium-crosswalk,M4sse/chromium.src,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,chuan9/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,ondra-novak/chromium.src,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Just-D/chromium-1,Fireblend/chromium-crosswalk,jaruba/chromium.src,hgl888/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,anirudhSK/chromium,mohamed--abdel-maksoud/chromium.src,patrickm/chromium.src,ondra-novak/chromium.src,Pluto-tv/chromium-crosswalk,ondra-novak/chromium.src,Just-D/chromium-1,anirudhSK/chromium,M4sse/chromium.src,Just-D/chromium-1,markYoungH/chromium.src,dednal/chromium.src,hgl888/chromium-crosswalk-efl,Chilledheart/chromium,dushu1203/chromium.src,ondra-novak/chromium.src,mohamed--abdel-maksoud/chromium.src,ChromiumWebApps/chromium,PeterWangIntel/chromium-crosswalk,dednal/chromium.src,Chilledheart/chromium,Just-D/chromium-1,anirudhSK/chromium,ondra-novak/chromium.src,crosswalk-project/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,Just-D/chromium-1,Chilledheart/chromium,anirudhSK/chromium,M4sse/chromium.src,M4sse/chromium.src,ChromiumWebApps/chromium,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,Jonekee/chromium.src,ondra-novak/chromium.src,Chilledheart/chromium,patrickm/chromium.src,crosswalk-project/chromium-crosswalk-efl,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,ChromiumWebApps/chromium,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,anirudhSK/chromium,ChromiumWebApps/chromium,fujunwei/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,jaruba/chromium.src,markYoungH/chromium.src,dushu1203/chromium.src,hgl888/chromium-crosswalk,anirudhSK/chromium,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,markYoungH/chromium.src,ChromiumWebApps/chromium,bright-sparks/chromium-spacewalk,chuan9/chromium-crosswalk,ChromiumWebApps/chromium,hgl888/chromium-crosswalk-efl,dushu1203/
chromium.src,jaruba/chromium.src,hgl888/chromium-crosswalk,Fireblend/chromium-crosswalk,markYoungH/chromium.src,Pluto-tv/chromium-crosswalk,krieger-od/nwjs_chromium.src,ChromiumWebApps/chromium,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,chuan9/chromium-crosswalk,dednal/chromium.src,jaruba/chromium.src,Jonekee/chromium.src,chuan9/chromium-crosswalk,bright-sparks/chromium-spacewalk,crosswalk-project/chromium-crosswalk-efl,ondra-novak/chromium.src,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,Chilledheart/chromium,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,littlstar/chromium.src,anirudhSK/chromium,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,jaruba/chromium.src,fujunwei/chromium-crosswalk,patrickm/chromium.src,crosswalk-project/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,ChromiumWebApps/chromium,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,chuan9/chromium-crosswalk,Chilledheart/chromium,littlstar/chromium.src,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,ChromiumWebApps/chromium,markYoungH/chromium.src,chuan9/chromium-crosswalk,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,littlstar/chromium.src,M4sse/chromium.src,littlstar/chromium.src,markYoungH/chromium.src,anirudhSK/chromium,ltilve/chromium,ChromiumWebApps/chromium,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,patrickm/chromium.src,patrickm/chromium.src,fujunwei/chromium-crosswalk,patrickm/chromium.src,ondra-novak/chromium.src,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,ondra-novak/chromium.src,Jonekee/chromium.src,dednal/chromium.src,dednal/chromium.src,bright-sparks/chromium-spacewalk,littlstar/chromium.src,dushu1203/chromium.src,fujunwei/chromium-crosswalk,dednal/chromium.src,fujunwei/chromium-crosswalk,dednal/chromium.src,hgl888/chromium-crosswalk-efl,M4sse/chromium.src,anirudhSK/chromium,PeterWangIntel/chromium-crosswalk,M4sse/chromium.src,jaruba/chromium.src,fujunwei/chromium-crosswalk,dednal/chromium.src,jaruba/chromium.src,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,littlstar/chromium.src,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,dednal/chromium.src,Just-D/chromium-1,Jonekee/chromium.src,dednal/chromium.src,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,fujunwei/chromium-crosswalk,littlstar/chromium.src,littlstar/chromium.src,jaruba/chromium.src,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,Chilledheart/chromium,ltilve/chromium,ltilve/chromium,jaruba/chromium.src,Fireblend/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,bright-sparks/chromium-spacewalk,ltilve/chromium,ltilve/chromium,ChromiumWebApps/chromium,ltilve/chromium,mohamed--abdel-maksoud/chromium.src,ChromiumWebApps/chromium,ltilve/chromium,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,Jonekee/chromium.src,M4sse/chromium.src,Fireblend/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,bright-sparks/chromium-spacewalk,anirudhSK/chromium
|
ui/app_list: Add git cl format presubmit warning.
Upon patch upload 'git cl format' is run on the diff and outputs a
warning if the diff has style violations, saving manual review
time for coding style.
BUG=None
TEST=None
R=xiyuan@chromium.org
Review URL: https://codereview.chromium.org/169003002
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@251724 0039d316-1c4b-4281-b951-d872f2087c98
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for app_list.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools.
"""
def CheckChangeOnUpload(input_api, output_api):
results = []
results += input_api.canned_checks.CheckPatchFormatted(input_api, output_api)
return results
|
<commit_before><commit_msg>ui/app_list: Add git cl format presubmit warning.
Upon patch upload 'git cl format' is run on the diff and outputs a
warning if the diff has style violations, saving manual review
time for coding style.
BUG=None
TEST=None
R=xiyuan@chromium.org
Review URL: https://codereview.chromium.org/169003002
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@251724 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for app_list.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools.
"""
def CheckChangeOnUpload(input_api, output_api):
results = []
results += input_api.canned_checks.CheckPatchFormatted(input_api, output_api)
return results
|
ui/app_list: Add git cl format presubmit warning.
Upon patch upload 'git cl format' is run on the diff and outputs a
warning if the diff has style violations, saving manual review
time for coding style.
BUG=None
TEST=None
R=xiyuan@chromium.org
Review URL: https://codereview.chromium.org/169003002
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@251724 0039d316-1c4b-4281-b951-d872f2087c98# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for app_list.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools.
"""
def CheckChangeOnUpload(input_api, output_api):
results = []
results += input_api.canned_checks.CheckPatchFormatted(input_api, output_api)
return results
|
<commit_before><commit_msg>ui/app_list: Add git cl format presubmit warning.
Upon patch upload 'git cl format' is run on the diff and outputs a
warning if the diff has style violations, saving manual review
time for coding style.
BUG=None
TEST=None
R=xiyuan@chromium.org
Review URL: https://codereview.chromium.org/169003002
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@251724 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for app_list.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools.
"""
def CheckChangeOnUpload(input_api, output_api):
results = []
results += input_api.canned_checks.CheckPatchFormatted(input_api, output_api)
return results
|
|
01732179f923394bee6e46ae1bbee0f316df8297
|
amostra/tests/test_revert.py
|
amostra/tests/test_revert.py
|
from hypothesis import given, strategies as st
from hypothesis.strategies import text
from hypothesis import settings
import random
alphabet_list = ''
for i in range(26):
alphabet_list = alphabet_list + chr(97 + i)
@given(names = st.lists(st.text(alphabet=alphabet_list, min_size=1, max_size=4), min_size=3, max_size=4, unique=True))
@settings(max_examples = 10, deadline = 1000)
def test_revert(client_conf, names):
client, mongo_client = client_conf()
n = len(names)
s = client.samples.new(name = names[0])
for name in names[1:]:
s.name = name
num = random.randint(0, n-2)
revert_target_cursor = mongo_client['tests-amostra'].samples_revisions.find({'revision': num})
s.revert(num)
target = next(revert_target_cursor)
for name, trait in s.traits().items():
        if name == 'revision':  # 'is' tests identity, not string equality
continue
else:
assert getattr(s, name) == target[name]
|
Add a revert function test
|
TST: Add a revert function test
|
Python
|
bsd-3-clause
|
NSLS-II/amostra
|
TST: Add a revert function test
|
from hypothesis import given, strategies as st
from hypothesis.strategies import text
from hypothesis import settings
import random
alphabet_list = ''
for i in range(26):
alphabet_list = alphabet_list + chr(97 + i)
@given(names = st.lists(st.text(alphabet=alphabet_list, min_size=1, max_size=4), min_size=3, max_size=4, unique=True))
@settings(max_examples = 10, deadline = 1000)
def test_revert(client_conf, names):
client, mongo_client = client_conf()
n = len(names)
s = client.samples.new(name = names[0])
for name in names[1:]:
s.name = name
num = random.randint(0, n-2)
revert_target_cursor = mongo_client['tests-amostra'].samples_revisions.find({'revision': num})
s.revert(num)
target = next(revert_target_cursor)
for name, trait in s.traits().items():
        if name == 'revision':  # 'is' tests identity, not string equality
continue
else:
assert getattr(s, name) == target[name]
|
<commit_before><commit_msg>TST: Add a revert function test<commit_after>
|
from hypothesis import given, strategies as st
from hypothesis.strategies import text
from hypothesis import settings
import random
alphabet_list = ''
for i in range(26):
alphabet_list = alphabet_list + chr(97 + i)
@given(names = st.lists(st.text(alphabet=alphabet_list, min_size=1, max_size=4), min_size=3, max_size=4, unique=True))
@settings(max_examples = 10, deadline = 1000)
def test_revert(client_conf, names):
client, mongo_client = client_conf()
n = len(names)
s = client.samples.new(name = names[0])
for name in names[1:]:
s.name = name
num = random.randint(0, n-2)
revert_target_cursor = mongo_client['tests-amostra'].samples_revisions.find({'revision': num})
s.revert(num)
target = next(revert_target_cursor)
for name, trait in s.traits().items():
        if name == 'revision':  # 'is' tests identity, not string equality
continue
else:
assert getattr(s, name) == target[name]
|
TST: Add a revert function testfrom hypothesis import given, strategies as st
from hypothesis.strategies import text
from hypothesis import settings
import random
alphabet_list = ''
for i in range(26):
alphabet_list = alphabet_list + chr(97 + i)
@given(names = st.lists(st.text(alphabet=alphabet_list, min_size=1, max_size=4), min_size=3, max_size=4, unique=True))
@settings(max_examples = 10, deadline = 1000)
def test_revert(client_conf, names):
client, mongo_client = client_conf()
n = len(names)
s = client.samples.new(name = names[0])
for name in names[1:]:
s.name = name
num = random.randint(0, n-2)
revert_target_cursor = mongo_client['tests-amostra'].samples_revisions.find({'revision': num})
s.revert(num)
target = next(revert_target_cursor)
for name, trait in s.traits().items():
        if name == 'revision':  # 'is' tests identity, not string equality
continue
else:
assert getattr(s, name) == target[name]
|
<commit_before><commit_msg>TST: Add a revert function test<commit_after>from hypothesis import given, strategies as st
from hypothesis.strategies import text
from hypothesis import settings
import random
alphabet_list = ''
for i in range(26):
alphabet_list = alphabet_list + chr(97 + i)
@given(names = st.lists(st.text(alphabet=alphabet_list, min_size=1, max_size=4), min_size=3, max_size=4, unique=True))
@settings(max_examples = 10, deadline = 1000)
def test_revert(client_conf, names):
client, mongo_client = client_conf()
n = len(names)
s = client.samples.new(name = names[0])
for name in names[1:]:
s.name = name
num = random.randint(0, n-2)
revert_target_cursor = mongo_client['tests-amostra'].samples_revisions.find({'revision': num})
s.revert(num)
target = next(revert_target_cursor)
for name, trait in s.traits().items():
        if name == 'revision':  # 'is' tests identity, not string equality
continue
else:
assert getattr(s, name) == target[name]
|
|
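Comparing strings with `is`, as the record's test does, relies on CPython happening to intern both strings: `is` tests object identity, not equality. A tiny demonstration of why that is fragile:

a = 'revision'
b = ''.join(['re', 'vision'])  # built at runtime, usually not interned

print(a == b)  # True: the contents are equal
print(a is b)  # typically False: two distinct string objects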
29f3a8dd5ee1faf4bad1e5ec969c8f975d8539b0
|
alembic/versions/4e435ff8ba74_add_unsubscribe_column_to_user_table.py
|
alembic/versions/4e435ff8ba74_add_unsubscribe_column_to_user_table.py
|
"""Add unsubscribe column to user table
Revision ID: 4e435ff8ba74
Revises: bbba2255e00
Create Date: 2015-02-09 10:36:45.935116
"""
# revision identifiers, used by Alembic.
revision = '4e435ff8ba74'
down_revision = 'bbba2255e00'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('user', sa.Column('subscribed', sa.Boolean, default=True))
query = 'UPDATE "user" SET subscribed=true;'
op.execute(query)
def downgrade():
op.drop_column('user', 'subscribed')
|
Add Column to get notifications.
|
Add Column to get notifications.
|
Python
|
agpl-3.0
|
jean/pybossa,inteligencia-coletiva-lsd/pybossa,geotagx/pybossa,OpenNewsLabs/pybossa,jean/pybossa,inteligencia-coletiva-lsd/pybossa,Scifabric/pybossa,stefanhahmann/pybossa,PyBossa/pybossa,geotagx/pybossa,PyBossa/pybossa,stefanhahmann/pybossa,OpenNewsLabs/pybossa,Scifabric/pybossa
|
Add Column to get notifications.
|
"""Add unsubscribe column to user table
Revision ID: 4e435ff8ba74
Revises: bbba2255e00
Create Date: 2015-02-09 10:36:45.935116
"""
# revision identifiers, used by Alembic.
revision = '4e435ff8ba74'
down_revision = 'bbba2255e00'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('user', sa.Column('subscribed', sa.Boolean, default=True))
query = 'UPDATE "user" SET subscribed=true;'
op.execute(query)
def downgrade():
op.drop_column('user', 'subscribed')
|
<commit_before><commit_msg>Add Column to get notifications.<commit_after>
|
"""Add unsubscribe column to user table
Revision ID: 4e435ff8ba74
Revises: bbba2255e00
Create Date: 2015-02-09 10:36:45.935116
"""
# revision identifiers, used by Alembic.
revision = '4e435ff8ba74'
down_revision = 'bbba2255e00'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('user', sa.Column('subscribed', sa.Boolean, default=True))
query = 'UPDATE "user" SET subscribed=true;'
op.execute(query)
def downgrade():
op.drop_column('user', 'subscribed')
|
Add Column to get notifications."""Add unsubscribe column to user table
Revision ID: 4e435ff8ba74
Revises: bbba2255e00
Create Date: 2015-02-09 10:36:45.935116
"""
# revision identifiers, used by Alembic.
revision = '4e435ff8ba74'
down_revision = 'bbba2255e00'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('user', sa.Column('subscribed', sa.Boolean, default=True))
query = 'UPDATE "user" SET subscribed=true;'
op.execute(query)
def downgrade():
op.drop_column('user', 'subscribed')
|
<commit_before><commit_msg>Add Column to get notifications.<commit_after>"""Add unsubscribe column to user table
Revision ID: 4e435ff8ba74
Revises: bbba2255e00
Create Date: 2015-02-09 10:36:45.935116
"""
# revision identifiers, used by Alembic.
revision = '4e435ff8ba74'
down_revision = 'bbba2255e00'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('user', sa.Column('subscribed', sa.Boolean, default=True))
query = 'UPDATE "user" SET subscribed=true;'
op.execute(query)
def downgrade():
op.drop_column('user', 'subscribed')
|
|
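A note on the migration pattern above: default=True is a client-side default that SQLAlchemy applies only to new ORM inserts, which is why the raw UPDATE is needed to backfill existing rows. The sketch below uses a server-side default so the database backfills in one step; column and table names follow the record, and this is one possible alternative rather than what the project shipped.

import sqlalchemy as sa
from alembic import op

def upgrade():
    # server_default is written into the DDL, so existing rows are
    # filled with TRUE as part of the ALTER TABLE itself.
    op.add_column(
        'user',
        sa.Column('subscribed', sa.Boolean, server_default=sa.true()),
    )

def downgrade():
    op.drop_column('user', 'subscribed')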
ee0eca16bca122b4d4c7c06cb9459ef8a9861b3c
|
site_settings-example.py
|
site_settings-example.py
|
# Site-specific django settings.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
('Your name', 'your-email@example.org'),
)
SERVER_EMAIL = 'root'
MANAGERS = ADMINS
DATABASE_ENGINE = 'postgresql_psycopg2' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
DATABASE_NAME = 'tjrapid' # Or path to database file if using sqlite3.
DATABASE_USER = 'tjrapid' # Not used with sqlite3.
DATABASE_PASSWORD = '' # Not used with sqlite3.
DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.
# Absolute path to the root directory.
ROOT = '/home/user/www/tjrapid/'
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ROOT + 'site_media/'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = '/site_media/'
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = '8$w)5-w9ux6n1lzdruw7g5!%rx*pdgwi2_gb2p8^*rppxi5^dw'
TEMPLATE_DIRS = (
    ROOT + 'templates/',
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
FONTS_DIR = ROOT + 'fonts/'
|
Add site_settings example for new installations.
|
Add site_settings example for new installations.
|
Python
|
mit
|
peterkuma/tjrapid,peterkuma/tjrapid,peterkuma/tjrapid
|
Add site_settings example for new installations.
|
# Site-specific django settings.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
('Your name', 'your-email@example.org'),
)
SERVER_EMAIL = 'root'
MANAGERS = ADMINS
DATABASE_ENGINE = 'postgresql_psycopg2' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
DATABASE_NAME = 'tjrapid' # Or path to database file if using sqlite3.
DATABASE_USER = 'tjrapid' # Not used with sqlite3.
DATABASE_PASSWORD = '' # Not used with sqlite3.
DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.
# Absolute path to the root directory.
ROOT = '/home/user/www/tjrapid/'
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ROOT + 'site_media/'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = '/site_media/'
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = '8$w)5-w9ux6n1lzdruw7g5!%rx*pdgwi2_gb2p8^*rppxi5^dw'
TEMPLATE_DIRS = (
    ROOT + 'templates/',
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
FONTS_DIR = ROOT + 'fonts/'
|
<commit_before><commit_msg>Add site_settings example for new installations.<commit_after>
|
# Site-specific django settings.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
('Your name', 'your-email@example.org'),
)
SERVER_EMAIL = 'root'
MANAGERS = ADMINS
DATABASE_ENGINE = 'postgresql_psycopg2' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
DATABASE_NAME = 'tjrapid' # Or path to database file if using sqlite3.
DATABASE_USER = 'tjrapid' # Not used with sqlite3.
DATABASE_PASSWORD = '' # Not used with sqlite3.
DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.
# Absolute path to the root directory.
ROOT = '/home/user/www/tjrapid/'
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ROOT + 'site_media/'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = '/site_media/'
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = '8$w)5-w9ux6n1lzdruw7g5!%rx*pdgwi2_gb2p8^*rppxi5^dw'
TEMPLATE_DIRS = (
    ROOT + 'templates/',
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
FONTS_DIR = ROOT + 'fonts/'
|
Add site_settings example for new installations.# Site-specific django settings.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
('Your name', 'your-email@example.org'),
)
SERVER_EMAIL = 'root'
MANAGERS = ADMINS
DATABASE_ENGINE = 'postgresql_psycopg2' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
DATABASE_NAME = 'tjrapid' # Or path to database file if using sqlite3.
DATABASE_USER = 'tjrapid' # Not used with sqlite3.
DATABASE_PASSWORD = '' # Not used with sqlite3.
DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.
# Absolute path to the root directory.
ROOT = '/home/user/www/tjrapid/'
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ROOT + 'site_media/'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = '/site_media/'
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = '8$w)5-w9ux6n1lzdruw7g5!%rx*pdgwi2_gb2p8^*rppxi5^dw'
TEMPLATE_DIRS = (
    ROOT + 'templates/',
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
FONTS_DIR = ROOT + 'fonts/'
|
<commit_before><commit_msg>Add site_settings example for new installations.<commit_after># Site-specific django settings.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
('Your name', 'your-email@example.org'),
)
SERVER_EMAIL = 'root'
MANAGERS = ADMINS
DATABASE_ENGINE = 'postgresql_psycopg2' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
DATABASE_NAME = 'tjrapid' # Or path to database file if using sqlite3.
DATABASE_USER = 'tjrapid' # Not used with sqlite3.
DATABASE_PASSWORD = '' # Not used with sqlite3.
DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.
# Absolute path to the root directory.
ROOT = '/home/user/www/tjrapid/'
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ROOT + 'site_media/'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = '/site_media/'
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = '8$w)5-w9ux6n1lzdruw7g5!%rx*pdgwi2_gb2p8^*rppxi5^dw'
TEMPLATE_DIRS = (
    ROOT + 'templates/',
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
FONTS_DIR = ROOT + 'fonts/'
|
|
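The settings example above builds paths by concatenating against ROOT (the original lowercase root was undefined), which silently breaks if the trailing slash is dropped. os.path.join is the more robust idiom; a sketch covering the same paths:

import os

ROOT = '/home/user/www/tjrapid/'

MEDIA_ROOT = os.path.join(ROOT, 'site_media')  # trailing slash no longer matters
FONTS_DIR = os.path.join(ROOT, 'fonts')
TEMPLATE_DIRS = (
    os.path.join(ROOT, 'templates'),
)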
6e3247b417bdb5812e0f7818552aa667855b8ed4
|
pylearn2/costs/tests/test_lp_norm_cost.py
|
pylearn2/costs/tests/test_lp_norm_cost.py
|
"""
Test LpNorm cost
"""
import numpy
import theano
from theano import tensor as T
from nose.tools import raises
def test_shared_variables():
'''
LpNorm should handle shared variables.
'''
assert False
def test_symbolic_expressions_of_shared_variables():
'''
LpNorm should handle symbolic expressions of shared variables.
'''
assert False
@raises(Exception)
def test_symbolic_variables():
'''
LpNorm should not handle symbolic variables
'''
assert True
if __name__ == '__main__':
test_shared_variables()
test_symbolic_expressions_of_shared_variables()
test_symbolic_variables()
|
Add unit test for LpNorm
|
Add unit test for LpNorm
|
Python
|
bsd-3-clause
|
hantek/pylearn2,lisa-lab/pylearn2,shiquanwang/pylearn2,hantek/pylearn2,daemonmaker/pylearn2,hantek/pylearn2,bartvm/pylearn2,lamblin/pylearn2,CIFASIS/pylearn2,abergeron/pylearn2,abergeron/pylearn2,skearnes/pylearn2,pkainz/pylearn2,fishcorn/pylearn2,daemonmaker/pylearn2,nouiz/pylearn2,kose-y/pylearn2,theoryno3/pylearn2,CIFASIS/pylearn2,msingh172/pylearn2,JesseLivezey/plankton,abergeron/pylearn2,chrish42/pylearn,sandeepkbhat/pylearn2,w1kke/pylearn2,jeremyfix/pylearn2,hyqneuron/pylearn2-maxsom,jamessergeant/pylearn2,jeremyfix/pylearn2,JesseLivezey/plankton,skearnes/pylearn2,JesseLivezey/plankton,w1kke/pylearn2,goodfeli/pylearn2,TNick/pylearn2,cosmoharrigan/pylearn2,fyffyt/pylearn2,woozzu/pylearn2,aalmah/pylearn2,cosmoharrigan/pylearn2,CIFASIS/pylearn2,se4u/pylearn2,mkraemer67/pylearn2,lamblin/pylearn2,alexjc/pylearn2,lisa-lab/pylearn2,bartvm/pylearn2,theoryno3/pylearn2,shiquanwang/pylearn2,ddboline/pylearn2,jamessergeant/pylearn2,lunyang/pylearn2,lamblin/pylearn2,kastnerkyle/pylearn2,alexjc/pylearn2,se4u/pylearn2,hyqneuron/pylearn2-maxsom,TNick/pylearn2,ddboline/pylearn2,daemonmaker/pylearn2,mclaughlin6464/pylearn2,hantek/pylearn2,chrish42/pylearn,ashhher3/pylearn2,alexjc/pylearn2,kastnerkyle/pylearn2,kastnerkyle/pylearn2,Refefer/pylearn2,daemonmaker/pylearn2,skearnes/pylearn2,lancezlin/pylearn2,chrish42/pylearn,nouiz/pylearn2,fyffyt/pylearn2,nouiz/pylearn2,lisa-lab/pylearn2,caidongyun/pylearn2,lancezlin/pylearn2,hyqneuron/pylearn2-maxsom,mclaughlin6464/pylearn2,mclaughlin6464/pylearn2,ashhher3/pylearn2,woozzu/pylearn2,kose-y/pylearn2,fulmicoton/pylearn2,matrogers/pylearn2,pkainz/pylearn2,w1kke/pylearn2,CIFASIS/pylearn2,JesseLivezey/pylearn2,KennethPierce/pylearnk,matrogers/pylearn2,pombredanne/pylearn2,jamessergeant/pylearn2,shiquanwang/pylearn2,fishcorn/pylearn2,KennethPierce/pylearnk,bartvm/pylearn2,fishcorn/pylearn2,kastnerkyle/pylearn2,skearnes/pylearn2,msingh172/pylearn2,junbochen/pylearn2,w1kke/pylearn2,se4u/pylearn2,cosmoharrigan/pylearn2,JesseLivezey/pylearn2,aalmah/pylearn2,lancezlin/pylearn2,fyffyt/pylearn2,mkraemer67/pylearn2,JesseLivezey/pylearn2,fyffyt/pylearn2,lunyang/pylearn2,lisa-lab/pylearn2,JesseLivezey/pylearn2,jeremyfix/pylearn2,junbochen/pylearn2,lunyang/pylearn2,aalmah/pylearn2,se4u/pylearn2,caidongyun/pylearn2,theoryno3/pylearn2,fishcorn/pylearn2,sandeepkbhat/pylearn2,aalmah/pylearn2,caidongyun/pylearn2,mkraemer67/pylearn2,matrogers/pylearn2,TNick/pylearn2,hyqneuron/pylearn2-maxsom,shiquanwang/pylearn2,matrogers/pylearn2,pombredanne/pylearn2,pkainz/pylearn2,JesseLivezey/plankton,alexjc/pylearn2,ddboline/pylearn2,ddboline/pylearn2,jeremyfix/pylearn2,woozzu/pylearn2,caidongyun/pylearn2,theoryno3/pylearn2,junbochen/pylearn2,lunyang/pylearn2,pombredanne/pylearn2,mclaughlin6464/pylearn2,lancezlin/pylearn2,ashhher3/pylearn2,lamblin/pylearn2,jamessergeant/pylearn2,fulmicoton/pylearn2,Refefer/pylearn2,goodfeli/pylearn2,sandeepkbhat/pylearn2,msingh172/pylearn2,msingh172/pylearn2,KennethPierce/pylearnk,bartvm/pylearn2,goodfeli/pylearn2,fulmicoton/pylearn2,ashhher3/pylearn2,nouiz/pylearn2,sandeepkbhat/pylearn2,goodfeli/pylearn2,TNick/pylearn2,KennethPierce/pylearnk,Refefer/pylearn2,mkraemer67/pylearn2,Refefer/pylearn2,woozzu/pylearn2,kose-y/pylearn2,abergeron/pylearn2,fulmicoton/pylearn2,pombredanne/pylearn2,pkainz/pylearn2,cosmoharrigan/pylearn2,junbochen/pylearn2,kose-y/pylearn2,chrish42/pylearn
|
Add unit test for LpNorm
|
"""
Test LpNorm cost
"""
import numpy
import theano
from theano import tensor as T
from nose.tools import raises
def test_shared_variables():
'''
LpNorm should handle shared variables.
'''
assert False
def test_symbolic_expressions_of_shared_variables():
'''
LpNorm should handle symbolic expressions of shared variables.
'''
assert False
@raises(Exception)
def test_symbolic_variables():
'''
LpNorm should not handle symbolic variables
'''
assert True
if __name__ == '__main__':
test_shared_variables()
test_symbolic_expressions_of_shared_variables()
test_symbolic_variables()
|
<commit_before><commit_msg>Add unit test for LpNorm<commit_after>
|
"""
Test LpNorm cost
"""
import numpy
import theano
from theano import tensor as T
from nose.tools import raises
def test_shared_variables():
'''
LpNorm should handle shared variables.
'''
assert False
def test_symbolic_expressions_of_shared_variables():
'''
LpNorm should handle symbolic expressions of shared variables.
'''
assert False
@raises(Exception)
def test_symbolic_variables():
'''
LpNorm should not handle symbolic variables
'''
assert True
if __name__ == '__main__':
test_shared_variables()
test_symbolic_expressions_of_shared_variables()
test_symbolic_variables()
|
Add unit test for LpNorm"""
Test LpNorm cost
"""
import numpy
import theano
from theano import tensor as T
from nose.tools import raises
def test_shared_variables():
'''
LpNorm should handle shared variables.
'''
assert False
def test_symbolic_expressions_of_shared_variables():
'''
LpNorm should handle symbolic expressions of shared variables.
'''
assert False
@raises(Exception)
def test_symbolic_variables():
'''
LpNorm should not handle symbolic variables
'''
assert True
if __name__ == '__main__':
test_shared_variables()
test_symbolic_expressions_of_shared_variables()
test_symbolic_variables()
|
<commit_before><commit_msg>Add unit test for LpNorm<commit_after>"""
Test LpNorm cost
"""
import numpy
import theano
from theano import tensor as T
from nose.tools import raises
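# NOTE: the assertions below read as failing placeholders for LpNorm behaviour not implemented yet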
def test_shared_variables():
'''
LpNorm should handle shared variables.
'''
assert False
def test_symbolic_expressions_of_shared_variables():
'''
LpNorm should handle symbolic expressions of shared variables.
'''
assert False
@raises(Exception)
def test_symbolic_variables():
'''
LpNorm should not handle symbolic variables
'''
assert True
if __name__ == '__main__':
test_shared_variables()
test_symbolic_expressions_of_shared_variables()
test_symbolic_variables()
|
|
d8579de24cdd1b381678617953fd79fc49f87952
|
affine/tests/test_rotation.py
|
affine/tests/test_rotation.py
|
from math import sqrt
from affine import Affine
def test_rotation_angle():
"""A positive angle rotates a vector counter clockwise
(1.0, 0.0):
|
|
|
|
0---------*
Affine.rotation(45.0) * (1.0, 0.0) == (0.707..., 0.707...)
|
| *
|
|
0----------
"""
x, y = Affine.rotation(45.0) * (1.0, 0.0)
    assert round(x, 9) == round(sqrt(2.0) / 2.0, 9)
    assert round(y, 9) == round(sqrt(2.0) / 2.0, 9)
|
Add a clear test of rotation angle
|
Add a clear test of rotation angle
With an illustration, as a guard against regressions.
|
Python
|
bsd-3-clause
|
ToddSmall/affine
|
Add a clear test of rotation angle
With an illustration, as a guard against regressions.
|
from math import sqrt
from affine import Affine
def test_rotation_angle():
"""A positive angle rotates a vector counter clockwise
(1.0, 0.0):
|
|
|
|
0---------*
Affine.rotation(45.0) * (1.0, 0.0) == (0.707..., 0.707...)
|
| *
|
|
0----------
"""
x, y = Affine.rotation(45.0) * (1.0, 0.0)
    assert round(x, 9) == round(sqrt(2.0) / 2.0, 9)
    assert round(y, 9) == round(sqrt(2.0) / 2.0, 9)
|
<commit_before><commit_msg>Add a clear test of rotation angle
With an illustration, as a guard against regressions.<commit_after>
|
from math import sqrt
from affine import Affine
def test_rotation_angle():
"""A positive angle rotates a vector counter clockwise
(1.0, 0.0):
|
|
|
|
0---------*
Affine.rotation(45.0) * (1.0, 0.0) == (0.707..., 0.707...)
|
| *
|
|
0----------
"""
x, y = Affine.rotation(45.0) * (1.0, 0.0)
    assert round(x, 9) == round(sqrt(2.0) / 2.0, 9)
    assert round(y, 9) == round(sqrt(2.0) / 2.0, 9)
|
Add a clear test of rotation angle
With an illustration, as a guard against regressions.from math import sqrt
from affine import Affine
def test_rotation_angle():
"""A positive angle rotates a vector counter clockwise
(1.0, 0.0):
|
|
|
|
0---------*
Affine.rotation(45.0) * (1.0, 0.0) == (0.707..., 0.707...)
|
| *
|
|
0----------
"""
x, y = Affine.rotation(45.0) * (1.0, 0.0)
    assert round(x, 9) == round(sqrt(2.0) / 2.0, 9)
    assert round(y, 9) == round(sqrt(2.0) / 2.0, 9)
|
<commit_before><commit_msg>Add a clear test of rotation angle
With an illustration, as a guard against regressions.<commit_after>from math import sqrt
from affine import Affine
def test_rotation_angle():
"""A positive angle rotates a vector counter clockwise
(1.0, 0.0):
|
|
|
|
0---------*
Affine.rotation(45.0) * (1.0, 0.0) == (0.707..., 0.707...)
|
| *
|
|
0----------
"""
x, y = Affine.rotation(45.0) * (1.0, 0.0)
    assert round(x, 9) == round(sqrt(2.0) / 2.0, 9)
    assert round(y, 9) == round(sqrt(2.0) / 2.0, 9)
|
|
a2a118186892ed30b5c0a3c4f1893ec1b9d3dba5
|
src/greplin/scales/util_test.py
|
src/greplin/scales/util_test.py
|
# Copyright 2012 The scales Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the util module."""
from greplin.scales import util
import unittest
class AtomicValueTest(unittest.TestCase):
"""Tests for atomic values."""
def testUpdate(self):
"""Test update functions."""
v = util.AtomicValue('hello, world')
self.assertEqual(v.update(len), ('hello, world', len('hello, world')))
self.assertEqual(v.value, len('hello, world'))
def testGetAndSet(self):
"""Test get-and-set."""
v = util.AtomicValue(42)
self.assertEqual(v.getAndSet(666), 42)
self.assertEqual(v.value, 666)
def testAddAndGet(self):
"""Test add-and-get."""
v = util.AtomicValue(42)
self.assertEqual(v.addAndGet(8), 50)
self.assertEqual(v.value, 50)
|
Add unit tests for AtomicValue methods.
|
Add unit tests for AtomicValue methods.
|
Python
|
apache-2.0
|
Cue/scales,URXtech/scales
|
Add unit tests for AtomicValue methods.
|
# Copyright 2012 The scales Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the util module."""
from greplin.scales import util
import unittest
class AtomicValueTest(unittest.TestCase):
"""Tests for atomic values."""
def testUpdate(self):
"""Test update functions."""
v = util.AtomicValue('hello, world')
self.assertEqual(v.update(len), ('hello, world', len('hello, world')))
self.assertEqual(v.value, len('hello, world'))
def testGetAndSet(self):
"""Test get-and-set."""
v = util.AtomicValue(42)
self.assertEqual(v.getAndSet(666), 42)
self.assertEqual(v.value, 666)
def testAddAndGet(self):
"""Test add-and-get."""
v = util.AtomicValue(42)
self.assertEqual(v.addAndGet(8), 50)
self.assertEqual(v.value, 50)
|
<commit_before><commit_msg>Add unit tests for AtomicValue methods.<commit_after>
|
# Copyright 2012 The scales Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the util module."""
from greplin.scales import util
import unittest
class AtomicValueTest(unittest.TestCase):
"""Tests for atomic values."""
def testUpdate(self):
"""Test update functions."""
v = util.AtomicValue('hello, world')
self.assertEqual(v.update(len), ('hello, world', len('hello, world')))
self.assertEqual(v.value, len('hello, world'))
def testGetAndSet(self):
"""Test get-and-set."""
v = util.AtomicValue(42)
self.assertEqual(v.getAndSet(666), 42)
self.assertEqual(v.value, 666)
def testAddAndGet(self):
"""Test add-and-get."""
v = util.AtomicValue(42)
self.assertEqual(v.addAndGet(8), 50)
self.assertEqual(v.value, 50)
|
Add unit tests for AtomicValue methods.# Copyright 2012 The scales Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the util module."""
from greplin.scales import util
import unittest
class AtomicValueTest(unittest.TestCase):
"""Tests for atomic values."""
def testUpdate(self):
"""Test update functions."""
v = util.AtomicValue('hello, world')
self.assertEqual(v.update(len), ('hello, world', len('hello, world')))
self.assertEqual(v.value, len('hello, world'))
def testGetAndSet(self):
"""Test get-and-set."""
v = util.AtomicValue(42)
self.assertEqual(v.getAndSet(666), 42)
self.assertEqual(v.value, 666)
def testAddAndGet(self):
"""Test add-and-get."""
v = util.AtomicValue(42)
self.assertEqual(v.addAndGet(8), 50)
self.assertEqual(v.value, 50)
|
<commit_before><commit_msg>Add unit tests for AtomicValue methods.<commit_after># Copyright 2012 The scales Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the util module."""
from greplin.scales import util
import unittest
class AtomicValueTest(unittest.TestCase):
"""Tests for atomic values."""
def testUpdate(self):
"""Test update functions."""
v = util.AtomicValue('hello, world')
self.assertEqual(v.update(len), ('hello, world', len('hello, world')))
self.assertEqual(v.value, len('hello, world'))
def testGetAndSet(self):
"""Test get-and-set."""
v = util.AtomicValue(42)
self.assertEqual(v.getAndSet(666), 42)
self.assertEqual(v.value, 666)
def testAddAndGet(self):
"""Test add-and-get."""
v = util.AtomicValue(42)
self.assertEqual(v.addAndGet(8), 50)
self.assertEqual(v.value, 50)
|
|
3b0649706b1e01c52045bdcb33f8aeb61aef2635
|
test/test_conduit/test_compound.py
|
test/test_conduit/test_compound.py
|
import unittest
utils = __import__('utils') # Suppress PyCharm warning
from picdump import conduit
class DummySource:
def __init__(self, src):
self.src = src
self.it = None
self.reset()
def __next__(self):
return next(self.it)
def reset(self):
self.it = iter(self.src)
def src(iterable):
return DummySource(iterable)
class TestCompound(unittest.TestCase):
def test_unique_cycle(self):
src_a = src([1, 2, 3, 4])
src_b = src([])
src_c = src(['a', 1, 5, 'b'])
source = conduit.unique(conduit.cyclic(src_a, src_b, src_c))
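        # cyclic interleaves the three sources round-robin; unique drops the duplicate 1 from src_c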
        self.assertEqual([1, 'a', 2, 3, 5, 4, 'b'], list(source))
        source.reset()
        self.assertEqual([1, 'a', 2, 3, 5, 4, 'b'], list(source))
|
Add conduit compound test. (join test)
|
Add conduit compound test. (join test)
|
Python
|
mit
|
kanosaki/PicDump,kanosaki/PicDump
|
Add conduit compound test. (join test)
|
import unittest
utils = __import__('utils') # Suppress PyCharm warning
from picdump import conduit
class DummySource:
def __init__(self, src):
self.src = src
self.it = None
self.reset()
def __next__(self):
return next(self.it)
def reset(self):
self.it = iter(self.src)
def src(iterable):
return DummySource(iterable)
class TestCompound(unittest.TestCase):
def test_unique_cycle(self):
src_a = src([1, 2, 3, 4])
src_b = src([])
src_c = src(['a', 1, 5, 'b'])
source = conduit.unique(conduit.cyclic(src_a, src_b, src_c))
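        # cyclic interleaves the three sources round-robin; unique drops the duplicate 1 from src_c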
        self.assertEqual([1, 'a', 2, 3, 5, 4, 'b'], list(source))
        source.reset()
        self.assertEqual([1, 'a', 2, 3, 5, 4, 'b'], list(source))
|
<commit_before><commit_msg>Add conduit compound test. (join test)<commit_after>
|
import unittest
utils = __import__('utils') # Suppress PyCharm warning
from picdump import conduit
class DummySource:
def __init__(self, src):
self.src = src
self.it = None
self.reset()
def __next__(self):
return next(self.it)
def reset(self):
self.it = iter(self.src)
def src(iterable):
return DummySource(iterable)
class TestCompound(unittest.TestCase):
def test_unique_cycle(self):
src_a = src([1, 2, 3, 4])
src_b = src([])
src_c = src(['a', 1, 5, 'b'])
source = conduit.unique(conduit.cyclic(src_a, src_b, src_c))
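        # cyclic interleaves the three sources round-robin; unique drops the duplicate 1 from src_c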
        self.assertEqual([1, 'a', 2, 3, 5, 4, 'b'], list(source))
        source.reset()
        self.assertEqual([1, 'a', 2, 3, 5, 4, 'b'], list(source))
|
Add conduit compound test. (join test)import unittest
utils = __import__('utils') # Suppress PyCharm warning
from picdump import conduit
class DummySource:
def __init__(self, src):
self.src = src
self.it = None
self.reset()
def __next__(self):
return next(self.it)
def reset(self):
self.it = iter(self.src)
def src(iterable):
return DummySource(iterable)
class TestCompound(unittest.TestCase):
def test_unique_cycle(self):
src_a = src([1, 2, 3, 4])
src_b = src([])
src_c = src(['a', 1, 5, 'b'])
source = conduit.unique(conduit.cyclic(src_a, src_b, src_c))
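        # cyclic interleaves the three sources round-robin; unique drops the duplicate 1 from src_c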
        self.assertEqual([1, 'a', 2, 3, 5, 4, 'b'], list(source))
        source.reset()
        self.assertEqual([1, 'a', 2, 3, 5, 4, 'b'], list(source))
|
<commit_before><commit_msg>Add conduit compound test. (join test)<commit_after>import unittest
utils = __import__('utils') # Suppress PyCharm warning
from picdump import conduit
class DummySource:
def __init__(self, src):
self.src = src
self.it = None
self.reset()
def __next__(self):
return next(self.it)
def reset(self):
self.it = iter(self.src)
def src(iterable):
return DummySource(iterable)
class TestCompound(unittest.TestCase):
def test_unique_cycle(self):
src_a = src([1, 2, 3, 4])
src_b = src([])
src_c = src(['a', 1, 5, 'b'])
source = conduit.unique(conduit.cyclic(src_a, src_b, src_c))
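        # cyclic interleaves the three sources round-robin; unique drops the duplicate 1 from src_c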
        self.assertEqual([1, 'a', 2, 3, 5, 4, 'b'], list(source))
        source.reset()
        self.assertEqual([1, 'a', 2, 3, 5, 4, 'b'], list(source))
|
|
f55cc53892540da871394d88835249b7f594f573
|
beavy/common/morphing_schema.py
|
beavy/common/morphing_schema.py
|
class MorphingSchema():
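    # subclasses are expected to define `registry` (polymorphic identity -> schema class) and a FALLBACK schema class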
def dump(self, obj):
return self._get_serializer(obj).dump(obj)
def _obj_to_name(self, obj):
return obj.__mapper__.polymorphic_identity
def _get_serializer(self, obj):
name = self._obj_to_name(obj)
return self.registry.get(name, self.FALLBACK)()
|
Add missing Morphing Schema File
|
Add missing Morphing Schema File
|
Python
|
mpl-2.0
|
beavyHQ/beavy,beavyHQ/beavy,beavyHQ/beavy,beavyHQ/beavy
|
Add missing Morphing Schema File
|
class MorphingSchema():
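    # subclasses are expected to define `registry` (polymorphic identity -> schema class) and a FALLBACK schema class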
def dump(self, obj):
return self._get_serializer(obj).dump(obj)
def _obj_to_name(self, obj):
return obj.__mapper__.polymorphic_identity
def _get_serializer(self, obj):
name = self._obj_to_name(obj)
return self.registry.get(name, self.FALLBACK)()
|
<commit_before><commit_msg>Add missing Morphing Schema File<commit_after>
|
class MorphingSchema():
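    # subclasses are expected to define `registry` (polymorphic identity -> schema class) and a FALLBACK schema class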
def dump(self, obj):
return self._get_serializer(obj).dump(obj)
def _obj_to_name(self, obj):
return obj.__mapper__.polymorphic_identity
def _get_serializer(self, obj):
name = self._obj_to_name(obj)
return self.registry.get(name, self.FALLBACK)()
|
Add missing Morphing Schema Fileclass MorphingSchema():
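    # subclasses are expected to define `registry` (polymorphic identity -> schema class) and a FALLBACK schema class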
def dump(self, obj):
return self._get_serializer(obj).dump(obj)
def _obj_to_name(self, obj):
return obj.__mapper__.polymorphic_identity
def _get_serializer(self, obj):
name = self._obj_to_name(obj)
return self.registry.get(name, self.FALLBACK)()
|
<commit_before><commit_msg>Add missing Morphing Schema File<commit_after>class MorphingSchema():
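    # subclasses are expected to define `registry` (polymorphic identity -> schema class) and a FALLBACK schema class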
def dump(self, obj):
return self._get_serializer(obj).dump(obj)
def _obj_to_name(self, obj):
return obj.__mapper__.polymorphic_identity
def _get_serializer(self, obj):
name = self._obj_to_name(obj)
return self.registry.get(name, self.FALLBACK)()
|
|
a558672d790a07edbf42f84524db77e472de33b2
|
pandas_patch/htest.py
|
pandas_patch/htest.py
|
"""
@author: efourrier
Purpose : This module is designed to provide custom assertion helpers that return True or
False so you can quickly include them in tests against your DataFrame
"""
#########################################################
# Import Packages and Constants
#########################################################
import pandas as pd
from pandas_patch import *
import numpy as np
#########################################################
# Custom assert function
#########################################################
def isna(df, index, axis=1):
    """ This function will return True if there are NA values in the index of the DataFrame
    Parameters
    -----------
    index : index can be a list of columns or a rows index
    """
    return pd.isnull(df[index]).any().any()
def is_nacolumns(df, columns):
    """ This function will return True if at least one of the columns is composed only of missing values """
    return pd.isnull(df[columns]).all().any()
def is_positive(df, columns):
    """ Return True if all columns are positive """
    return (df[columns] > 0).all().all()
def is_compact(df, column, inf=None, sup=None):
    """ Return True if the column meets the inf and sup criteria """
    if inf is None:
        return (df[column] <= sup).all().all()
    elif sup is None:
        return (df[column] >= inf).all().all()
    else:
        return ((df[column] >= inf) & (df[column] <= sup)).all().all()
def is_many_missing(df, columns, threshold=0.5):
    """ Return True if the share of missing values in at least one column exceeds threshold """
    # the signature and the 0.5 default are assumptions: the original stub had no body
    return (pd.isnull(df[columns]).mean() > threshold).any()
def is_key(df, columns):
    return df[columns].apply(lambda col: len(pd.unique(col)) == len(df.index)).all()
def is_constant(df, columns):
    return df[columns].apply(lambda col: len(pd.unique(col)) == 1).all()
def is_numeric(df, columns):
    """ Return True if all columns are numeric """
    return df[columns].apply(lambda col: np.issubdtype(col.dtype, np.number)).all()
def is_mixed_uni_str(df):
    """ Return True if there is str type (byte in python 2.7) and unicode """
    types = set(df.apply(lambda x: pd.lib.infer_dtype(x.values)))
    return 'unicode' in types and 'string' in types
def is_unicode(df, columns):
    # infer_dtype returns a string label, so compare against 'unicode' rather than the type
    return df[columns].apply(lambda x: pd.lib.infer_dtype(x.values) == 'unicode').any()
|
Add helpers to do consistency tests on pandas DataFrame
|
Add helpers to do consistency tests on pandas DataFrame
|
Python
|
mit
|
ericfourrier/pandas-patch
|
Add helpers to do consistency tests on pandas DataFrame
|
"""
@author: efourrier
Purpose : This module is designed to provide custom assertion helpers that return True or
False so you can quickly include them in tests against your DataFrame
"""
#########################################################
# Import Packages and Constants
#########################################################
import pandas as pd
from pandas_patch import *
import numpy as np
#########################################################
# Custom assert function
#########################################################
def isna(df, index, axis=1):
    """ This function will return True if there are NA values in the index of the DataFrame
    Parameters
    -----------
    index : index can be a list of columns or a rows index
    """
    return pd.isnull(df[index]).any().any()
def is_nacolumns(df, columns):
    """ This function will return True if at least one of the columns is composed only of missing values """
    return pd.isnull(df[columns]).all().any()
def is_positive(df, columns):
    """ Return True if all columns are positive """
    return (df[columns] > 0).all().all()
def is_compact(df, column, inf=None, sup=None):
    """ Return True if the column meets the inf and sup criteria """
    if inf is None:
        return (df[column] <= sup).all().all()
    elif sup is None:
        return (df[column] >= inf).all().all()
    else:
        return ((df[column] >= inf) & (df[column] <= sup)).all().all()
def is_many_missing(df, columns, threshold=0.5):
    """ Return True if the share of missing values in at least one column exceeds threshold """
    # the signature and the 0.5 default are assumptions: the original stub had no body
    return (pd.isnull(df[columns]).mean() > threshold).any()
def is_key(df, columns):
    return df[columns].apply(lambda col: len(pd.unique(col)) == len(df.index)).all()
def is_constant(df, columns):
    return df[columns].apply(lambda col: len(pd.unique(col)) == 1).all()
def is_numeric(df, columns):
    """ Return True if all columns are numeric """
    return df[columns].apply(lambda col: np.issubdtype(col.dtype, np.number)).all()
def is_mixed_uni_str(df):
    """ Return True if there is str type (byte in python 2.7) and unicode """
    types = set(df.apply(lambda x: pd.lib.infer_dtype(x.values)))
    return 'unicode' in types and 'string' in types
def is_unicode(df, columns):
    # infer_dtype returns a string label, so compare against 'unicode' rather than the type
    return df[columns].apply(lambda x: pd.lib.infer_dtype(x.values) == 'unicode').any()
|
<commit_before><commit_msg>Add helpers to do consistency tests on pandas DataFrame<commit_after>
|
"""
@author: efourrier
Purpose : This module is designed to provide custom assertion helpers that return True or
False so you can quickly include them in tests against your DataFrame
"""
#########################################################
# Import Packages and Constants
#########################################################
import pandas as pd
from pandas_patch import *
import numpy as np
#########################################################
# Custom assert function
#########################################################
def isna(df, index, axis=1):
    """ This function will return True if there are NA values in the index of the DataFrame
    Parameters
    -----------
    index : index can be a list of columns or a rows index
    """
    return pd.isnull(df[index]).any().any()
def is_nacolumns(df, columns):
    """ This function will return True if at least one of the columns is composed only of missing values """
    return pd.isnull(df[columns]).all().any()
def is_positive(df, columns):
    """ Return True if all columns are positive """
    return (df[columns] > 0).all().all()
def is_compact(df, column, inf=None, sup=None):
    """ Return True if the column meets the inf and sup criteria """
    if inf is None:
        return (df[column] <= sup).all().all()
    elif sup is None:
        return (df[column] >= inf).all().all()
    else:
        return ((df[column] >= inf) & (df[column] <= sup)).all().all()
def is_many_missing(df, columns, threshold=0.5):
    """ Return True if the share of missing values in at least one column exceeds threshold """
    # the signature and the 0.5 default are assumptions: the original stub had no body
    return (pd.isnull(df[columns]).mean() > threshold).any()
def is_key(df, columns):
    return df[columns].apply(lambda col: len(pd.unique(col)) == len(df.index)).all()
def is_constant(df, columns):
    return df[columns].apply(lambda col: len(pd.unique(col)) == 1).all()
def is_numeric(df, columns):
    """ Return True if all columns are numeric """
    return df[columns].apply(lambda col: np.issubdtype(col.dtype, np.number)).all()
def is_mixed_uni_str(df):
    """ Return True if there is str type (byte in python 2.7) and unicode """
    types = set(df.apply(lambda x: pd.lib.infer_dtype(x.values)))
    return 'unicode' in types and 'string' in types
def is_unicode(df, columns):
    # infer_dtype returns a string label, so compare against 'unicode' rather than the type
    return df[columns].apply(lambda x: pd.lib.infer_dtype(x.values) == 'unicode').any()
|
Add helpers to do consistency tests on pandas DataFrame"""
@author: efourrier
Purpose : This module is designed to provide custom assertion helpers that return True or
False so you can quickly include them in tests against your DataFrame
"""
#########################################################
# Import Packages and Constants
#########################################################
import pandas as pd
from pandas_patch import *
import numpy as np
#########################################################
# Custom assert function
#########################################################
def isna(df, index, axis=1):
    """ This function will return True if there are NA values in the index of the DataFrame
    Parameters
    -----------
    index : index can be a list of columns or a rows index
    """
    return pd.isnull(df[index]).any().any()
def is_nacolumns(df, columns):
    """ This function will return True if at least one of the columns is composed only of missing values """
    return pd.isnull(df[columns]).all().any()
def is_positive(df, columns):
    """ Return True if all columns are positive """
    return (df[columns] > 0).all().all()
def is_compact(df, column, inf=None, sup=None):
    """ Return True if the column meets the inf and sup criteria """
    if inf is None:
        return (df[column] <= sup).all().all()
    elif sup is None:
        return (df[column] >= inf).all().all()
    else:
        return ((df[column] >= inf) & (df[column] <= sup)).all().all()
def is_many_missing(df, columns, threshold=0.5):
    """ Return True if the share of missing values in at least one column exceeds threshold """
    # the signature and the 0.5 default are assumptions: the original stub had no body
    return (pd.isnull(df[columns]).mean() > threshold).any()
def is_key(df, columns):
    return df[columns].apply(lambda col: len(pd.unique(col)) == len(df.index)).all()
def is_constant(df, columns):
    return df[columns].apply(lambda col: len(pd.unique(col)) == 1).all()
def is_numeric(df, columns):
    """ Return True if all columns are numeric """
    return df[columns].apply(lambda col: np.issubdtype(col.dtype, np.number)).all()
def is_mixed_uni_str(df):
    """ Return True if there is str type (byte in python 2.7) and unicode """
    types = set(df.apply(lambda x: pd.lib.infer_dtype(x.values)))
    return 'unicode' in types and 'string' in types
def is_unicode(df, columns):
    # infer_dtype returns a string label, so compare against 'unicode' rather than the type
    return df[columns].apply(lambda x: pd.lib.infer_dtype(x.values) == 'unicode').any()
|
<commit_before><commit_msg>Add helpers to do consistency tests on pandas DataFrame<commit_after>"""
@author: efourrier
Purpose : This module is designed to provide custom assertion helpers that return True or
False so you can quickly include them in tests against your DataFrame
"""
#########################################################
# Import Packages and Constants
#########################################################
import pandas as pd
from pandas_patch import *
import numpy as np
#########################################################
# Custom assert function
#########################################################
def isna(df, index, axis=1):
    """ This function will return True if there are NA values in the index of the DataFrame
    Parameters
    -----------
    index : index can be a list of columns or a rows index
    """
    return pd.isnull(df[index]).any().any()
def is_nacolumns(df, columns):
    """ This function will return True if at least one of the columns is composed only of missing values """
    return pd.isnull(df[columns]).all().any()
def is_positive(df, columns):
    """ Return True if all columns are positive """
    return (df[columns] > 0).all().all()
def is_compact(df, column, inf=None, sup=None):
    """ Return True if the column meets the inf and sup criteria """
    if inf is None:
        return (df[column] <= sup).all().all()
    elif sup is None:
        return (df[column] >= inf).all().all()
    else:
        return ((df[column] >= inf) & (df[column] <= sup)).all().all()
def is_many_missing(df, columns, threshold=0.5):
    """ Return True if the share of missing values in at least one column exceeds threshold """
    # the signature and the 0.5 default are assumptions: the original stub had no body
    return (pd.isnull(df[columns]).mean() > threshold).any()
def is_key(df, columns):
    return df[columns].apply(lambda col: len(pd.unique(col)) == len(df.index)).all()
def is_constant(df, columns):
    return df[columns].apply(lambda col: len(pd.unique(col)) == 1).all()
def is_numeric(df, columns):
    """ Return True if all columns are numeric """
    return df[columns].apply(lambda col: np.issubdtype(col.dtype, np.number)).all()
def is_mixed_uni_str(df):
    """ Return True if there is str type (byte in python 2.7) and unicode """
    types = set(df.apply(lambda x: pd.lib.infer_dtype(x.values)))
    return 'unicode' in types and 'string' in types
def is_unicode(df, columns):
    # infer_dtype returns a string label, so compare against 'unicode' rather than the type
    return df[columns].apply(lambda x: pd.lib.infer_dtype(x.values) == 'unicode').any()
|
|
6bf8f2076f5b4f154359832fe69bfa877b52012e
|
polemarch/main/migrations/0029_auto_20171124_0625.py
|
polemarch/main/migrations/0029_auto_20171124_0625.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-24 06:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('main', '0028_auto_20171123_0532'),
]
operations = [
migrations.RemoveField(
model_name='periodictask',
name='inventory',
),
migrations.AddField(
model_name='periodictask',
name='_inventory',
field=models.ForeignKey(blank=True, db_column='inventory', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='periodic_tasks', related_query_name='periodic_tasks', to='main.Inventory'),
),
migrations.AddField(
model_name='periodictask',
name='inventory_file',
field=models.CharField(blank=True, max_length=2048, null=True),
),
]
|
Use inventory files from project (forgotten migration)
|
Use inventory files from project (forgotten migration)
|
Python
|
agpl-3.0
|
vstconsulting/polemarch,vstconsulting/polemarch,vstconsulting/polemarch,vstconsulting/polemarch
|
Use inventory files from project (forgotten migration)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-24 06:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('main', '0028_auto_20171123_0532'),
]
operations = [
migrations.RemoveField(
model_name='periodictask',
name='inventory',
),
migrations.AddField(
model_name='periodictask',
name='_inventory',
field=models.ForeignKey(blank=True, db_column='inventory', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='periodic_tasks', related_query_name='periodic_tasks', to='main.Inventory'),
),
migrations.AddField(
model_name='periodictask',
name='inventory_file',
field=models.CharField(blank=True, max_length=2048, null=True),
),
]
|
<commit_before><commit_msg>Use inventory files from project (forgotten migration)<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-24 06:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('main', '0028_auto_20171123_0532'),
]
operations = [
migrations.RemoveField(
model_name='periodictask',
name='inventory',
),
migrations.AddField(
model_name='periodictask',
name='_inventory',
field=models.ForeignKey(blank=True, db_column='inventory', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='periodic_tasks', related_query_name='periodic_tasks', to='main.Inventory'),
),
migrations.AddField(
model_name='periodictask',
name='inventory_file',
field=models.CharField(blank=True, max_length=2048, null=True),
),
]
|
Use inventory files from project (forgotten migration)# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-24 06:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('main', '0028_auto_20171123_0532'),
]
operations = [
migrations.RemoveField(
model_name='periodictask',
name='inventory',
),
migrations.AddField(
model_name='periodictask',
name='_inventory',
field=models.ForeignKey(blank=True, db_column='inventory', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='periodic_tasks', related_query_name='periodic_tasks', to='main.Inventory'),
),
migrations.AddField(
model_name='periodictask',
name='inventory_file',
field=models.CharField(blank=True, max_length=2048, null=True),
),
]
|
<commit_before><commit_msg>Use inventory files from project (forgotten migration)<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-24 06:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('main', '0028_auto_20171123_0532'),
]
operations = [
migrations.RemoveField(
model_name='periodictask',
name='inventory',
),
migrations.AddField(
model_name='periodictask',
name='_inventory',
field=models.ForeignKey(blank=True, db_column='inventory', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='periodic_tasks', related_query_name='periodic_tasks', to='main.Inventory'),
),
migrations.AddField(
model_name='periodictask',
name='inventory_file',
field=models.CharField(blank=True, max_length=2048, null=True),
),
]
|
|
eb19b1f428f2e5d98a128a94dd6607e4ad6965ba
|
alembic/versions/9a83475c60c3_add_favorites_user_ids.py
|
alembic/versions/9a83475c60c3_add_favorites_user_ids.py
|
"""Add favorites user_ids
Revision ID: 9a83475c60c3
Revises: 8ce9b3da799e
Create Date: 2017-03-28 11:37:03.861572
"""
# revision identifiers, used by Alembic.
revision = '9a83475c60c3'
down_revision = '8ce9b3da799e'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
field = 'fav_user_ids'
def upgrade():
op.add_column('task', sa.Column(field, postgresql.ARRAY(sa.Integer)))
def downgrade():
op.drop_column('task', field)
|
Add ARRAY column to handle user favorites.
|
Add ARRAY column to handle user favorites.
|
Python
|
agpl-3.0
|
Scifabric/pybossa,PyBossa/pybossa,Scifabric/pybossa,PyBossa/pybossa
|
Add ARRAY column to handle user favorites.
|
"""Add favorites user_ids
Revision ID: 9a83475c60c3
Revises: 8ce9b3da799e
Create Date: 2017-03-28 11:37:03.861572
"""
# revision identifiers, used by Alembic.
revision = '9a83475c60c3'
down_revision = '8ce9b3da799e'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
field = 'fav_user_ids'
def upgrade():
op.add_column('task', sa.Column(field, postgresql.ARRAY(sa.Integer)))
def downgrade():
op.drop_column('task', field)
|
<commit_before><commit_msg>Add ARRAY column to handle user favorites.<commit_after>
|
"""Add favorites user_ids
Revision ID: 9a83475c60c3
Revises: 8ce9b3da799e
Create Date: 2017-03-28 11:37:03.861572
"""
# revision identifiers, used by Alembic.
revision = '9a83475c60c3'
down_revision = '8ce9b3da799e'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
field = 'fav_user_ids'
def upgrade():
op.add_column('task', sa.Column(field, postgresql.ARRAY(sa.Integer)))
def downgrade():
op.drop_column('task', field)
|
Add ARRAY column to handle user favorites."""Add favorites user_ids
Revision ID: 9a83475c60c3
Revises: 8ce9b3da799e
Create Date: 2017-03-28 11:37:03.861572
"""
# revision identifiers, used by Alembic.
revision = '9a83475c60c3'
down_revision = '8ce9b3da799e'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
field = 'fav_user_ids'
def upgrade():
op.add_column('task', sa.Column(field, postgresql.ARRAY(sa.Integer)))
def downgrade():
op.drop_column('task', field)
|
<commit_before><commit_msg>Add ARRAY column to handle user favorites.<commit_after>"""Add favorites user_ids
Revision ID: 9a83475c60c3
Revises: 8ce9b3da799e
Create Date: 2017-03-28 11:37:03.861572
"""
# revision identifiers, used by Alembic.
revision = '9a83475c60c3'
down_revision = '8ce9b3da799e'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
field = 'fav_user_ids'
def upgrade():
op.add_column('task', sa.Column(field, postgresql.ARRAY(sa.Integer)))
def downgrade():
op.drop_column('task', field)
|
|
c8432c6c7696bc9b60e3ad25a88b6e9f10fc8f11
|
src/ggrc/migrations/versions/20160223152916_204540106539_assessment_titles.py
|
src/ggrc/migrations/versions/20160223152916_204540106539_assessment_titles.py
|
# Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: ivan@reciprocitylabs.com
# Maintained By: ivan@reciprocitylabs.com
"""Assessment titles
Revision ID: 204540106539
Revises: 4e989ef86619
Create Date: 2016-02-23 15:29:16.361412
"""
# revision identifiers, used by Alembic.
revision = '204540106539'
down_revision = '4e989ef86619'
from alembic import op
from ggrc.models.assessment import Assessment
from ggrc.migrations.utils import resolve_duplicates
def upgrade():
op.drop_constraint('uq_t_control_assessments', 'assessments', 'unique')
def downgrade():
resolve_duplicates(Assessment, 'title', ' ')
op.create_unique_constraint('uq_t_control_assessments', 'assessments', ['title'])
|
Make assessment titles not unique
|
Make assessment titles not unique
|
Python
|
apache-2.0
|
kr41/ggrc-core,josthkko/ggrc-core,prasannav7/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,kr41/ggrc-core,NejcZupec/ggrc-core,edofic/ggrc-core,VinnieJohns/ggrc-core,prasannav7/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core,j0gurt/ggrc-core,josthkko/ggrc-core,selahssea/ggrc-core,AleksNeStu/ggrc-core,edofic/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,edofic/ggrc-core,andrei-karalionak/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,edofic/ggrc-core,NejcZupec/ggrc-core,j0gurt/ggrc-core,prasannav7/ggrc-core,NejcZupec/ggrc-core,AleksNeStu/ggrc-core,prasannav7/ggrc-core,j0gurt/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,kr41/ggrc-core,andrei-karalionak/ggrc-core,NejcZupec/ggrc-core,josthkko/ggrc-core,plamut/ggrc-core,kr41/ggrc-core,j0gurt/ggrc-core
|
Make assessment titles not unique
|
# Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: ivan@reciprocitylabs.com
# Maintained By: ivan@reciprocitylabs.com
"""Assessment titles
Revision ID: 204540106539
Revises: 4e989ef86619
Create Date: 2016-02-23 15:29:16.361412
"""
# revision identifiers, used by Alembic.
revision = '204540106539'
down_revision = '4e989ef86619'
from alembic import op
from ggrc.models.assessment import Assessment
from ggrc.migrations.utils import resolve_duplicates
def upgrade():
op.drop_constraint('uq_t_control_assessments', 'assessments', 'unique')
def downgrade():
resolve_duplicates(Assessment, 'title', ' ')
op.create_unique_constraint('uq_t_control_assessments', 'assessments', ['title'])
|
<commit_before><commit_msg>Make assessment titles not unique<commit_after>
|
# Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: ivan@reciprocitylabs.com
# Maintained By: ivan@reciprocitylabs.com
"""Assessment titles
Revision ID: 204540106539
Revises: 4e989ef86619
Create Date: 2016-02-23 15:29:16.361412
"""
# revision identifiers, used by Alembic.
revision = '204540106539'
down_revision = '4e989ef86619'
from alembic import op
from ggrc.models.assessment import Assessment
from ggrc.migrations.utils import resolve_duplicates
def upgrade():
op.drop_constraint('uq_t_control_assessments', 'assessments', 'unique')
def downgrade():
resolve_duplicates(Assessment, 'title', ' ')
op.create_unique_constraint('uq_t_control_assessments', 'assessments', ['title'])
|
Make assessment titles not unique# Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: ivan@reciprocitylabs.com
# Maintained By: ivan@reciprocitylabs.com
"""Assessment titles
Revision ID: 204540106539
Revises: 4e989ef86619
Create Date: 2016-02-23 15:29:16.361412
"""
# revision identifiers, used by Alembic.
revision = '204540106539'
down_revision = '4e989ef86619'
from alembic import op
from ggrc.models.assessment import Assessment
from ggrc.migrations.utils import resolve_duplicates
def upgrade():
op.drop_constraint('uq_t_control_assessments', 'assessments', 'unique')
def downgrade():
resolve_duplicates(Assessment, 'title', ' ')
op.create_unique_constraint('uq_t_control_assessments', 'assessments', ['title'])
|
<commit_before><commit_msg>Make assessment titles not unique<commit_after># Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: ivan@reciprocitylabs.com
# Maintained By: ivan@reciprocitylabs.com
"""Assessment titles
Revision ID: 204540106539
Revises: 4e989ef86619
Create Date: 2016-02-23 15:29:16.361412
"""
# revision identifiers, used by Alembic.
revision = '204540106539'
down_revision = '4e989ef86619'
from alembic import op
from ggrc.models.assessment import Assessment
from ggrc.migrations.utils import resolve_duplicates
def upgrade():
op.drop_constraint('uq_t_control_assessments', 'assessments', 'unique')
def downgrade():
resolve_duplicates(Assessment, 'title', ' ')
op.create_unique_constraint('uq_t_control_assessments', 'assessments', ['title'])
|
|
4e9ebd21a980894c058cdf8c17a416bf7699b12f
|
blackjack/test/test_dealer.py
|
blackjack/test/test_dealer.py
|
#!/usr/bin/python
import os
import sys
sys.path.append(os.path.join(os.getcwd(), '../'))
import pytest
import blackjack.dealer as dealer
import blackjack.table as table
def test_dummy():
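    # smoke test: constructing a handful of Table instances should not raise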
for i in range(5):
t = table.Table()
def test_string_representation():
name = 'Lob'
assert "Dealer %s" % name == "%s" % dealer.Dealer(name, None)
|
Add dummy Dealer test module
|
Add dummy Dealer test module
|
Python
|
mit
|
suhasgaddam/blackjack-python,suhasgaddam/blackjack-python
|
Add dummy Dealer test module
|
#!/usr/bin/python
import os
import sys
sys.path.append(os.path.join(os.getcwd(), '../'))
import pytest
import blackjack.dealer as dealer
import blackjack.table as table
def test_dummy():
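    # smoke test: constructing a handful of Table instances should not raise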
for i in range(5):
t = table.Table()
def test_string_representation():
name = 'Lob'
assert "Dealer %s" % name == "%s" % dealer.Dealer(name, None)
|
<commit_before><commit_msg>Add dummy Dealer test module<commit_after>
|
#!/usr/bin/python
import os
import sys
sys.path.append(os.path.join(os.getcwd(), '../'))
import pytest
import blackjack.dealer as dealer
import blackjack.table as table
def test_dummy():
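    # smoke test: constructing a handful of Table instances should not raise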
for i in range(5):
t = table.Table()
def test_string_representation():
name = 'Lob'
assert "Dealer %s" % name == "%s" % dealer.Dealer(name, None)
|
Add dummy Dealer test module#!/usr/bin/python
import os
import sys
sys.path.append(os.path.join(os.getcwd(), '../'))
import pytest
import blackjack.dealer as dealer
import blackjack.table as table
def test_dummy():
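    # smoke test: constructing a handful of Table instances should not raise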
for i in range(5):
t = table.Table()
def test_string_representation():
name = 'Lob'
assert "Dealer %s" % name == "%s" % dealer.Dealer(name, None)
|
<commit_before><commit_msg>Add dummy Dealer test module<commit_after>#!/usr/bin/python
import os
import sys
sys.path.append(os.path.join(os.getcwd(), '../'))
import pytest
import blackjack.dealer as dealer
import blackjack.table as table
def test_dummy():
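    # smoke test: constructing a handful of Table instances should not raise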
for i in range(5):
t = table.Table()
def test_string_representation():
name = 'Lob'
assert "Dealer %s" % name == "%s" % dealer.Dealer(name, None)
|
|
49e88e23792c685832148b7d6f2549ecc8d30344
|
django/website/main/tests/test_merge_coverage_filtering.py
|
django/website/main/tests/test_merge_coverage_filtering.py
|
import os
import xml.etree.ElementTree as ET
from main.management.commands.merge_coverage_files import Command, PACKAGES_LIST
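# PACKAGES_LIST is assumed to be the ElementTree path used to locate <package> elements in the coverage XML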
def test_filtering_xml_when_no_package_filters_includes_all_packages_in_list():
merge_coverage_files_command = Command()
file_path = os.path.join(os.path.dirname(__file__), 'sample_coverage_reports', 'coverage.xml')
xml_file = ET.parse(file_path)
included = merge_coverage_files_command.filter_xml(xml_file)
    expected_packages = ['django.website.example5', 'django.website.example5.tests', 'src.second']
    actual_packages = [package.attrib['name'] for package in included]
    assert expected_packages == actual_packages
def test_filtering_xml_when_no_package_filters_includes_all_packages_in_file():
    merge_coverage_files_command = Command()
    file_path = os.path.join(os.path.dirname(__file__), 'sample_coverage_reports', 'coverage.xml')
    xml_file = ET.parse(file_path)
    merge_coverage_files_command.filter_xml(xml_file)
    packages = xml_file.getroot().findall(PACKAGES_LIST)
    expected_packages = ['django.website.example5', 'django.website.example5.tests', 'src.second']
    actual_packages = [package.attrib['name'] for package in packages]
    assert expected_packages == actual_packages
def test_filtering_xml_when_package_filters_excludes_unwanted_packages_from_list():
    merge_coverage_files_command = Command()
    merge_coverage_files_command.packagefilters = [r'^django\.website\.example5$']
    file_path = os.path.join(os.path.dirname(__file__), 'sample_coverage_reports', 'coverage.xml')
    xml_file = ET.parse(file_path)
    included = merge_coverage_files_command.filter_xml(xml_file)
    expected_packages = ['django.website.example5']
    actual_packages = [package.attrib['name'] for package in included]
    assert expected_packages == actual_packages
def test_filtering_xml_when_package_filters_excludes_unwanted_packages_from_file():
    merge_coverage_files_command = Command()
    merge_coverage_files_command.packagefilters = [r'^django\.website\.example5$']
    file_path = os.path.join(os.path.dirname(__file__), 'sample_coverage_reports', 'coverage.xml')
    xml_file = ET.parse(file_path)
    merge_coverage_files_command.filter_xml(xml_file)
    included = xml_file.getroot().findall(PACKAGES_LIST)
    expected_packages = ['django.website.example5']
    actual_packages = [package.attrib['name'] for package in included]
    assert expected_packages == actual_packages
|
Add tests for filtering coverage content
|
Add tests for filtering coverage content
|
Python
|
agpl-3.0
|
aptivate/alfie,aptivate/kashana,aptivate/kashana,aptivate/alfie,aptivate/alfie,aptivate/kashana,daniell/kashana,daniell/kashana,aptivate/kashana,daniell/kashana,daniell/kashana,aptivate/alfie
|
Add tests for filtering coverage content
|
import os
import xml.etree.ElementTree as ET
from main.management.commands.merge_coverage_files import Command, PACKAGES_LIST
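# PACKAGES_LIST is assumed to be the ElementTree path used to locate <package> elements in the coverage XML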
def test_filtering_xml_when_no_package_filters_includes_all_packages_in_list():
merge_coverage_files_command = Command()
file_path = os.path.join(os.path.dirname(__file__), 'sample_coverage_reports', 'coverage.xml')
xml_file = ET.parse(file_path)
included = merge_coverage_files_command.filter_xml(xml_file)
    expected_packages = ['django.website.example5', 'django.website.example5.tests', 'src.second']
    actual_packages = [package.attrib['name'] for package in included]
    assert expected_packages == actual_packages
def test_filtering_xml_when_no_package_filters_includes_all_packages_in_file():
    merge_coverage_files_command = Command()
    file_path = os.path.join(os.path.dirname(__file__), 'sample_coverage_reports', 'coverage.xml')
    xml_file = ET.parse(file_path)
    merge_coverage_files_command.filter_xml(xml_file)
    packages = xml_file.getroot().findall(PACKAGES_LIST)
    expected_packages = ['django.website.example5', 'django.website.example5.tests', 'src.second']
    actual_packages = [package.attrib['name'] for package in packages]
    assert expected_packages == actual_packages
def test_filtering_xml_when_package_filters_excludes_unwanted_packages_from_list():
    merge_coverage_files_command = Command()
    merge_coverage_files_command.packagefilters = [r'^django\.website\.example5$']
    file_path = os.path.join(os.path.dirname(__file__), 'sample_coverage_reports', 'coverage.xml')
    xml_file = ET.parse(file_path)
    included = merge_coverage_files_command.filter_xml(xml_file)
    expected_packages = ['django.website.example5']
    actual_packages = [package.attrib['name'] for package in included]
    assert expected_packages == actual_packages
def test_filtering_xml_when_package_filters_excludes_unwanted_packages_from_file():
    merge_coverage_files_command = Command()
    merge_coverage_files_command.packagefilters = [r'^django\.website\.example5$']
    file_path = os.path.join(os.path.dirname(__file__), 'sample_coverage_reports', 'coverage.xml')
    xml_file = ET.parse(file_path)
    merge_coverage_files_command.filter_xml(xml_file)
    included = xml_file.getroot().findall(PACKAGES_LIST)
    expected_packages = ['django.website.example5']
    actual_packages = [package.attrib['name'] for package in included]
    assert expected_packages == actual_packages
|
<commit_before><commit_msg>Add tests for filtering coverage content<commit_after>
|
import os
import xml.etree.ElementTree as ET
from main.management.commands.merge_coverage_files import Command, PACKAGES_LIST
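# PACKAGES_LIST is assumed to be the ElementTree path used to locate <package> elements in the coverage XML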
def test_filtering_xml_when_no_package_filters_includes_all_packages_in_list():
merge_coverage_files_command = Command()
file_path = os.path.join(os.path.dirname(__file__), 'sample_coverage_reports', 'coverage.xml')
xml_file = ET.parse(file_path)
included = merge_coverage_files_command.filter_xml(xml_file)
    expected_packages = ['django.website.example5', 'django.website.example5.tests', 'src.second']
    actual_packages = [package.attrib['name'] for package in included]
    assert expected_packages == actual_packages
def test_filtering_xml_when_no_package_filters_includes_all_packages_in_file():
    merge_coverage_files_command = Command()
    file_path = os.path.join(os.path.dirname(__file__), 'sample_coverage_reports', 'coverage.xml')
    xml_file = ET.parse(file_path)
    merge_coverage_files_command.filter_xml(xml_file)
    packages = xml_file.getroot().findall(PACKAGES_LIST)
    expected_packages = ['django.website.example5', 'django.website.example5.tests', 'src.second']
    actual_packages = [package.attrib['name'] for package in packages]
    assert expected_packages == actual_packages
def test_filtering_xml_when_package_filters_excludes_unwanted_packages_from_list():
    merge_coverage_files_command = Command()
    merge_coverage_files_command.packagefilters = [r'^django\.website\.example5$']
    file_path = os.path.join(os.path.dirname(__file__), 'sample_coverage_reports', 'coverage.xml')
    xml_file = ET.parse(file_path)
    included = merge_coverage_files_command.filter_xml(xml_file)
    expected_packages = ['django.website.example5']
    actual_packages = [package.attrib['name'] for package in included]
    assert expected_packages == actual_packages
def test_filtering_xml_when_package_filters_excludes_unwanted_packages_from_file():
    merge_coverage_files_command = Command()
    merge_coverage_files_command.packagefilters = [r'^django\.website\.example5$']
    file_path = os.path.join(os.path.dirname(__file__), 'sample_coverage_reports', 'coverage.xml')
    xml_file = ET.parse(file_path)
    merge_coverage_files_command.filter_xml(xml_file)
    included = xml_file.getroot().findall(PACKAGES_LIST)
    expected_packages = ['django.website.example5']
    actual_packages = [package.attrib['name'] for package in included]
    assert expected_packages == actual_packages
|
Add tests for filtering coverage contentimport os
import xml.etree.ElementTree as ET
from main.management.commands.merge_coverage_files import Command, PACKAGES_LIST
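# PACKAGES_LIST is assumed to be the ElementTree path used to locate <package> elements in the coverage XML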
def test_filtering_xml_when_no_package_filters_includes_all_packages_in_list():
merge_coverage_files_command = Command()
file_path = os.path.join(os.path.dirname(__file__), 'sample_coverage_reports', 'coverage.xml')
xml_file = ET.parse(file_path)
included = merge_coverage_files_command.filter_xml(xml_file)
    expected_packages = ['django.website.example5', 'django.website.example5.tests', 'src.second']
    actual_packages = [package.attrib['name'] for package in included]
    assert expected_packages == actual_packages
def test_filtering_xml_when_no_package_filters_includes_all_packages_in_file():
    merge_coverage_files_command = Command()
    file_path = os.path.join(os.path.dirname(__file__), 'sample_coverage_reports', 'coverage.xml')
    xml_file = ET.parse(file_path)
    merge_coverage_files_command.filter_xml(xml_file)
    packages = xml_file.getroot().findall(PACKAGES_LIST)
    expected_packages = ['django.website.example5', 'django.website.example5.tests', 'src.second']
    actual_packages = [package.attrib['name'] for package in packages]
    assert expected_packages == actual_packages
def test_filtering_xml_when_package_filters_excludes_unwanted_packages_from_list():
    merge_coverage_files_command = Command()
    merge_coverage_files_command.packagefilters = [r'^django\.website\.example5$']
    file_path = os.path.join(os.path.dirname(__file__), 'sample_coverage_reports', 'coverage.xml')
    xml_file = ET.parse(file_path)
    included = merge_coverage_files_command.filter_xml(xml_file)
    expected_packages = ['django.website.example5']
    actual_packages = [package.attrib['name'] for package in included]
    assert expected_packages == actual_packages
def test_filtering_xml_when_package_filters_excludes_unwanted_packages_from_file():
    merge_coverage_files_command = Command()
    merge_coverage_files_command.packagefilters = [r'^django\.website\.example5$']
    file_path = os.path.join(os.path.dirname(__file__), 'sample_coverage_reports', 'coverage.xml')
    xml_file = ET.parse(file_path)
    merge_coverage_files_command.filter_xml(xml_file)
    included = xml_file.getroot().findall(PACKAGES_LIST)
    expected_packages = ['django.website.example5']
    actual_packages = [package.attrib['name'] for package in included]
    assert expected_packages == actual_packages
|
<commit_before><commit_msg>Add tests for filtering coverage content<commit_after>import os
import xml.etree.ElementTree as ET
from main.management.commands.merge_coverage_files import Command, PACKAGES_LIST
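# PACKAGES_LIST is assumed to be the ElementTree path used to locate <package> elements in the coverage XML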
def test_filtering_xml_when_no_package_filters_includes_all_packages_in_list():
merge_coverage_files_command = Command()
file_path = os.path.join(os.path.dirname(__file__), 'sample_coverage_reports', 'coverage.xml')
xml_file = ET.parse(file_path)
included = merge_coverage_files_command.filter_xml(xml_file)
    expected_packages = ['django.website.example5', 'django.website.example5.tests', 'src.second']
    actual_packages = [package.attrib['name'] for package in included]
    assert expected_packages == actual_packages
def test_filtering_xml_when_no_package_filters_includes_all_packages_in_file():
    merge_coverage_files_command = Command()
    file_path = os.path.join(os.path.dirname(__file__), 'sample_coverage_reports', 'coverage.xml')
    xml_file = ET.parse(file_path)
    merge_coverage_files_command.filter_xml(xml_file)
    packages = xml_file.getroot().findall(PACKAGES_LIST)
    expected_packages = ['django.website.example5', 'django.website.example5.tests', 'src.second']
    actual_packages = [package.attrib['name'] for package in packages]
    assert expected_packages == actual_packages
def test_filtering_xml_when_package_filters_excludes_unwanted_packages_from_list():
    merge_coverage_files_command = Command()
    merge_coverage_files_command.packagefilters = [r'^django\.website\.example5$']
    file_path = os.path.join(os.path.dirname(__file__), 'sample_coverage_reports', 'coverage.xml')
    xml_file = ET.parse(file_path)
    included = merge_coverage_files_command.filter_xml(xml_file)
    expected_packages = ['django.website.example5']
    actual_packages = [package.attrib['name'] for package in included]
    assert expected_packages == actual_packages
def test_filtering_xml_when_package_filters_excludes_unwanted_packages_from_file():
    merge_coverage_files_command = Command()
    merge_coverage_files_command.packagefilters = [r'^django\.website\.example5$']
    file_path = os.path.join(os.path.dirname(__file__), 'sample_coverage_reports', 'coverage.xml')
    xml_file = ET.parse(file_path)
    merge_coverage_files_command.filter_xml(xml_file)
    included = xml_file.getroot().findall(PACKAGES_LIST)
    expected_packages = ['django.website.example5']
    actual_packages = [package.attrib['name'] for package in included]
    assert expected_packages == actual_packages
|
|
67e76cc83f117ff20f260d25326775bcd58afc92
|
account/tests/test_email_address.py
|
account/tests/test_email_address.py
|
from account.models import EmailAddress
from django.contrib.auth import authenticate
from django.contrib.auth.models import User
from django.forms import ValidationError
from django.test import TestCase, override_settings
@override_settings(ACCOUNT_EMAIL_UNIQUE=True)
class UniqueEmailAddressTestCase(TestCase):
def test_unique_email(self):
user = User.objects.create_user("user1", email="user1@example.com", password="password")
email_1 = EmailAddress(user=user, email="user2@example.com")
email_1.full_clean()
email_1.save()
validation_error = False
try:
email_2 = EmailAddress(user=user, email="USER2@example.com")
email_2.full_clean()
email_2.save()
except ValidationError:
validation_error = True
self.assertTrue(validation_error)
|
Add test case for email unique validate
|
Add test case for email unique validate
|
Python
|
mit
|
pinax/django-user-accounts,pinax/django-user-accounts
|
Add test case for email unique validate
|
from account.models import EmailAddress
from django.contrib.auth import authenticate
from django.contrib.auth.models import User
from django.forms import ValidationError
from django.test import TestCase, override_settings
@override_settings(ACCOUNT_EMAIL_UNIQUE=True)
class UniqueEmailAddressTestCase(TestCase):
def test_unique_email(self):
user = User.objects.create_user("user1", email="user1@example.com", password="password")
email_1 = EmailAddress(user=user, email="user2@example.com")
email_1.full_clean()
email_1.save()
validation_error = False
try:
email_2 = EmailAddress(user=user, email="USER2@example.com")
email_2.full_clean()
email_2.save()
except ValidationError:
validation_error = True
self.assertTrue(validation_error)
|
<commit_before><commit_msg>Add test case for email unique validate<commit_after>
|
from account.models import EmailAddress
from django.contrib.auth import authenticate
from django.contrib.auth.models import User
from django.forms import ValidationError
from django.test import TestCase, override_settings
@override_settings(ACCOUNT_EMAIL_UNIQUE=True)
class UniqueEmailAddressTestCase(TestCase):
def test_unique_email(self):
user = User.objects.create_user("user1", email="user1@example.com", password="password")
email_1 = EmailAddress(user=user, email="user2@example.com")
email_1.full_clean()
email_1.save()
validation_error = False
try:
email_2 = EmailAddress(user=user, email="USER2@example.com")
email_2.full_clean()
email_2.save()
except ValidationError:
validation_error = True
self.assertTrue(validation_error)
|
Add test case for email unique validatefrom account.models import EmailAddress
from django.contrib.auth import authenticate
from django.contrib.auth.models import User
from django.forms import ValidationError
from django.test import TestCase, override_settings
@override_settings(ACCOUNT_EMAIL_UNIQUE=True)
class UniqueEmailAddressTestCase(TestCase):
def test_unique_email(self):
user = User.objects.create_user("user1", email="user1@example.com", password="password")
email_1 = EmailAddress(user=user, email="user2@example.com")
email_1.full_clean()
email_1.save()
validation_error = False
try:
email_2 = EmailAddress(user=user, email="USER2@example.com")
email_2.full_clean()
email_2.save()
except ValidationError:
validation_error = True
self.assertTrue(validation_error)
|
<commit_before><commit_msg>Add test case for email unique validate<commit_after>from account.models import EmailAddress
from django.contrib.auth import authenticate
from django.contrib.auth.models import User
from django.forms import ValidationError
from django.test import TestCase, override_settings
@override_settings(ACCOUNT_EMAIL_UNIQUE=True)
class UniqueEmailAddressTestCase(TestCase):
def test_unique_email(self):
user = User.objects.create_user("user1", email="user1@example.com", password="password")
email_1 = EmailAddress(user=user, email="user2@example.com")
email_1.full_clean()
email_1.save()
validation_error = False
try:
email_2 = EmailAddress(user=user, email="USER2@example.com")
email_2.full_clean()
email_2.save()
except ValidationError:
validation_error = True
self.assertTrue(validation_error)
|
|
52cf32557d0badba934472acf7e2af8387aa260e
|
sgraph-show-with-nx-layout.py
|
sgraph-show-with-nx-layout.py
|
import networkx as nx
g = nx.Graph()
# put the nodes and edges from the SGraph into a NetworkX graph
g.add_nodes_from(list(sg.vertices['__id']))
g.add_edges_from([(e['__src_id'], e['__dst_id']) for e in sg.edges])
# create the layout with NetworkX and convert to regular Python types
# you can substitute any of the layout algorithms here for circular_layout:
# http://networkx.github.io/documentation/latest/reference/drawing.html#module-networkx.drawing.layout
layout = nx.circular_layout(g)
layout = {k: map(float, list(v)) for k,v in layout.iteritems()}
# show the SGraph in Canvas with that layout
sg.vertices['x'] = sg.vertices.apply(lambda v: layout[v['__id']][0])
sg.vertices['y'] = sg.vertices.apply(lambda v: layout[v['__id']][1])
sg.show(vertex_positions=('x', 'y'))
|
Add example for SGraph.show with NetworkX layout.
|
Add example for SGraph.show with NetworkX layout.
|
Python
|
cc0-1.0
|
srikris/how-to,nagyistoce/how-to-graphlab-create,dato-code/how-to
|
Add example for SGraph.show with NetworkX layout.
|
import networkx as nx
g = nx.Graph()
# put the nodes and edges from the SGraph into a NetworkX graph
g.add_nodes_from(list(sg.vertices['__id']))
g.add_edges_from([(e['__src_id'], e['__dst_id']) for e in sg.edges])
# create the layout with NetworkX and convert to regular Python types
# you can substitute any of the layout algorithms here for circular_layout:
# http://networkx.github.io/documentation/latest/reference/drawing.html#module-networkx.drawing.layout
layout = nx.circular_layout(g)
layout = {k: map(float, list(v)) for k,v in layout.iteritems()}
# show the SGraph in Canvas with that layout
sg.vertices['x'] = sg.vertices.apply(lambda v: layout[v['__id']][0])
sg.vertices['y'] = sg.vertices.apply(lambda v: layout[v['__id']][1])
sg.show(vertex_positions=('x', 'y'))
|
<commit_before><commit_msg>Add example for SGraph.show with NetworkX layout.<commit_after>
|
import networkx as nx
g = nx.Graph()
# put the nodes and edges from the SGraph into a NetworkX graph
g.add_nodes_from(list(sg.vertices['__id']))
g.add_edges_from([(e['__src_id'], e['__dst_id']) for e in sg.edges])
# create the layout with NetworkX and convert to regular Python types
# you can substitute any of the layout algorithms here for circular_layout:
# http://networkx.github.io/documentation/latest/reference/drawing.html#module-networkx.drawing.layout
layout = nx.circular_layout(g)
layout = {k: map(float, list(v)) for k,v in layout.iteritems()}
# show the SGraph in Canvas with that layout
sg.vertices['x'] = sg.vertices.apply(lambda v: layout[v['__id']][0])
sg.vertices['y'] = sg.vertices.apply(lambda v: layout[v['__id']][1])
sg.show(vertex_positions=('x', 'y'))
|
Add example for SGraph.show with NetworkX layout.import networkx as nx
g = nx.Graph()
# put the nodes and edges from the SGraph into a NetworkX graph
g.add_nodes_from(list(sg.vertices['__id']))
g.add_edges_from([(e['__src_id'], e['__dst_id']) for e in sg.edges])
# create the layout with NetworkX and convert to regular Python types
# you can substitute any of the layout algorithms here for circular_layout:
# http://networkx.github.io/documentation/latest/reference/drawing.html#module-networkx.drawing.layout
layout = nx.circular_layout(g)
layout = {k: map(float, list(v)) for k,v in layout.iteritems()}
# show the SGraph in Canvas with that layout
sg.vertices['x'] = sg.vertices.apply(lambda v: layout[v['__id']][0])
sg.vertices['y'] = sg.vertices.apply(lambda v: layout[v['__id']][1])
sg.show(vertex_positions=('x', 'y'))
|
<commit_before><commit_msg>Add example for SGraph.show with NetworkX layout.<commit_after>import networkx as nx
g = nx.Graph()
# put the nodes and edges from the SGraph into a NetworkX graph
g.add_nodes_from(list(sg.vertices['__id']))
g.add_edges_from([(e['__src_id'], e['__dst_id']) for e in sg.edges])
# create the layout with NetworkX and convert to regular Python types
# you can substitute any of the layout algorithms here for circular_layout:
# http://networkx.github.io/documentation/latest/reference/drawing.html#module-networkx.drawing.layout
layout = nx.circular_layout(g)
layout = {k: map(float, list(v)) for k,v in layout.iteritems()}
# show the SGraph in Canvas with that layout
sg.vertices['x'] = sg.vertices.apply(lambda v: layout[v['__id']][0])
sg.vertices['y'] = sg.vertices.apply(lambda v: layout[v['__id']][1])
sg.show(vertex_positions=('x', 'y'))
|
|
bfb819eb58e28d15d55d6d4def6137e75ad0db18
|
benchmarks/bench_plot_ward.py
|
benchmarks/bench_plot_ward.py
|
"""
Bench the scikit's ward implement compared to scipy's
"""
import time
import numpy as np
from scipy.cluster import hierarchy
import pylab as pl
from scikits.learn.cluster import Ward
ward = Ward(n_clusters=15)
n_samples = np.logspace(.5, 2, 9)
n_features = np.logspace(1, 3.5, 7)
N_samples, N_features = np.meshgrid(n_samples,
n_features)
scikits_time = np.zeros(N_samples.shape)
scipy_time = np.zeros(N_samples.shape)
for i, n in enumerate(n_samples):
for j, p in enumerate(n_features):
X = np.random.normal(size=(n, p))
t0 = time.time()
ward.fit(X)
scikits_time[j, i] = time.time() - t0
t0 = time.time()
hierarchy.ward(X.T)
scipy_time[j, i] = time.time() - t0
ratio = scikits_time/scipy_time
pl.clf()
pl.imshow(np.log(ratio), aspect='auto', origin="lower")
pl.colorbar()
pl.contour(ratio, levels=[1, ], colors='k')
pl.yticks(range(len(n_features)), n_features.astype(np.int))
pl.ylabel('N features')
pl.xticks(range(len(n_samples)), n_samples.astype(np.int))
pl.xlabel('N samples')
pl.title("Scikit's time, in units of scipy time (log)")
pl.show()
|
Add a benchmark for ward
|
ENH: Add a benchmark for ward
Comparing the scikits and scipy
|
Python
|
bsd-3-clause
|
IssamLaradji/scikit-learn,marcocaccin/scikit-learn,walterreade/scikit-learn,appapantula/scikit-learn,poryfly/scikit-learn,rohanp/scikit-learn,Srisai85/scikit-learn,saiwing-yeung/scikit-learn,amueller/scikit-learn,icdishb/scikit-learn,jm-begon/scikit-learn,marcocaccin/scikit-learn,mehdidc/scikit-learn,bikong2/scikit-learn,AnasGhrab/scikit-learn,petosegan/scikit-learn,icdishb/scikit-learn,JosmanPS/scikit-learn,Obus/scikit-learn,zaxtax/scikit-learn,manhhomienbienthuy/scikit-learn,anirudhjayaraman/scikit-learn,phdowling/scikit-learn,shusenl/scikit-learn,xwolf12/scikit-learn,MartinSavc/scikit-learn,heli522/scikit-learn,Lawrence-Liu/scikit-learn,zihua/scikit-learn,mblondel/scikit-learn,robin-lai/scikit-learn,fengzhyuan/scikit-learn,Adai0808/scikit-learn,ilo10/scikit-learn,dsquareindia/scikit-learn,vortex-ape/scikit-learn,anurag313/scikit-learn,bnaul/scikit-learn,marcocaccin/scikit-learn,tawsifkhan/scikit-learn,trankmichael/scikit-learn,davidgbe/scikit-learn,joshloyal/scikit-learn,ahoyosid/scikit-learn,0asa/scikit-learn,yask123/scikit-learn,davidgbe/scikit-learn,xavierwu/scikit-learn,Garrett-R/scikit-learn,ZenDevelopmentSystems/scikit-learn,UNR-AERIAL/scikit-learn,mhdella/scikit-learn,sergeyf/scikit-learn,heli522/scikit-learn,ZENGXH/scikit-learn,ldirer/scikit-learn,shahankhatch/scikit-learn,trungnt13/scikit-learn,BiaDarkia/scikit-learn,PrashntS/scikit-learn,rajat1994/scikit-learn,Obus/scikit-learn,fabianp/scikit-learn,pnedunuri/scikit-learn,iismd17/scikit-learn,Clyde-fare/scikit-learn,jm-begon/scikit-learn,jorge2703/scikit-learn,abhishekkrthakur/scikit-learn,Jimmy-Morzaria/scikit-learn,sumspr/scikit-learn,ivannz/scikit-learn,PatrickOReilly/scikit-learn,Aasmi/scikit-learn,ky822/scikit-learn,jorik041/scikit-learn,Srisai85/scikit-learn,potash/scikit-learn,quheng/scikit-learn,aewhatley/scikit-learn,Vimos/scikit-learn,wlamond/scikit-learn,mikebenfield/scikit-learn,schets/scikit-learn,IshankGulati/scikit-learn,bnaul/scikit-learn,loli/sklearn-ensembletrees,fzalkow/scikit-learn,IshankGulati/scikit-learn,moutai/scikit-learn,ephes/scikit-learn,zaxtax/scikit-learn,mrshu/scikit-learn,hainm/scikit-learn,pypot/scikit-learn,nomadcube/scikit-learn,lazywei/scikit-learn,mugizico/scikit-learn,Garrett-R/scikit-learn,PatrickOReilly/scikit-learn,BiaDarkia/scikit-learn,xzh86/scikit-learn,ldirer/scikit-learn,RayMick/scikit-learn,liberatorqjw/scikit-learn,nikitasingh981/scikit-learn,sumspr/scikit-learn,pianomania/scikit-learn,mugizico/scikit-learn,beepee14/scikit-learn,robbymeals/scikit-learn,moutai/scikit-learn,OshynSong/scikit-learn,fabioticconi/scikit-learn,pompiduskus/scikit-learn,Lawrence-Liu/scikit-learn,mikebenfield/scikit-learn,Srisai85/scikit-learn,mlyundin/scikit-learn,devanshdalal/scikit-learn,ominux/scikit-learn,ilyes14/scikit-learn,pv/scikit-learn,YinongLong/scikit-learn,tdhopper/scikit-learn,costypetrisor/scikit-learn,vortex-ape/scikit-learn,ssaeger/scikit-learn,stylianos-kampakis/scikit-learn,Djabbz/scikit-learn,qifeigit/scikit-learn,ycaihua/scikit-learn,bikong2/scikit-learn,jlegendary/scikit-learn,ZenDevelopmentSystems/scikit-learn,krez13/scikit-learn,meduz/scikit-learn,xiaoxiamii/scikit-learn,aminert/scikit-learn,mugizico/scikit-learn,JPFrancoia/scikit-learn,YinongLong/scikit-learn,yanlend/scikit-learn,glemaitre/scikit-learn,0x0all/scikit-learn,loli/semisupervisedforests,jereze/scikit-learn,0x0all/scikit-learn,jm-begon/scikit-learn,MatthieuBizien/scikit-learn,jlegendary/scikit-learn,lesteve/scikit-learn,Sentient07/scikit-learn,massmutual/scikit-learn,ElDeveloper/scikit-learn,gotomypc/scikit-learn,jzt5132/scikit-learn,anurag313/scikit-learn,OshynSong/scikit-learn,roxyboy/scikit-learn,JsNoNo/scikit-learn,adamgreenhall/scikit-learn,ngoix/OCRF,vibhorag/scikit-learn,lenovor/scikit-learn,LohithBlaze/scikit-learn,pompiduskus/scikit-learn,themrmax/scikit-learn,LiaoPan/scikit-learn,vigilv/scikit-learn,quheng/scikit-learn,bigdataelephants/scikit-learn,liyu1990/sklearn,lucidfrontier45/scikit-learn,pythonvietnam/scikit-learn,murali-munna/scikit-learn,ChanderG/scikit-learn,khkaminska/scikit-learn,MohammedWasim/scikit-learn,glemaitre/scikit-learn,gclenaghan/scikit-learn,imaculate/scikit-learn,thientu/scikit-learn,jereze/scikit-learn,rvraghav93/scikit-learn,andrewnc/scikit-learn,q1ang/scikit-learn,shahankhatch/scikit-learn,vybstat/scikit-learn,mhue/scikit-learn,Nyker510/scikit-learn,appapantula/scikit-learn,JosmanPS/scikit-learn,PatrickChrist/scikit-learn,mhue/scikit-learn,AlexRobson/scikit-learn,imaculate/scikit-learn,tawsifkhan/scikit-learn,DonBeo/scikit-learn,ogrisel/scikit-learn,mrshu/scikit-learn,JPFrancoia/scikit-learn,murali-munna/scikit-learn,jseabold/scikit-learn,untom/scikit-learn,IndraVikas/scikit-learn,B3AU/waveTree,thilbern/scikit-learn,jorik041/scikit-learn,kevin-intel/scikit-learn,liangz0707/scikit-learn,depet/scikit-learn,robin-lai/scikit-learn,petosegan/scikit-learn,chrsrds/scikit-learn,zhenv5/scikit-learn,arabenjamin/scikit-learn,vivekmishra1991/scikit-learn,Barmaley-exe/scikit-learn,justincassidy/scikit-learn,maheshakya/scikit-learn,glouppe/scikit-learn,IssamLaradji/scikit-learn,ssaeger/scikit-learn,waterponey/scikit-learn,RachitKansal/scikit-learn,pianomania/scikit-learn,NunoEdgarGub1/scikit-learn,RomainBrault/scikit-learn,mhdella/scikit-learn,davidgbe/scikit-learn,samzhang111/scikit-learn,zorroblue/scikit-learn,Achuth17/scikit-learn,NunoEdgarGub1/scikit-learn,kagayakidan/scikit-learn,rexshihaoren/scikit-learn,glouppe/scikit-learn,gotomypc/scikit-learn,DonBeo/scikit-learn,UNR-AERIAL/scikit-learn,scikit-learn/scikit-learn,harshaneelhg/scikit-learn,thilbern/scikit-learn,abimannans/scikit-learn,CforED/Machine-Learning,jjx02230808/project0223,ycaihua/scikit-learn,mayblue9/scikit-learn,elkingtonmcb/scikit-learn,equialgo/scikit-learn,shenzebang/scikit-learn,tdhopper/scikit-learn,djgagne/scikit-learn,evgchz/scikit-learn,lbishal/scikit-learn,rrohan/scikit-learn,MohammedWasim/scikit-learn,tdhopper/scikit-learn,glennq/scikit-learn,fabianp/scikit-learn,ChanChiChoi/scikit-learn,hrjn/scikit-learn,nmayorov/scikit-learn,shenzebang/scikit-learn,jkarnows/scikit-learn,quheng/scikit-learn,ltiao/scikit-learn,jorge2703/scikit-learn,Garrett-R/scikit-learn,xyguo/scikit-learn,deepesch/scikit-learn,hainm/scikit-learn,andrewnc/scikit-learn,hainm/scikit-learn,AlexandreAbraham/scikit-learn,ElDeveloper/scikit-learn,nrhine1/scikit-learn,idlead/scikit-learn,jpautom/scikit-learn,fredhusser/scikit-learn,yyjiang/scikit-learn,mojoboss/scikit-learn,sonnyhu/scikit-learn,theoryno3/scikit-learn,rvraghav93/scikit-learn,sumspr/scikit-learn,dingocuster/scikit-learn,zihua/scikit-learn,rohanp/scikit-learn,shangwuhencc/scikit-learn,amueller/scikit-learn,sinhrks/scikit-learn,loli/sklearn-ensembletrees,alvarofierroclavero/scikit-learn,Nyker510/scikit-learn,jjx02230808/project0223,Akshay0724/scikit-learn,moutai/scikit-learn,Barmaley-exe/scikit-learn,hlin117/scikit-learn,plissonf/scikit-learn,fyffyt/scikit-learn,djgagne/scikit-learn,cauchycui/scikit-learn,equialgo/scikit-learn,olologin/scikit-learn,wanggang3333/scikit-learn,Garrett-R/scikit-learn,MartinDelzant/scikit-learn,mwv/scikit-learn,dhruv13J/scikit-learn,cl4rke/scikit-learn,sanketloke/scikit-learn,wazeerzulfikar/scikit-learn,vermouthmjl/scikit-learn,AlexanderFabisch/scikit-learn,ldirer/scikit-learn,abhishekgahlot/scikit-learn,russel1237/scikit-learn,betatim/scikit-learn,liangz0707/scikit-learn,bhargav/scikit-learn,abhishekkrthakur/scikit-learn,MechCoder/scikit-learn,PrashntS/scikit-learn,yanlend/scikit-learn,depet/scikit-learn,ngoix/OCRF,untom/scikit-learn,frank-tancf/scikit-learn,mwv/scikit-learn,clemkoa/scikit-learn,xyguo/scikit-learn,jakirkham/scikit-learn,toastedcornflakes/scikit-learn,trungnt13/scikit-learn,alexeyum/scikit-learn,liangz0707/scikit-learn,anirudhjayaraman/scikit-learn,lenovor/scikit-learn,shyamalschandra/scikit-learn,tawsifkhan/scikit-learn,r-mart/scikit-learn,liberatorqjw/scikit-learn,LohithBlaze/scikit-learn,Barmaley-exe/scikit-learn,glemaitre/scikit-learn,pnedunuri/scikit-learn,rrohan/scikit-learn,AlexanderFabisch/scikit-learn,ngoix/OCRF,rajat1994/scikit-learn,IndraVikas/scikit-learn,B3AU/waveTree,fredhusser/scikit-learn,clemkoa/scikit-learn,harshaneelhg/scikit-learn,mhue/scikit-learn,ZENGXH/scikit-learn,ZENGXH/scikit-learn,xavierwu/scikit-learn,sumspr/scikit-learn,xuewei4d/scikit-learn,fbagirov/scikit-learn,lucidfrontier45/scikit-learn,jakirkham/scikit-learn,aabadie/scikit-learn,dhruv13J/scikit-learn,zuku1985/scikit-learn,luo66/scikit-learn,nhejazi/scikit-learn,dsquareindia/scikit-learn,f3r/scikit-learn,dhruv13J/scikit-learn,mikebenfield/scikit-learn,eg-zhang/scikit-learn,trankmichael/scikit-learn,MohammedWasim/scikit-learn,shangwuhencc/scikit-learn,Aasmi/scikit-learn,loli/sklearn-ensembletrees,nhejazi/scikit-learn,f3r/scikit-learn,bikong2/scikit-learn,mfjb/scikit-learn,gotomypc/scikit-learn,JeanKossaifi/scikit-learn,adamgreenhall/scikit-learn,ilyes14/scikit-learn,etkirsch/scikit-learn,jayflo/scikit-learn,massmutual/scikit-learn,nhejazi/scikit-learn,AlexRobson/scikit-learn,qifeigit/scikit-learn,mjgrav2001/scikit-learn,lbishal/scikit-learn,walterreade/scikit-learn,nvoron23/scikit-learn,Djabbz/scikit-learn,lenovor/scikit-learn,abhishekkrthakur/scikit-learn,jakobworldpeace/scikit-learn,altairpearl/scikit-learn,aflaxman/scikit-learn,tmhm/scikit-learn,phdowling/scikit-learn,h2educ/scikit-learn,ngoix/OCRF,stylianos-kampakis/scikit-learn,nesterione/scikit-learn,Nyker510/scikit-learn,YinongLong/scikit-learn,ElDeveloper/scikit-learn,themrmax/scikit-learn,devanshdalal/scikit-learn,samuel1208/scikit-learn,nomadcube/scikit-learn,btabibian/scikit-learn,themrmax/scikit-learn,ishanic/scikit-learn,ominux/scikit-learn,jmschrei/scikit-learn,sanketloke/scikit-learn,yunfeilu/scikit-learn,kjung/scikit-learn,alvarofierroclavero/scikit-learn,nvoron23/scikit-learn,poryfly/scikit-learn,btabibian/scikit-learn,Adai0808/scikit-learn,massmutual/scikit-learn,kjung/scikit-learn,trankmichael/scikit-learn,lesteve/scikit-learn,joernhees/scikit-learn,DonBeo/scikit-learn,PatrickChrist/scikit-learn,idlead/scikit-learn,jaidevd/scikit-learn,spallavolu/scikit-learn,smartscheduling/scikit-learn-categorical-tree,altairpearl/scikit-learn,Titan-C/scikit-learn,mblondel/scikit-learn,fabioticconi/scikit-learn,terkkila/scikit-learn,huobaowangxi/scikit-learn,potash/scikit-learn,cybernet14/scikit-learn,altairpearl/scikit-learn,murali-munna/scikit-learn,cauchycui/scikit-learn,adamgreenhall/scikit-learn,devanshdalal/scikit-learn,jmetzen/scikit-learn,yanlend/scikit-learn,xyguo/scikit-learn,vermouthmjl/scikit-learn,jorge2703/scikit-learn,fengzhyuan/scikit-learn,petosegan/scikit-learn,ankurankan/scikit-learn,jereze/scikit-learn,AnasGhrab/scikit-learn,liyu1990/sklearn,mattgiguere/scikit-learn,AIML/scikit-learn,pypot/scikit-learn,vibhorag/scikit-learn,thientu/scikit-learn,depet/scikit-learn,yonglehou/scikit-learn,maheshakya/scikit-learn,jseabold/scikit-learn,shikhardb/scikit-learn,ndingwall/scikit-learn,shikhardb/scikit-learn,aewhatley/scikit-learn,tomlof/scikit-learn,hitszxp/scikit-learn,UNR-AERIAL/scikit-learn,ilo10/scikit-learn,kagayakidan/scikit-learn,tawsifkhan/scikit-learn,0x0all/scikit-learn,icdishb/scikit-learn,sinhrks/scikit-learn,ycaihua/scikit-learn,victorbergelin/scikit-learn,OshynSong/scikit-learn,beepee14/scikit-learn,vigilv/scikit-learn,RachitKansal/scikit-learn,loli/semisupervisedforests,clemkoa/scikit-learn,huzq/scikit-learn,kylerbrown/scikit-learn,jayflo/scikit-learn,sergeyf/scikit-learn,ZenDevelopmentSystems/scikit-learn,vivekmishra1991/scikit-learn,hsuantien/scikit-learn,joshloyal/scikit-learn,Vimos/scikit-learn,glouppe/scikit-learn,manashmndl/scikit-learn,appapantula/scikit-learn,CforED/Machine-Learning,henrykironde/scikit-learn,mfjb/scikit-learn,NelisVerhoef/scikit-learn,wlamond/scikit-learn,frank-tancf/scikit-learn,sgenoud/scikit-learn,bnaul/scikit-learn,cwu2011/scikit-learn,ChanChiChoi/scikit-learn,lbishal/scikit-learn,cl4rke/scikit-learn,florian-f/sklearn,olologin/scikit-learn,abhishekgahlot/scikit-learn,plissonf/scikit-learn,vigilv/scikit-learn,dingocuster/scikit-learn,henrykironde/scikit-learn,Garrett-R/scikit-learn,mlyundin/scikit-learn,waterponey/scikit-learn,harshaneelhg/scikit-learn,ClimbsRocks/scikit-learn,466152112/scikit-learn,mblondel/scikit-learn,gclenaghan/scikit-learn,0asa/scikit-learn,ngoix/OCRF,anntzer/scikit-learn,waterponey/scikit-learn,Jimmy-Morzaria/scikit-learn,pianomania/scikit-learn,ominux/scikit-learn,arahuja/scikit-learn,yask123/scikit-learn,jmetzen/scikit-learn,JeanKossaifi/scikit-learn,hugobowne/scikit-learn,mehdidc/scikit-learn,liberatorqjw/scikit-learn,LohithBlaze/scikit-learn,liberatorqjw/scikit-learn,jmschrei/scikit-learn,vybstat/scikit-learn,sinhrks/scikit-learn,pianomania/scikit-learn,manhhomienbienthuy/scikit-learn,terkkila/scikit-learn,NunoEdgarGub1/scikit-learn,alvarofierroclavero/scikit-learn,billy-inn/scikit-learn,CforED/Machine-Learning,mehdidc/scikit-learn,tmhm/scikit-learn,amueller/scikit-learn,mhue/scikit-learn,khkaminska/scikit-learn,arabenjamin/scikit-learn,rishikksh20/scikit-learn,victorbergelin/scikit-learn,robbymeals/scikit-learn,Nyker510/scikit-learn,ltiao/scikit-learn,aflaxman/scikit-learn,ClimbsRocks/scikit-learn,ashhher3/scikit-learn,raghavrv/scikit-learn,aewhatley/scikit-learn,pnedunuri/scikit-learn,kashif/scikit-learn,chrsrds/scikit-learn,cdegroc/scikit-learn,IshankGulati/scikit-learn,ndingwall/scikit-learn,untom/scikit-learn,joshloyal/scikit-learn,mfjb/scikit-learn,xubenben/scikit-learn,dsullivan7/scikit-learn,xavierwu/scikit-learn,tosolveit/scikit-learn,arjoly/scikit-learn,jzt5132/scikit-learn,AnasGhrab/scikit-learn,vibhorag/scikit-learn,spallavolu/scikit-learn,TomDLT/scikit-learn,jorge2703/scikit-learn,yonglehou/scikit-learn,anurag313/scikit-learn,DSLituiev/scikit-learn,mugizico/scikit-learn,jseabold/scikit-learn,chrisburr/scikit-learn,lin-credible/scikit-learn,joernhees/scikit-learn,xwolf12/scikit-learn,zorojean/scikit-learn,beepee14/scikit-learn,ankurankan/scikit-learn,robbymeals/scikit-learn,xubenben/scikit-learn,justincassidy/scikit-learn,aabadie/scikit-learn,Fireblend/scikit-learn,MatthieuBizien/scikit-learn,mrshu/scikit-learn,lin-credible/scikit-learn,hitszxp/scikit-learn,cdegroc/scikit-learn,vermouthmjl/scikit-learn,lazywei/scikit-learn,betatim/scikit-learn,nomadcube/scikit-learn,raghavrv/scikit-learn,kylerbrown/scikit-learn,MatthieuBizien/scikit-learn,samuel1208/scikit-learn,iismd17/scikit-learn,JsNoNo/scikit-learn,treycausey/scikit-learn,cainiaocome/scikit-learn,rsivapr/scikit-learn,hsuantien/scikit-learn,ky822/scikit-learn,zorroblue/scikit-learn,wanggang3333/scikit-learn,voxlol/scikit-learn,Akshay0724/scikit-learn,krez13/scikit-learn,Jimmy-Morzaria/scikit-learn,pythonvietnam/scikit-learn,heli522/scikit-learn,rahuldhote/scikit-learn,deepesch/scikit-learn,alexeyum/scikit-learn,Djabbz/scikit-learn,etkirsch/scikit-learn,heli522/scikit-learn,AlexandreAbraham/scikit-learn,smartscheduling/scikit-learn-categorical-tree,mayblue9/scikit-learn,sarahgrogan/scikit-learn,potash/scikit-learn,mxjl620/scikit-learn,ky822/scikit-learn,larsmans/scikit-learn,dingocuster/scikit-learn,cl4rke/scikit-learn,mxjl620/scikit-learn,carrillo/scikit-learn,xwolf12/scikit-learn,rsivapr/scikit-learn,hlin117/scikit-learn,IndraVikas/scikit-learn,ky822/scikit-learn,siutanwong/scikit-learn,JosmanPS/scikit-learn,scikit-learn/scikit-learn,mattilyra/scikit-learn,theoryno3/scikit-learn,ogrisel/scikit-learn,roxyboy/scikit-learn,khkaminska/scikit-learn,arahuja/scikit-learn,RPGOne/scikit-learn,mfjb/scikit-learn,vybstat/scikit-learn,mehdidc/scikit-learn,cdegroc/scikit-learn,JPFrancoia/scikit-learn,3manuek/scikit-learn,cybernet14/scikit-learn,giorgiop/scikit-learn,vshtanko/scikit-learn,schets/scikit-learn,jlegendary/scikit-learn,jayflo/scikit-learn,xiaoxiamii/scikit-learn,OshynSong/scikit-learn,alexsavio/scikit-learn,yyjiang/scikit-learn,jakobworldpeace/scikit-learn,belltailjp/scikit-learn,akionakamura/scikit-learn,jkarnows/scikit-learn,yunfeilu/scikit-learn,fbagirov/scikit-learn,kashif/scikit-learn,ClimbsRocks/scikit-learn,ltiao/scikit-learn,fyffyt/scikit-learn,xzh86/scikit-learn,MechCoder/scikit-learn,harshaneelhg/scikit-learn,terkkila/scikit-learn,jayflo/scikit-learn,pnedunuri/scikit-learn,treycausey/scikit-learn,madjelan/scikit-learn,betatim/scikit-learn,walterreade/scikit-learn,kmike/scikit-learn,BiaDarkia/scikit-learn,kylerbrown/scikit-learn,lenovor/scikit-learn,dingocuster/scikit-learn,tomlof/scikit-learn,Myasuka/scikit-learn,madjelan/scikit-learn,wzbozon/scikit-learn,btabibian/scikit-learn,MartinDelzant/scikit-learn,anurag313/scikit-learn,jmschrei/scikit-learn,xyguo/scikit-learn,PatrickChrist/scikit-learn,pythonvietnam/scikit-learn,jblackburne/scikit-learn,lucidfrontier45/scikit-learn,Fireblend/scikit-learn,zorroblue/scikit-learn,vshtanko/scikit-learn,mjudsp/Tsallis,3manuek/scikit-learn,jjx02230808/project0223,bigdataelephants/scikit-learn,maheshakya/scikit-learn,imaculate/scikit-learn,mxjl620/scikit-learn,rrohan/scikit-learn,yyjiang/scikit-learn,0x0all/scikit-learn,ominux/scikit-learn,terkkila/scikit-learn,wanggang3333/scikit-learn,lucidfrontier45/scikit-learn,elkingtonmcb/scikit-learn,glennq/scikit-learn,jmetzen/scikit-learn,macks22/scikit-learn,billy-inn/scikit-learn,abimannans/scikit-learn,btabibian/scikit-learn,loli/sklearn-ensembletrees,Srisai85/scikit-learn,frank-tancf/scikit-learn,vivekmishra1991/scikit-learn,jblackburne/scikit-learn,Barmaley-exe/scikit-learn,0x0all/scikit-learn,stylianos-kampakis/scikit-learn,pkruskal/scikit-learn,hlin117/scikit-learn,mojoboss/scikit-learn,tosolveit/scikit-learn,thientu/scikit-learn,zhenv5/scikit-learn,AnasGhrab/scikit-learn,treycausey/scikit-learn,ahoyosid/scikit-learn,3manuek/scikit-learn,nelson-liu/scikit-learn,thilbern/scikit-learn,aetilley/scikit-learn,florian-f/sklearn,xuewei4d/scikit-learn,bhargav/scikit-learn,hdmetor/scikit-learn,ZenDevelopmentSystems/scikit-learn,plissonf/scikit-learn,pratapvardhan/scikit-learn,adamgreenhall/scikit-learn,rrohan/scikit-learn,hugobowne/scikit-learn,giorgiop/scikit-learn,mlyundin/scikit-learn,sarahgrogan/scikit-learn,siutanwong/scikit-learn,JsNoNo/scikit-learn,larsmans/scikit-learn,fbagirov/scikit-learn,shusenl/scikit-learn,B3AU/waveTree,TomDLT/scikit-learn,zhenv5/scikit-learn,cauchycui/scikit-learn,djgagne/scikit-learn,treycausey/scikit-learn,raghavrv/scikit-learn,arjoly/scikit-learn,evgchz/scikit-learn,hsuantien/scikit-learn,espg/scikit-learn,tosolveit/scikit-learn,abimannans/scikit-learn,idlead/scikit-learn,vybstat/scikit-learn,manashmndl/scikit-learn,jpautom/scikit-learn,pkruskal/scikit-learn,pv/scikit-learn,IssamLaradji/scikit-learn,pv/scikit-learn,andaag/scikit-learn,vinayak-mehta/scikit-learn,arabenjamin/scikit-learn,mojoboss/scikit-learn,Sentient07/scikit-learn,kjung/scikit-learn,pkruskal/scikit-learn,potash/scikit-learn,ashhher3/scikit-learn,ChanChiChoi/scikit-learn,tmhm/scikit-learn,xzh86/scikit-learn,NelisVerhoef/scikit-learn,fyffyt/scikit-learn,nrhine1/scikit-learn,kaichogami/scikit-learn,ycaihua/scikit-learn,russel1237/scikit-learn,Titan-C/scikit-learn,jkarnows/scikit-learn,ishanic/scikit-learn,tomlof/scikit-learn,iismd17/scikit-learn,waterponey/scikit-learn,Vimos/scikit-learn,alexsavio/scikit-learn,bnaul/scikit-learn,victorbergelin/scikit-learn,AlexanderFabisch/scikit-learn,giorgiop/scikit-learn,mhdella/scikit-learn,ClimbsRocks/scikit-learn,rahul-c1/scikit-learn,bhargav/scikit-learn,bthirion/scikit-learn,herilalaina/scikit-learn,RPGOne/scikit-learn,NunoEdgarGub1/scikit-learn,NelisVerhoef/scikit-learn,ephes/scikit-learn,yunfeilu/scikit-learn,smartscheduling/scikit-learn-categorical-tree,belltailjp/scikit-learn,schets/scikit-learn,eg-zhang/scikit-learn,sgenoud/scikit-learn,eickenberg/scikit-learn,mattilyra/scikit-learn,sgenoud/scikit-learn,saiwing-yeung/scikit-learn,anntzer/scikit-learn,q1ang/scikit-learn,samuel1208/scikit-learn,vshtanko/scikit-learn,simon-pepin/scikit-learn,icdishb/scikit-learn,frank-tancf/scikit-learn,pypot/scikit-learn,kaichogami/scikit-learn,vinayak-mehta/scikit-learn,carrillo/scikit-learn,mrshu/scikit-learn,qifeigit/scikit-learn,fabianp/scikit-learn,mayblue9/scikit-learn,samzhang111/scikit-learn,aewhatley/scikit-learn,samuel1208/scikit-learn,ashhher3/scikit-learn,nesterione/scikit-learn,krez13/scikit-learn,cainiaocome/scikit-learn,akionakamura/scikit-learn,andaag/scikit-learn,trungnt13/scikit-learn,nelson-liu/scikit-learn,xubenben/scikit-learn,jakirkham/scikit-learn,wlamond/scikit-learn,Clyde-fare/scikit-learn,jkarnows/scikit-learn,ankurankan/scikit-learn,ElDeveloper/scikit-learn,hugobowne/scikit-learn,shusenl/scikit-learn,bigdataelephants/scikit-learn,xubenben/scikit-learn,nesterione/scikit-learn,anntzer/scikit-learn,CVML/scikit-learn,sinhrks/scikit-learn,altairpearl/scikit-learn,sgenoud/scikit-learn,pompiduskus/scikit-learn,amueller/scikit-learn,Sentient07/scikit-learn,JosmanPS/scikit-learn,pv/scikit-learn,lin-credible/scikit-learn,carrillo/scikit-learn,mrshu/scikit-learn,bthirion/scikit-learn,pypot/scikit-learn,xwolf12/scikit-learn,wazeerzulfikar/scikit-learn,shangwuhencc/scikit-learn,vivekmishra1991/scikit-learn,shikhardb/scikit-learn,billy-inn/scikit-learn,simon-pepin/scikit-learn,devanshdalal/scikit-learn,walterreade/scikit-learn,eg-zhang/scikit-learn,gclenaghan/scikit-learn,simon-pepin/scikit-learn,wzbozon/scikit-learn,alexeyum/scikit-learn,ChanChiChoi/scikit-learn,yask123/scikit-learn,Windy-Ground/scikit-learn,ChanderG/scikit-learn,DSLituiev/scikit-learn,aabadie/scikit-learn,zorojean/scikit-learn,kevin-intel/scikit-learn,vinayak-mehta/scikit-learn,xuewei4d/scikit-learn,fredhusser/scikit-learn,sanketloke/scikit-learn,jorik041/scikit-learn,sergeyf/scikit-learn,davidgbe/scikit-learn,rvraghav93/scikit-learn,ephes/scikit-learn,poryfly/scikit-learn,rsivapr/scikit-learn,kagayakidan/scikit-learn,kaichogami/scikit-learn,CforED/Machine-Learning,evgchz/scikit-learn,abhishekkrthakur/scikit-learn,macks22/scikit-learn,hrjn/scikit-learn,florian-f/sklearn,aminert/scikit-learn,kashif/scikit-learn,equialgo/scikit-learn,hdmetor/scikit-learn,vortex-ape/scikit-learn,luo66/scikit-learn,madjelan/scikit-learn,mlyundin/scikit-learn,mjudsp/Tsallis,MechCoder/scikit-learn,HolgerPeters/scikit-learn,siutanwong/scikit-learn,petosegan/scikit-learn,rsivapr/scikit-learn,AIML/scikit-learn,manhhomienbienthuy/scikit-learn,mattgiguere/scikit-learn,ssaeger/scikit-learn,jorik041/scikit-learn,vermouthmjl/scikit-learn,tdhopper/scikit-learn,LohithBlaze/scikit-learn,AlexandreAbraham/scikit-learn,pompiduskus/scikit-learn,kmike/scikit-learn,alexeyum/scikit-learn,DonBeo/scikit-learn,aetilley/scikit-learn,mojoboss/scikit-learn,huzq/scikit-learn,mwv/scikit-learn,jaidevd/scikit-learn,hainm/scikit-learn,CVML/scikit-learn,xzh86/scikit-learn,kmike/scikit-learn,hdmetor/scikit-learn,jjx02230808/project0223,PatrickOReilly/scikit-learn,huobaowangxi/scikit-learn,sonnyhu/scikit-learn,ivannz/scikit-learn,Srisai85/scikit-learn,fabioticconi/scikit-learn,hrjn/scikit-learn,ishanic/scikit-learn,Fireblend/scikit-learn,toastedcornflakes/scikit-learn,elkingtonmcb/scikit-learn,ssaeger/scikit-learn,IndraVikas/scikit-learn,hsiaoyi0504/scikit-learn,chrsrds/scikit-learn,Myasuka/scikit-learn,cwu2011/scikit-learn,nrhine1/scikit-learn,pratapvardhan/scikit-learn,glennq/scikit-learn,lesteve/scikit-learn,lazywei/scikit-learn,kagayakidan/scikit-learn,loli/sklearn-ensembletrees,ltiao/scikit-learn,michigraber/scikit-learn,andaag/scikit-learn,abhishekgahlot/scikit-learn,huobaowangxi/scikit-learn,gclenaghan/scikit-learn,quheng/scikit-learn,voxlol/scikit-learn,sergeyf/scikit-learn,JPFrancoia/scikit-learn,0asa/scikit-learn,jereze/scikit-learn,DSLituiev/scikit-learn,simon-pepin/scikit-learn,voxlol/scikit-learn,stylianos-kampakis/scikit-learn,henridwyer/scikit-learn,ChanderG/scikit-learn,B3AU/waveTree,ogrisel/scikit-learn,ningchi/scikit-learn,bthirion/scikit-learn,jakobworldpeace/scikit-learn,Adai0808/scikit-learn,ldirer/scikit-learn,robin-lai/scikit-learn,JeanKossaifi/scikit-learn,rahul-c1/scikit-learn,schets/scikit-learn,lesteve/scikit-learn,victorbergelin/scikit-learn,Obus/scikit-learn,ycaihua/scikit-learn,cainiaocome/scikit-learn,mayblue9/scikit-learn,aabadie/scikit-learn,elkingtonmcb/scikit-learn,yonglehou/scikit-learn,russel1237/scikit-learn,saiwing-yeung/scikit-learn,AIML/scikit-learn,vibhorag/scikit-learn,RomainBrault/scikit-learn,hitszxp/scikit-learn,wzbozon/scikit-learn,zhenv5/scikit-learn,Lawrence-Liu/scikit-learn
|
ENH: Add a benchmark for ward
Comparing the scikits and scipy
|
"""
Bench the scikit's ward implement compared to scipy's
"""
import time
import numpy as np
from scipy.cluster import hierarchy
import pylab as pl
from scikits.learn.cluster import Ward
ward = Ward(n_clusters=15)
n_samples = np.logspace(.5, 2, 9)
n_features = np.logspace(1, 3.5, 7)
N_samples, N_features = np.meshgrid(n_samples,
n_features)
scikits_time = np.zeros(N_samples.shape)
scipy_time = np.zeros(N_samples.shape)
for i, n in enumerate(n_samples):
for j, p in enumerate(n_features):
X = np.random.normal(size=(n, p))
t0 = time.time()
ward.fit(X)
scikits_time[j, i] = time.time() - t0
t0 = time.time()
hierarchy.ward(X.T)
scipy_time[j, i] = time.time() - t0
ratio = scikits_time/scipy_time
pl.clf()
pl.imshow(np.log(ratio), aspect='auto', origin="lower")
pl.colorbar()
pl.contour(ratio, levels=[1, ], colors='k')
pl.yticks(range(len(n_features)), n_features.astype(np.int))
pl.ylabel('N features')
pl.xticks(range(len(n_samples)), n_samples.astype(np.int))
pl.xlabel('N samples')
pl.title("Scikit's time, in units of scipy time (log)")
pl.show()
|
<commit_before><commit_msg>ENH: Add a benchmark for ward
Comparing the scikits and scipy<commit_after>
|
"""
Bench the scikit's ward implement compared to scipy's
"""
import time
import numpy as np
from scipy.cluster import hierarchy
import pylab as pl
from scikits.learn.cluster import Ward
ward = Ward(n_clusters=15)
n_samples = np.logspace(.5, 2, 9)
n_features = np.logspace(1, 3.5, 7)
N_samples, N_features = np.meshgrid(n_samples,
n_features)
scikits_time = np.zeros(N_samples.shape)
scipy_time = np.zeros(N_samples.shape)
for i, n in enumerate(n_samples):
for j, p in enumerate(n_features):
X = np.random.normal(size=(n, p))
t0 = time.time()
ward.fit(X)
scikits_time[j, i] = time.time() - t0
t0 = time.time()
hierarchy.ward(X.T)
scipy_time[j, i] = time.time() - t0
ratio = scikits_time/scipy_time
pl.clf()
pl.imshow(np.log(ratio), aspect='auto', origin="lower")
pl.colorbar()
pl.contour(ratio, levels=[1, ], colors='k')
pl.yticks(range(len(n_features)), n_features.astype(np.int))
pl.ylabel('N features')
pl.xticks(range(len(n_samples)), n_samples.astype(np.int))
pl.xlabel('N samples')
pl.title("Scikit's time, in units of scipy time (log)")
pl.show()
|
ENH: Add a benchmark for ward
Comparing the scikits and scipy"""
Bench the scikit's ward implement compared to scipy's
"""
import time
import numpy as np
from scipy.cluster import hierarchy
import pylab as pl
from scikits.learn.cluster import Ward
ward = Ward(n_clusters=15)
n_samples = np.logspace(.5, 2, 9)
n_features = np.logspace(1, 3.5, 7)
N_samples, N_features = np.meshgrid(n_samples,
n_features)
scikits_time = np.zeros(N_samples.shape)
scipy_time = np.zeros(N_samples.shape)
for i, n in enumerate(n_samples):
for j, p in enumerate(n_features):
X = np.random.normal(size=(n, p))
t0 = time.time()
ward.fit(X)
scikits_time[j, i] = time.time() - t0
t0 = time.time()
hierarchy.ward(X.T)
scipy_time[j, i] = time.time() - t0
ratio = scikits_time/scipy_time
pl.clf()
pl.imshow(np.log(ratio), aspect='auto', origin="lower")
pl.colorbar()
pl.contour(ratio, levels=[1, ], colors='k')
pl.yticks(range(len(n_features)), n_features.astype(np.int))
pl.ylabel('N features')
pl.xticks(range(len(n_samples)), n_samples.astype(np.int))
pl.xlabel('N samples')
pl.title("Scikit's time, in units of scipy time (log)")
pl.show()
|
<commit_before><commit_msg>ENH: Add a benchmark for ward
Comparing the scikits and scipy<commit_after>"""
Bench the scikit's ward implement compared to scipy's
"""
import time
import numpy as np
from scipy.cluster import hierarchy
import pylab as pl
from scikits.learn.cluster import Ward
ward = Ward(n_clusters=15)
n_samples = np.logspace(.5, 2, 9)
n_features = np.logspace(1, 3.5, 7)
N_samples, N_features = np.meshgrid(n_samples,
n_features)
scikits_time = np.zeros(N_samples.shape)
scipy_time = np.zeros(N_samples.shape)
for i, n in enumerate(n_samples):
for j, p in enumerate(n_features):
X = np.random.normal(size=(n, p))
t0 = time.time()
ward.fit(X)
scikits_time[j, i] = time.time() - t0
t0 = time.time()
hierarchy.ward(X.T)
scipy_time[j, i] = time.time() - t0
ratio = scikits_time/scipy_time
pl.clf()
pl.imshow(np.log(ratio), aspect='auto', origin="lower")
pl.colorbar()
pl.contour(ratio, levels=[1, ], colors='k')
pl.yticks(range(len(n_features)), n_features.astype(np.int))
pl.ylabel('N features')
pl.xticks(range(len(n_samples)), n_samples.astype(np.int))
pl.xlabel('N samples')
pl.title("Scikit's time, in units of scipy time (log)")
pl.show()
|
|
63eb9f3dc26d33c4f6113011428a45b703a1886e
|
axelrod/__init__.py
|
axelrod/__init__.py
|
from __future__ import absolute_import
# The order of imports matters!
from .random_ import random_choice
from .plot import Plot
from .game import DefaultGame, Game
from .player import is_basic, is_cheater, update_histories, Player
from .mock_player import MockPlayer, simulate_play
from .round_robin import RoundRobin
from .strategies import *
from .tournament import Tournament
from .tournament_manager import TournamentManager
from .tournament_manager_factory import TournamentManagerFactory
from .result_set import ResultSet
from .ecosystem import Ecosystem
|
from __future__ import absolute_import
# The order of imports matters!
from .random_ import random_choice
from .plot import Plot
from .game import DefaultGame, Game
from .player import is_basic, is_cheater, update_histories, Player
from .mock_player import MockPlayer, simulate_play
from .round_robin import RoundRobin
from .strategies import *
from .tournament import Tournament
from .tournament_manager import TournamentManager
from .tournament_manager_factory import TournamentManagerFactory
from .result_set import ResultSet
from .ecosystem import Ecosystem
from .utils import run_tournaments, setup_logging
|
Fix implicit imports for tournament repo
|
Fix implicit imports for tournament repo
|
Python
|
mit
|
ranjinidas/Axelrod,ranjinidas/Axelrod,marcharper/Axelrod,marcharper/Axelrod
|
from __future__ import absolute_import
# The order of imports matters!
from .random_ import random_choice
from .plot import Plot
from .game import DefaultGame, Game
from .player import is_basic, is_cheater, update_histories, Player
from .mock_player import MockPlayer, simulate_play
from .round_robin import RoundRobin
from .strategies import *
from .tournament import Tournament
from .tournament_manager import TournamentManager
from .tournament_manager_factory import TournamentManagerFactory
from .result_set import ResultSet
from .ecosystem import Ecosystem
Fix implicit imports for tournament repo
|
from __future__ import absolute_import
# The order of imports matters!
from .random_ import random_choice
from .plot import Plot
from .game import DefaultGame, Game
from .player import is_basic, is_cheater, update_histories, Player
from .mock_player import MockPlayer, simulate_play
from .round_robin import RoundRobin
from .strategies import *
from .tournament import Tournament
from .tournament_manager import TournamentManager
from .tournament_manager_factory import TournamentManagerFactory
from .result_set import ResultSet
from .ecosystem import Ecosystem
from .utils import run_tournaments, setup_logging
|
<commit_before>from __future__ import absolute_import
# The order of imports matters!
from .random_ import random_choice
from .plot import Plot
from .game import DefaultGame, Game
from .player import is_basic, is_cheater, update_histories, Player
from .mock_player import MockPlayer, simulate_play
from .round_robin import RoundRobin
from .strategies import *
from .tournament import Tournament
from .tournament_manager import TournamentManager
from .tournament_manager_factory import TournamentManagerFactory
from .result_set import ResultSet
from .ecosystem import Ecosystem
<commit_msg>Fix implicit imports for tournament repo<commit_after>
|
from __future__ import absolute_import
# The order of imports matters!
from .random_ import random_choice
from .plot import Plot
from .game import DefaultGame, Game
from .player import is_basic, is_cheater, update_histories, Player
from .mock_player import MockPlayer, simulate_play
from .round_robin import RoundRobin
from .strategies import *
from .tournament import Tournament
from .tournament_manager import TournamentManager
from .tournament_manager_factory import TournamentManagerFactory
from .result_set import ResultSet
from .ecosystem import Ecosystem
from .utils import run_tournaments, setup_logging
|
from __future__ import absolute_import
# The order of imports matters!
from .random_ import random_choice
from .plot import Plot
from .game import DefaultGame, Game
from .player import is_basic, is_cheater, update_histories, Player
from .mock_player import MockPlayer, simulate_play
from .round_robin import RoundRobin
from .strategies import *
from .tournament import Tournament
from .tournament_manager import TournamentManager
from .tournament_manager_factory import TournamentManagerFactory
from .result_set import ResultSet
from .ecosystem import Ecosystem
Fix implicit imports for tournament repofrom __future__ import absolute_import
# The order of imports matters!
from .random_ import random_choice
from .plot import Plot
from .game import DefaultGame, Game
from .player import is_basic, is_cheater, update_histories, Player
from .mock_player import MockPlayer, simulate_play
from .round_robin import RoundRobin
from .strategies import *
from .tournament import Tournament
from .tournament_manager import TournamentManager
from .tournament_manager_factory import TournamentManagerFactory
from .result_set import ResultSet
from .ecosystem import Ecosystem
from .utils import run_tournaments, setup_logging
|
<commit_before>from __future__ import absolute_import
# The order of imports matters!
from .random_ import random_choice
from .plot import Plot
from .game import DefaultGame, Game
from .player import is_basic, is_cheater, update_histories, Player
from .mock_player import MockPlayer, simulate_play
from .round_robin import RoundRobin
from .strategies import *
from .tournament import Tournament
from .tournament_manager import TournamentManager
from .tournament_manager_factory import TournamentManagerFactory
from .result_set import ResultSet
from .ecosystem import Ecosystem
<commit_msg>Fix implicit imports for tournament repo<commit_after>from __future__ import absolute_import
# The order of imports matters!
from .random_ import random_choice
from .plot import Plot
from .game import DefaultGame, Game
from .player import is_basic, is_cheater, update_histories, Player
from .mock_player import MockPlayer, simulate_play
from .round_robin import RoundRobin
from .strategies import *
from .tournament import Tournament
from .tournament_manager import TournamentManager
from .tournament_manager_factory import TournamentManagerFactory
from .result_set import ResultSet
from .ecosystem import Ecosystem
from .utils import run_tournaments, setup_logging
|
24bb3d632561794b0785dad0977b58bb28a5273e
|
better_raw_modes.py
|
better_raw_modes.py
|
import hexchat
try:
from .util import no_recursion
except SystemError:
# Add addons path to sys.path for win32
# See https://github.com/hexchat/hexchat/issues/1396
import os
import sys
if sys.platform == "win32":
addons_path = os.path.join(hexchat.get_info("configdir"), "addons")
if addons_path not in sys.path:
sys.path.append(addons_path)
from util import no_recursion
###############################################################################
__module_name__ = "Better Raw Modes"
__module_author__ = "FichteFoll"
__module_version__ = "0.1.0"
__module_description__ = "Enhances display of the 'Raw Modes' text event"
@no_recursion
def raw_modes_cb(word, word_eol, event):
"""Transforms mode messages to remove redundant information.
Self-modes get ":" prepended.
['FichteFoll', '#channel +o FichteFoll'] => ['FichteFoll', '+o FichteFoll']
['FichteFoll', 'FichteFoll :+Tix'] => ['FichteFoll', ':+Tix']
['FichteFoll', 'FichteFoll +Tix'] => ['FichteFoll', ':+Tix']
"""
mode_args = word[1].split()
if mode_args[0] == hexchat.get_info('channel'):
del mode_args[0]
elif mode_args[0] == hexchat.get_info('nick'):
mode_args[1] = ":" + mode_args[1].lstrip(":")
del mode_args[0]
else:
return hexchat.EAT_NONE
hexchat.emit_print(event, word[0], " ".join(mode_args))
return hexchat.EAT_HEXCHAT
def main():
hexchat.hook_print('Raw Modes', raw_modes_cb, 'Raw Modes')
hexchat.get_info("")
print(__module_name__, __module_version__, "loaded")
if __name__ == '__main__':
main()
|
Add raw mode hooker that reduces redundant stuff
|
Add raw mode hooker that reduces redundant stuff
|
Python
|
mit
|
FichteFoll/hexchat-addons
|
Add raw mode hooker that reduces redundant stuff
|
import hexchat
try:
from .util import no_recursion
except SystemError:
# Add addons path to sys.path for win32
# See https://github.com/hexchat/hexchat/issues/1396
import os
import sys
if sys.platform == "win32":
addons_path = os.path.join(hexchat.get_info("configdir"), "addons")
if addons_path not in sys.path:
sys.path.append(addons_path)
from util import no_recursion
###############################################################################
__module_name__ = "Better Raw Modes"
__module_author__ = "FichteFoll"
__module_version__ = "0.1.0"
__module_description__ = "Enhances display of the 'Raw Modes' text event"
@no_recursion
def raw_modes_cb(word, word_eol, event):
"""Transforms mode messages to remove redundant information.
Self-modes get ":" prepended.
['FichteFoll', '#channel +o FichteFoll'] => ['FichteFoll', '+o FichteFoll']
['FichteFoll', 'FichteFoll :+Tix'] => ['FichteFoll', ':+Tix']
['FichteFoll', 'FichteFoll +Tix'] => ['FichteFoll', ':+Tix']
"""
mode_args = word[1].split()
if mode_args[0] == hexchat.get_info('channel'):
del mode_args[0]
elif mode_args[0] == hexchat.get_info('nick'):
mode_args[1] = ":" + mode_args[1].lstrip(":")
del mode_args[0]
else:
return hexchat.EAT_NONE
hexchat.emit_print(event, word[0], " ".join(mode_args))
return hexchat.EAT_HEXCHAT
def main():
hexchat.hook_print('Raw Modes', raw_modes_cb, 'Raw Modes')
hexchat.get_info("")
print(__module_name__, __module_version__, "loaded")
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add raw mode hooker that reduces redundant stuff<commit_after>
|
import hexchat
try:
from .util import no_recursion
except SystemError:
# Add addons path to sys.path for win32
# See https://github.com/hexchat/hexchat/issues/1396
import os
import sys
if sys.platform == "win32":
addons_path = os.path.join(hexchat.get_info("configdir"), "addons")
if addons_path not in sys.path:
sys.path.append(addons_path)
from util import no_recursion
###############################################################################
__module_name__ = "Better Raw Modes"
__module_author__ = "FichteFoll"
__module_version__ = "0.1.0"
__module_description__ = "Enhances display of the 'Raw Modes' text event"
@no_recursion
def raw_modes_cb(word, word_eol, event):
"""Transforms mode messages to remove redundant information.
Self-modes get ":" prepended.
['FichteFoll', '#channel +o FichteFoll'] => ['FichteFoll', '+o FichteFoll']
['FichteFoll', 'FichteFoll :+Tix'] => ['FichteFoll', ':+Tix']
['FichteFoll', 'FichteFoll +Tix'] => ['FichteFoll', ':+Tix']
"""
mode_args = word[1].split()
if mode_args[0] == hexchat.get_info('channel'):
del mode_args[0]
elif mode_args[0] == hexchat.get_info('nick'):
mode_args[1] = ":" + mode_args[1].lstrip(":")
del mode_args[0]
else:
return hexchat.EAT_NONE
hexchat.emit_print(event, word[0], " ".join(mode_args))
return hexchat.EAT_HEXCHAT
def main():
hexchat.hook_print('Raw Modes', raw_modes_cb, 'Raw Modes')
hexchat.get_info("")
print(__module_name__, __module_version__, "loaded")
if __name__ == '__main__':
main()
|
Add raw mode hooker that reduces redundant stuffimport hexchat
try:
from .util import no_recursion
except SystemError:
# Add addons path to sys.path for win32
# See https://github.com/hexchat/hexchat/issues/1396
import os
import sys
if sys.platform == "win32":
addons_path = os.path.join(hexchat.get_info("configdir"), "addons")
if addons_path not in sys.path:
sys.path.append(addons_path)
from util import no_recursion
###############################################################################
__module_name__ = "Better Raw Modes"
__module_author__ = "FichteFoll"
__module_version__ = "0.1.0"
__module_description__ = "Enhances display of the 'Raw Modes' text event"
@no_recursion
def raw_modes_cb(word, word_eol, event):
"""Transforms mode messages to remove redundant information.
Self-modes get ":" prepended.
['FichteFoll', '#channel +o FichteFoll'] => ['FichteFoll', '+o FichteFoll']
['FichteFoll', 'FichteFoll :+Tix'] => ['FichteFoll', ':+Tix']
['FichteFoll', 'FichteFoll +Tix'] => ['FichteFoll', ':+Tix']
"""
mode_args = word[1].split()
if mode_args[0] == hexchat.get_info('channel'):
del mode_args[0]
elif mode_args[0] == hexchat.get_info('nick'):
mode_args[1] = ":" + mode_args[1].lstrip(":")
del mode_args[0]
else:
return hexchat.EAT_NONE
hexchat.emit_print(event, word[0], " ".join(mode_args))
return hexchat.EAT_HEXCHAT
def main():
hexchat.hook_print('Raw Modes', raw_modes_cb, 'Raw Modes')
hexchat.get_info("")
print(__module_name__, __module_version__, "loaded")
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add raw mode hooker that reduces redundant stuff<commit_after>import hexchat
try:
from .util import no_recursion
except SystemError:
# Add addons path to sys.path for win32
# See https://github.com/hexchat/hexchat/issues/1396
import os
import sys
if sys.platform == "win32":
addons_path = os.path.join(hexchat.get_info("configdir"), "addons")
if addons_path not in sys.path:
sys.path.append(addons_path)
from util import no_recursion
###############################################################################
__module_name__ = "Better Raw Modes"
__module_author__ = "FichteFoll"
__module_version__ = "0.1.0"
__module_description__ = "Enhances display of the 'Raw Modes' text event"
@no_recursion
def raw_modes_cb(word, word_eol, event):
"""Transforms mode messages to remove redundant information.
Self-modes get ":" prepended.
['FichteFoll', '#channel +o FichteFoll'] => ['FichteFoll', '+o FichteFoll']
['FichteFoll', 'FichteFoll :+Tix'] => ['FichteFoll', ':+Tix']
['FichteFoll', 'FichteFoll +Tix'] => ['FichteFoll', ':+Tix']
"""
mode_args = word[1].split()
if mode_args[0] == hexchat.get_info('channel'):
del mode_args[0]
elif mode_args[0] == hexchat.get_info('nick'):
mode_args[1] = ":" + mode_args[1].lstrip(":")
del mode_args[0]
else:
return hexchat.EAT_NONE
hexchat.emit_print(event, word[0], " ".join(mode_args))
return hexchat.EAT_HEXCHAT
def main():
hexchat.hook_print('Raw Modes', raw_modes_cb, 'Raw Modes')
hexchat.get_info("")
print(__module_name__, __module_version__, "loaded")
if __name__ == '__main__':
main()
|
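The plugin above imports a no_recursion decorator from a util module that is not part of this record. Since raw_modes_cb re-emits the same 'Raw Modes' text event via emit_print, the guard presumably exists to stop that emission from re-triggering the callback. A minimal sketch of such a re-entrancy guard, assuming that is all it does (the _running attribute is illustrative, not the project's actual helper):

import functools

def no_recursion(func):
    # Block re-entrant calls: emit_print() inside the callback fires the
    # same text event, which would otherwise invoke the hook forever.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        if wrapper._running:
            return None  # let the re-emitted event pass through untouched
        wrapper._running = True
        try:
            return func(*args, **kwargs)
        finally:
            wrapper._running = False
    wrapper._running = False
    return wrapper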
|
5bc1544f23542f2187448512fedad2deafe4966d
|
parse_stuff.py
|
parse_stuff.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from parsers.main_parser import MainParser
if __name__ == '__main__':
src = r"D:\nhl\official_and_json\2016-17\2017-02\2017-02-01.zip"
# src = r"D:\nhl\official_and_json\2016-17\2017-02\2017-02-20"
# src = r"D:\nhl\official_and_json\_2015-16\2016-05\2016-05-09.zip"
src_dir = r"D:\nhl\official_and_json\_2014-15\2014-12"
src_dir = r"D:\nhl\official_and_json\2016-17\2016-10"
src_dir = r"D:\nhl\official_and_json\_2015-16\2016-04"
files = os.listdir(src_dir)
files = [src]
# for f in files[17:18]:
for f in files[:1]:
print(f)
if not os.path.splitext(f)[-1].lower().endswith(".zip"):
continue
src = os.path.join(src_dir, f)
mp = MainParser(src)
print(mp.game_ids)
for game_id in mp.game_ids[:1]:
mp.parse_single_game(game_id)
|
Add initial version of parsing application
|
Add initial version of parsing application
|
Python
|
mit
|
leaffan/pynhldb
|
Add initial version of parsing application
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from parsers.main_parser import MainParser
if __name__ == '__main__':
src = r"D:\nhl\official_and_json\2016-17\2017-02\2017-02-01.zip"
# src = r"D:\nhl\official_and_json\2016-17\2017-02\2017-02-20"
# src = r"D:\nhl\official_and_json\_2015-16\2016-05\2016-05-09.zip"
src_dir = r"D:\nhl\official_and_json\_2014-15\2014-12"
src_dir = r"D:\nhl\official_and_json\2016-17\2016-10"
src_dir = r"D:\nhl\official_and_json\_2015-16\2016-04"
files = os.listdir(src_dir)
files = [src]
# for f in files[17:18]:
for f in files[:1]:
print(f)
if not os.path.splitext(f)[-1].lower().endswith(".zip"):
continue
src = os.path.join(src_dir, f)
mp = MainParser(src)
print(mp.game_ids)
for game_id in mp.game_ids[:1]:
mp.parse_single_game(game_id)
|
<commit_before><commit_msg>Add initial version of parsing application<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from parsers.main_parser import MainParser
if __name__ == '__main__':
src = r"D:\nhl\official_and_json\2016-17\2017-02\2017-02-01.zip"
# src = r"D:\nhl\official_and_json\2016-17\2017-02\2017-02-20"
# src = r"D:\nhl\official_and_json\_2015-16\2016-05\2016-05-09.zip"
src_dir = r"D:\nhl\official_and_json\_2014-15\2014-12"
src_dir = r"D:\nhl\official_and_json\2016-17\2016-10"
src_dir = r"D:\nhl\official_and_json\_2015-16\2016-04"
files = os.listdir(src_dir)
files = [src]
# for f in files[17:18]:
for f in files[:1]:
print(f)
if not os.path.splitext(f)[-1].lower().endswith(".zip"):
continue
src = os.path.join(src_dir, f)
mp = MainParser(src)
print(mp.game_ids)
for game_id in mp.game_ids[:1]:
mp.parse_single_game(game_id)
|
Add initial version of parsing application#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from parsers.main_parser import MainParser
if __name__ == '__main__':
src = r"D:\nhl\official_and_json\2016-17\2017-02\2017-02-01.zip"
# src = r"D:\nhl\official_and_json\2016-17\2017-02\2017-02-20"
# src = r"D:\nhl\official_and_json\_2015-16\2016-05\2016-05-09.zip"
src_dir = r"D:\nhl\official_and_json\_2014-15\2014-12"
src_dir = r"D:\nhl\official_and_json\2016-17\2016-10"
src_dir = r"D:\nhl\official_and_json\_2015-16\2016-04"
files = os.listdir(src_dir)
files = [src]
# for f in files[17:18]:
for f in files[:1]:
print(f)
if not os.path.splitext(f)[-1].lower().endswith(".zip"):
continue
src = os.path.join(src_dir, f)
mp = MainParser(src)
print(mp.game_ids)
for game_id in mp.game_ids[:1]:
mp.parse_single_game(game_id)
|
<commit_before><commit_msg>Add initial version of parsing application<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from parsers.main_parser import MainParser
if __name__ == '__main__':
src = r"D:\nhl\official_and_json\2016-17\2017-02\2017-02-01.zip"
# src = r"D:\nhl\official_and_json\2016-17\2017-02\2017-02-20"
# src = r"D:\nhl\official_and_json\_2015-16\2016-05\2016-05-09.zip"
src_dir = r"D:\nhl\official_and_json\_2014-15\2014-12"
src_dir = r"D:\nhl\official_and_json\2016-17\2016-10"
src_dir = r"D:\nhl\official_and_json\_2015-16\2016-04"
files = os.listdir(src_dir)
files = [src]
# for f in files[17:18]:
for f in files[:1]:
print(f)
if not os.path.splitext(f)[-1].lower().endswith(".zip"):
continue
src = os.path.join(src_dir, f)
mp = MainParser(src)
print(mp.game_ids)
for game_id in mp.game_ids[:1]:
mp.parse_single_game(game_id)
|
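In the script above, only the last of the three consecutive src_dir assignments takes effect, and files = [src] then discards the os.listdir result, so exactly one archive is parsed; this is clearly scratch wiring. A sketch of the same flow with that tidied up, keeping MainParser and its game_ids / parse_single_game interface from the record (the command-line handling is an assumption):

#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys

from parsers.main_parser import MainParser


def parse_archives(src_dir):
    # Parse every zip archive in src_dir, one game at a time.
    for name in sorted(os.listdir(src_dir)):
        if not name.lower().endswith(".zip"):
            continue
        mp = MainParser(os.path.join(src_dir, name))
        for game_id in mp.game_ids:
            mp.parse_single_game(game_id)


if __name__ == '__main__':
    if len(sys.argv) != 2:
        sys.exit("usage: parse_stuff.py <directory containing zip archives>")
    parse_archives(sys.argv[1])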
|
9137431ef3d57363bbf6e9a5912d4ca5399c08c0
|
control/test/test_heading_filter.py
|
control/test/test_heading_filter.py
|
"""Tests the heading Kalman Filter."""
import math
import numpy
import operator
import random
import unittest
from heading_filter import HeadingFilter
#pylint: disable=protected-access
#pylint: disable=too-many-public-methods
class TestHeadingFilter(unittest.TestCase):
"""Tests the heading Kalman filter."""
def test_multiply(self):
"""Test the matrix multiply method."""
with self.assertRaises(TypeError):
HeadingFilter._multiply(0, 0)
with self.assertRaises(ValueError):
HeadingFilter._multiply(
[[1, 1]],
[[1, 1]]
)
with self.assertRaises(ValueError):
HeadingFilter._multiply(
[[1, 1]],
[[1, 1], [1, 1], [1, 1]]
)
self.assertEqual(
HeadingFilter._multiply(
[[1, 2]],
[[2, 3],
[5, 8]]
),
[[2 + 10, 3 + 16]]
)
self.assertEqual(
HeadingFilter._multiply(
[[1, 2, 4],
[3, 7, 8]],
[[2, 0, 1, 4, 6],
[1, 1, 1, 1, 1],
[5, 3, 8, 9, 7]]
),
[[24, 14, 35, 42, 36],
[53, 31, 74, 91, 81]]
)
def test_add(self):
"""test the matrix addition method."""
with self.assertRaises(TypeError):
HeadingFilter._add(0, 0)
with self.assertRaises(ValueError):
HeadingFilter._add(
[[1, 1, 1]],
[[1, 1]]
)
with self.assertRaises(ValueError):
HeadingFilter._add(
[[1, 1]],
[[1, 1], [1, 1], [1, 1]]
)
self.assertEqual(
HeadingFilter._add(
[[1, 2]],
[[3, 0]],
),
[[4, 2]]
)
self.assertEqual(
HeadingFilter._add(
[[1, 2],
[3, 0]],
[[3, 0],
[4, 1]]
),
[[4, 2],
[7, 1]]
)
def test_inverse(self):
foo = [[2, 3],
[1, 4]]
identity = [[1, 0],
[0, 1]]
print(HeadingFilter._inverse(foo))
self.assertEqual(
HeadingFilter._multiply(
foo,
HeadingFilter._inverse(foo)
),
identity
)
self.assertEqual(
HeadingFilter._multiply(
HeadingFilter._inverse(foo),
foo
),
identity
)
|
Add Kalman filter for heading
|
Add Kalman filter for heading
|
Python
|
mit
|
bskari/sparkfun-avc,bskari/sparkfun-avc,bskari/sparkfun-avc,bskari/sparkfun-avc,bskari/sparkfun-avc,bskari/sparkfun-avc
|
Add Kalman filter for heading
|
"""Tests the heading Kalman Filter."""
import math
import numpy
import operator
import random
import unittest
from heading_filter import HeadingFilter
#pylint: disable=protected-access
#pylint: disable=too-many-public-methods
class TestHeadingFilter(unittest.TestCase):
"""Tests the heading Kalman filter."""
def test_multiply(self):
"""Test the matrix multiply method."""
with self.assertRaises(TypeError):
HeadingFilter._multiply(0, 0)
with self.assertRaises(ValueError):
HeadingFilter._multiply(
[[1, 1]],
[[1, 1]]
)
with self.assertRaises(ValueError):
HeadingFilter._multiply(
[[1, 1]],
[[1, 1], [1, 1], [1, 1]]
)
self.assertEqual(
HeadingFilter._multiply(
[[1, 2]],
[[2, 3],
[5, 8]]
),
[[2 + 10, 3 + 16]]
)
self.assertEqual(
HeadingFilter._multiply(
[[1, 2, 4],
[3, 7, 8]],
[[2, 0, 1, 4, 6],
[1, 1, 1, 1, 1],
[5, 3, 8, 9, 7]]
),
[[24, 14, 35, 42, 36],
[53, 31, 74, 91, 81]]
)
def test_add(self):
"""test the matrix addition method."""
with self.assertRaises(TypeError):
HeadingFilter._add(0, 0)
with self.assertRaises(ValueError):
HeadingFilter._add(
[[1, 1, 1]],
[[1, 1]]
)
with self.assertRaises(ValueError):
HeadingFilter._add(
[[1, 1]],
[[1, 1], [1, 1], [1, 1]]
)
self.assertEqual(
HeadingFilter._add(
[[1, 2]],
[[3, 0]],
),
[[4, 2]]
)
self.assertEqual(
HeadingFilter._add(
[[1, 2],
[3, 0]],
[[3, 0],
[4, 1]]
),
[[4, 2],
[7, 1]]
)
def test_inverse(self):
foo = [[2, 3],
[1, 4]]
identity = [[1, 0],
[0, 1]]
print(HeadingFilter._inverse(foo))
self.assertEqual(
HeadingFilter._multiply(
foo,
HeadingFilter._inverse(foo)
),
identity
)
self.assertEqual(
HeadingFilter._multiply(
HeadingFilter._inverse(foo),
foo
),
identity
)
|
<commit_before><commit_msg>Add Kalman filter for heading<commit_after>
|
"""Tests the heading Kalman Filter."""
import math
import numpy
import operator
import random
import unittest
from heading_filter import HeadingFilter
#pylint: disable=protected-access
#pylint: disable=too-many-public-methods
class TestHeadingFilter(unittest.TestCase):
"""Tests the heading Kalman filter."""
def test_multiply(self):
"""Test the matrix multiply method."""
with self.assertRaises(TypeError):
HeadingFilter._multiply(0, 0)
with self.assertRaises(ValueError):
HeadingFilter._multiply(
[[1, 1]],
[[1, 1]]
)
with self.assertRaises(ValueError):
HeadingFilter._multiply(
[[1, 1]],
[[1, 1], [1, 1], [1, 1]]
)
self.assertEqual(
HeadingFilter._multiply(
[[1, 2]],
[[2, 3],
[5, 8]]
),
[[2 + 10, 3 + 16]]
)
self.assertEqual(
HeadingFilter._multiply(
[[1, 2, 4],
[3, 7, 8]],
[[2, 0, 1, 4, 6],
[1, 1, 1, 1, 1],
[5, 3, 8, 9, 7]]
),
[[24, 14, 35, 42, 36],
[53, 31, 74, 91, 81]]
)
def test_add(self):
"""test the matrix addition method."""
with self.assertRaises(TypeError):
HeadingFilter._add(0, 0)
with self.assertRaises(ValueError):
HeadingFilter._add(
[[1, 1, 1]],
[[1, 1]]
)
with self.assertRaises(ValueError):
HeadingFilter._add(
[[1, 1]],
[[1, 1], [1, 1], [1, 1]]
)
self.assertEqual(
HeadingFilter._add(
[[1, 2]],
[[3, 0]],
),
[[4, 2]]
)
self.assertEqual(
HeadingFilter._add(
[[1, 2],
[3, 0]],
[[3, 0],
[4, 1]]
),
[[4, 2],
[7, 1]]
)
def test_inverse(self):
foo = [[2, 3],
[1, 4]]
identity = [[1, 0],
[0, 1]]
print(HeadingFilter._inverse(foo))
self.assertEqual(
HeadingFilter._multiply(
foo,
HeadingFilter._inverse(foo)
),
identity
)
self.assertEqual(
HeadingFilter._multiply(
HeadingFilter._inverse(foo),
foo
),
identity
)
|
Add Kalman filter for heading"""Tests the heading Kalman Filter."""
import math
import numpy
import operator
import random
import unittest
from heading_filter import HeadingFilter
#pylint: disable=protected-access
#pylint: disable=too-many-public-methods
class TestHeadingFilter(unittest.TestCase):
"""Tests the heading Kalman filter."""
def test_multiply(self):
"""Test the matrix multiply method."""
with self.assertRaises(TypeError):
HeadingFilter._multiply(0, 0)
with self.assertRaises(ValueError):
HeadingFilter._multiply(
[[1, 1]],
[[1, 1]]
)
with self.assertRaises(ValueError):
HeadingFilter._multiply(
[[1, 1]],
[[1, 1], [1, 1], [1, 1]]
)
self.assertEqual(
HeadingFilter._multiply(
[[1, 2]],
[[2, 3],
[5, 8]]
),
[[2 + 10, 3 + 16]]
)
self.assertEqual(
HeadingFilter._multiply(
[[1, 2, 4],
[3, 7, 8]],
[[2, 0, 1, 4, 6],
[1, 1, 1, 1, 1],
[5, 3, 8, 9, 7]]
),
[[24, 14, 35, 42, 36],
[53, 31, 74, 91, 81]]
)
def test_add(self):
"""test the matrix addition method."""
with self.assertRaises(TypeError):
HeadingFilter._add(0, 0)
with self.assertRaises(ValueError):
HeadingFilter._add(
[[1, 1, 1]],
[[1, 1]]
)
with self.assertRaises(ValueError):
HeadingFilter._add(
[[1, 1]],
[[1, 1], [1, 1], [1, 1]]
)
self.assertEqual(
HeadingFilter._add(
[[1, 2]],
[[3, 0]],
),
[[4, 2]]
)
self.assertEqual(
HeadingFilter._add(
[[1, 2],
[3, 0]],
[[3, 0],
[4, 1]]
),
[[4, 2],
[7, 1]]
)
def test_inverse(self):
foo = [[2, 3],
[1, 4]]
identity = [[1, 0],
[0, 1]]
print(HeadingFilter._inverse(foo))
self.assertEqual(
HeadingFilter._multiply(
foo,
HeadingFilter._inverse(foo)
),
identity
)
self.assertEqual(
HeadingFilter._multiply(
HeadingFilter._inverse(foo),
foo
),
identity
)
|
<commit_before><commit_msg>Add Kalman filter for heading<commit_after>"""Tests the heading Kalman Filter."""
import math
import numpy
import operator
import random
import unittest
from heading_filter import HeadingFilter
#pylint: disable=protected-access
#pylint: disable=too-many-public-methods
class TestHeadingFilter(unittest.TestCase):
"""Tests the heading Kalman filter."""
def test_multiply(self):
"""Test the matrix multiply method."""
with self.assertRaises(TypeError):
HeadingFilter._multiply(0, 0)
with self.assertRaises(ValueError):
HeadingFilter._multiply(
[[1, 1]],
[[1, 1]]
)
with self.assertRaises(ValueError):
HeadingFilter._multiply(
[[1, 1]],
[[1, 1], [1, 1], [1, 1]]
)
self.assertEqual(
HeadingFilter._multiply(
[[1, 2]],
[[2, 3],
[5, 8]]
),
[[2 + 10, 3 + 16]]
)
self.assertEqual(
HeadingFilter._multiply(
[[1, 2, 4],
[3, 7, 8]],
[[2, 0, 1, 4, 6],
[1, 1, 1, 1, 1],
[5, 3, 8, 9, 7]]
),
[[24, 14, 35, 42, 36],
[53, 31, 74, 91, 81]]
)
def test_add(self):
"""test the matrix addition method."""
with self.assertRaises(TypeError):
HeadingFilter._add(0, 0)
with self.assertRaises(ValueError):
HeadingFilter._add(
[[1, 1, 1]],
[[1, 1]]
)
with self.assertRaises(ValueError):
HeadingFilter._add(
[[1, 1]],
[[1, 1], [1, 1], [1, 1]]
)
self.assertEqual(
HeadingFilter._add(
[[1, 2]],
[[3, 0]],
),
[[4, 2]]
)
self.assertEqual(
HeadingFilter._add(
[[1, 2],
[3, 0]],
[[3, 0],
[4, 1]]
),
[[4, 2],
[7, 1]]
)
def test_inverse(self):
foo = [[2, 3],
[1, 4]]
identity = [[1, 0],
[0, 1]]
print(HeadingFilter._inverse(foo))
self.assertEqual(
HeadingFilter._multiply(
foo,
HeadingFilter._inverse(foo)
),
identity
)
self.assertEqual(
HeadingFilter._multiply(
HeadingFilter._inverse(foo),
foo
),
identity
)
|
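The tests above pin down three static matrix helpers that this record does not include. A pure-Python sketch that satisfies the cases shown is below; the 2x2-only closed-form inverse and the use of Fraction (so the inverse round-trip compares exactly equal to the integer identity) are assumptions read off test_inverse, not the project's actual implementation:

from fractions import Fraction


def _multiply(a, b):
    # Row-by-column product of two matrices given as lists of lists.
    if not isinstance(a, list) or not isinstance(b, list):
        raise TypeError("matrices must be lists of lists")
    if len(a[0]) != len(b):
        raise ValueError("inner dimensions do not match")
    return [[sum(a[i][k] * b[k][j] for k in range(len(b)))
             for j in range(len(b[0]))]
            for i in range(len(a))]


def _add(a, b):
    # Element-wise sum; both shapes must match exactly.
    if not isinstance(a, list) or not isinstance(b, list):
        raise TypeError("matrices must be lists of lists")
    if len(a) != len(b) or len(a[0]) != len(b[0]):
        raise ValueError("shapes do not match")
    return [[a[i][j] + b[i][j] for j in range(len(a[0]))]
            for i in range(len(a))]


def _inverse(m):
    # Closed-form inverse of a 2x2 matrix, kept exact with Fraction so
    # _multiply(m, _inverse(m)) equals the integer identity matrix.
    det = m[0][0] * m[1][1] - m[0][1] * m[1][0]
    return [[Fraction(m[1][1], det), Fraction(-m[0][1], det)],
            [Fraction(-m[1][0], det), Fraction(m[0][0], det)]]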
|
af7d9c137f198d6ab11da134d4cfe3b07dd0438c
|
nutsurv/dashboard/migrations/0006_auto_20150413_0901.py
|
nutsurv/dashboard/migrations/0006_auto_20150413_0901.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import jsonfield.fields
import dashboard.models
class Migration(migrations.Migration):
dependencies = [
('dashboard', '0005_auto_20150409_1509'),
]
operations = [
migrations.CreateModel(
name='HouseholdMember',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('gender', models.CharField(max_length=1, choices=[(b'M', b'Male'), (b'F', b'Female'), (b'O', b'Other')])),
('firstName', models.TextField()),
('index', models.SmallIntegerField()),
('muac', models.SmallIntegerField(null=True)),
('birthdate', models.DateField()),
('weight', models.FloatField(null=True)),
('height', models.FloatField(null=True)),
('edema', models.NullBooleanField()),
('household_survey', models.ForeignKey(related_name='members', to='dashboard.HouseholdSurveyJSON')),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='householdsurveyjson',
name='household_number',
field=models.SmallIntegerField(default=1),
preserve_default=False,
),
migrations.AlterField(
model_name='householdsurveyjson',
name='json',
field=jsonfield.fields.JSONField(help_text=b'A JSON document containing data acquired from one household. Typically not edited here but uploaded from a mobile application used by a team of surveyors in the field. If in doubt, do not edit.', validators=[dashboard.models.validate_json]),
preserve_default=True,
),
migrations.AlterField(
model_name='householdsurveyjson',
name='team_anthropometrist',
field=models.ForeignKey(related_name='householdsurveyjson_surveys_as_team_anthropometrist', to='dashboard.TeamMember'),
preserve_default=True,
),
migrations.AlterField(
model_name='householdsurveyjson',
name='team_assistant',
field=models.ForeignKey(related_name='householdsurveyjson_surveys_as_team_assistant', to='dashboard.TeamMember'),
preserve_default=True,
),
migrations.AlterField(
model_name='householdsurveyjson',
name='team_lead',
field=models.ForeignKey(related_name='householdsurveyjson_as_team_lead', to='dashboard.TeamMember'),
preserve_default=True,
),
]
|
Add migrations for training module prep
|
Add migrations for training module prep
|
Python
|
agpl-3.0
|
johanneswilm/eha-nutsurv-django,eHealthAfrica/nutsurv,eHealthAfrica/nutsurv,eHealthAfrica/nutsurv,johanneswilm/eha-nutsurv-django,johanneswilm/eha-nutsurv-django
|
Add migrations for training module prep
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import jsonfield.fields
import dashboard.models
class Migration(migrations.Migration):
dependencies = [
('dashboard', '0005_auto_20150409_1509'),
]
operations = [
migrations.CreateModel(
name='HouseholdMember',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('gender', models.CharField(max_length=1, choices=[(b'M', b'Male'), (b'F', b'Female'), (b'O', b'Other')])),
('firstName', models.TextField()),
('index', models.SmallIntegerField()),
('muac', models.SmallIntegerField(null=True)),
('birthdate', models.DateField()),
('weight', models.FloatField(null=True)),
('height', models.FloatField(null=True)),
('edema', models.NullBooleanField()),
('household_survey', models.ForeignKey(related_name='members', to='dashboard.HouseholdSurveyJSON')),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='householdsurveyjson',
name='household_number',
field=models.SmallIntegerField(default=1),
preserve_default=False,
),
migrations.AlterField(
model_name='householdsurveyjson',
name='json',
field=jsonfield.fields.JSONField(help_text=b'A JSON document containing data acquired from one household. Typically not edited here but uploaded from a mobile application used by a team of surveyors in the field. If in doubt, do not edit.', validators=[dashboard.models.validate_json]),
preserve_default=True,
),
migrations.AlterField(
model_name='householdsurveyjson',
name='team_anthropometrist',
field=models.ForeignKey(related_name='householdsurveyjson_surveys_as_team_anthropometrist', to='dashboard.TeamMember'),
preserve_default=True,
),
migrations.AlterField(
model_name='householdsurveyjson',
name='team_assistant',
field=models.ForeignKey(related_name='householdsurveyjson_surveys_as_team_assistant', to='dashboard.TeamMember'),
preserve_default=True,
),
migrations.AlterField(
model_name='householdsurveyjson',
name='team_lead',
field=models.ForeignKey(related_name='householdsurveyjson_as_team_lead', to='dashboard.TeamMember'),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add migrations for training module prep<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import jsonfield.fields
import dashboard.models
class Migration(migrations.Migration):
dependencies = [
('dashboard', '0005_auto_20150409_1509'),
]
operations = [
migrations.CreateModel(
name='HouseholdMember',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('gender', models.CharField(max_length=1, choices=[(b'M', b'Male'), (b'F', b'Female'), (b'O', b'Other')])),
('firstName', models.TextField()),
('index', models.SmallIntegerField()),
('muac', models.SmallIntegerField(null=True)),
('birthdate', models.DateField()),
('weight', models.FloatField(null=True)),
('height', models.FloatField(null=True)),
('edema', models.NullBooleanField()),
('household_survey', models.ForeignKey(related_name='members', to='dashboard.HouseholdSurveyJSON')),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='householdsurveyjson',
name='household_number',
field=models.SmallIntegerField(default=1),
preserve_default=False,
),
migrations.AlterField(
model_name='householdsurveyjson',
name='json',
field=jsonfield.fields.JSONField(help_text=b'A JSON document containing data acquired from one household. Typically not edited here but uploaded from a mobile application used by a team of surveyors in the field. If in doubt, do not edit.', validators=[dashboard.models.validate_json]),
preserve_default=True,
),
migrations.AlterField(
model_name='householdsurveyjson',
name='team_anthropometrist',
field=models.ForeignKey(related_name='householdsurveyjson_surveys_as_team_anthropometrist', to='dashboard.TeamMember'),
preserve_default=True,
),
migrations.AlterField(
model_name='householdsurveyjson',
name='team_assistant',
field=models.ForeignKey(related_name='householdsurveyjson_surveys_as_team_assistant', to='dashboard.TeamMember'),
preserve_default=True,
),
migrations.AlterField(
model_name='householdsurveyjson',
name='team_lead',
field=models.ForeignKey(related_name='householdsurveyjson_as_team_lead', to='dashboard.TeamMember'),
preserve_default=True,
),
]
|
Add migrations for training module prep# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import jsonfield.fields
import dashboard.models
class Migration(migrations.Migration):
dependencies = [
('dashboard', '0005_auto_20150409_1509'),
]
operations = [
migrations.CreateModel(
name='HouseholdMember',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('gender', models.CharField(max_length=1, choices=[(b'M', b'Male'), (b'F', b'Female'), (b'O', b'Other')])),
('firstName', models.TextField()),
('index', models.SmallIntegerField()),
('muac', models.SmallIntegerField(null=True)),
('birthdate', models.DateField()),
('weight', models.FloatField(null=True)),
('height', models.FloatField(null=True)),
('edema', models.NullBooleanField()),
('household_survey', models.ForeignKey(related_name='members', to='dashboard.HouseholdSurveyJSON')),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='householdsurveyjson',
name='household_number',
field=models.SmallIntegerField(default=1),
preserve_default=False,
),
migrations.AlterField(
model_name='householdsurveyjson',
name='json',
field=jsonfield.fields.JSONField(help_text=b'A JSON document containing data acquired from one household. Typically not edited here but uploaded from a mobile application used by a team of surveyors in the field. If in doubt, do not edit.', validators=[dashboard.models.validate_json]),
preserve_default=True,
),
migrations.AlterField(
model_name='householdsurveyjson',
name='team_anthropometrist',
field=models.ForeignKey(related_name='householdsurveyjson_surveys_as_team_anthropometrist', to='dashboard.TeamMember'),
preserve_default=True,
),
migrations.AlterField(
model_name='householdsurveyjson',
name='team_assistant',
field=models.ForeignKey(related_name='householdsurveyjson_surveys_as_team_assistant', to='dashboard.TeamMember'),
preserve_default=True,
),
migrations.AlterField(
model_name='householdsurveyjson',
name='team_lead',
field=models.ForeignKey(related_name='householdsurveyjson_as_team_lead', to='dashboard.TeamMember'),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add migrations for training module prep<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import jsonfield.fields
import dashboard.models
class Migration(migrations.Migration):
dependencies = [
('dashboard', '0005_auto_20150409_1509'),
]
operations = [
migrations.CreateModel(
name='HouseholdMember',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('gender', models.CharField(max_length=1, choices=[(b'M', b'Male'), (b'F', b'Female'), (b'O', b'Other')])),
('firstName', models.TextField()),
('index', models.SmallIntegerField()),
('muac', models.SmallIntegerField(null=True)),
('birthdate', models.DateField()),
('weight', models.FloatField(null=True)),
('height', models.FloatField(null=True)),
('edema', models.NullBooleanField()),
('household_survey', models.ForeignKey(related_name='members', to='dashboard.HouseholdSurveyJSON')),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='householdsurveyjson',
name='household_number',
field=models.SmallIntegerField(default=1),
preserve_default=False,
),
migrations.AlterField(
model_name='householdsurveyjson',
name='json',
field=jsonfield.fields.JSONField(help_text=b'A JSON document containing data acquired from one household. Typically not edited here but uploaded from a mobile application used by a team of surveyors in the field. If in doubt, do not edit.', validators=[dashboard.models.validate_json]),
preserve_default=True,
),
migrations.AlterField(
model_name='householdsurveyjson',
name='team_anthropometrist',
field=models.ForeignKey(related_name='householdsurveyjson_surveys_as_team_anthropometrist', to='dashboard.TeamMember'),
preserve_default=True,
),
migrations.AlterField(
model_name='householdsurveyjson',
name='team_assistant',
field=models.ForeignKey(related_name='householdsurveyjson_surveys_as_team_assistant', to='dashboard.TeamMember'),
preserve_default=True,
),
migrations.AlterField(
model_name='householdsurveyjson',
name='team_lead',
field=models.ForeignKey(related_name='householdsurveyjson_as_team_lead', to='dashboard.TeamMember'),
preserve_default=True,
),
]
|
|
d96aefe57fab9ee2c2915ebd6f5a659be1c9cec1
|
bin_search.py
|
bin_search.py
|
"""You're going to write a binary search function.
You should use an iterative approach - meaning
using loops.
Your function should take two inputs:
a Python list to search through, and the value
you're searching for.
Assume the list only has distinct elements,
meaning there are no repeated values, and
elements are in a strictly increasing order.
Return the index of value, or -1 if the value
doesn't exist in the list."""
def binary_search(input_array, value):
"""Your code goes here."""
""" Given input is already in sorted order
Here is how it should be done:
1. Find the middle element of the array from range (0...size)
2. Compare it with the given value
2.1 If value == mid-element
then return it and terminate the program
2.2 Else If value < mid-element
then repeat step 1 with range (0...mid-element-index-1)
2.3 Else If value > mid-element
then repeat step 1 with range (mid-element-index + 1, size)
3. If loop finishes normally then return -1"""
""" Step 1"""
size = len(input_array)
start = 0
end = size
while end > start:
mid = (start + end) / 2
mid_value = input_array[mid]
if value == mid_value:
return mid
elif value < mid_value:
end = mid
else:
start = mid + 1
""" Step 3"""
return -1
test_list = [1,3,9,11,15,19,29]
test_val1 = 25
test_val2 = 15
print binary_search(test_list, test_val1)
print binary_search(test_list, test_val2)
print binary_search(test_list, 1)
print binary_search(test_list, 3)
print binary_search(test_list, 9)
print binary_search(test_list, 11)
print binary_search(test_list, 19)
print binary_search(test_list, 29)
print binary_search(test_list, 19)
print binary_search(test_list, 2)
|
Add Binary Search source code
|
Add Binary Search source code
|
Python
|
mit
|
rav1n/basic-algorithms
|
Add Binary Search source code
|
"""You're going to write a binary search function.
You should use an iterative approach - meaning
using loops.
Your function should take two inputs:
a Python list to search through, and the value
you're searching for.
Assume the list only has distinct elements,
meaning there are no repeated values, and
elements are in a strictly increasing order.
Return the index of value, or -1 if the value
doesn't exist in the list."""
def binary_search(input_array, value):
"""Your code goes here."""
""" Given input is already in sorted order
Here is how it should be done:
1. Find the middle element of the array from range (0...size)
2. Compare it with the given value
2.1 If value == mid-element
then return it and terminate the program
2.2 Else If value < mid-element
then repeat step 1 with range (0...mid-element-index-1)
2.3 Else If value > mid-element
then repeat step 1 with range (mid-element-index + 1, size)
3. If loop finishes normally then return -1"""
""" Step 1"""
size = len(input_array)
start = 0
end = size
while end > start:
mid = (start + end) / 2
mid_value = input_array[mid]
if value == mid_value:
return mid
elif value < mid_value:
end = mid
else:
start = mid + 1
""" Step 3"""
return -1
test_list = [1,3,9,11,15,19,29]
test_val1 = 25
test_val2 = 15
print binary_search(test_list, test_val1)
print binary_search(test_list, test_val2)
print binary_search(test_list, 1)
print binary_search(test_list, 3)
print binary_search(test_list, 9)
print binary_search(test_list, 11)
print binary_search(test_list, 19)
print binary_search(test_list, 29)
print binary_search(test_list, 19)
print binary_search(test_list, 2)
|
<commit_before><commit_msg>Add Binary Search source code<commit_after>
|
"""You're going to write a binary search function.
You should use an iterative approach - meaning
using loops.
Your function should take two inputs:
a Python list to search through, and the value
you're searching for.
Assume the list only has distinct elements,
meaning there are no repeated values, and
elements are in a strictly increasing order.
Return the index of value, or -1 if the value
doesn't exist in the list."""
def binary_search(input_array, value):
"""Your code goes here."""
""" Given input is already in sorted order
Here is how it should be done:
1. Find the middle element of the array from range (0...size)
2. Compare it with the given value
2.1 If value == mid-element
then return it and terminate the program
2.2 Else If value < mid-element
then repeat step 1 with range (0...mid-element-index-1)
2.3 Else If value > mid-element
then repeat step 1 with range (mid-element-index + 1, size)
3. If loop finishes normally then return -1"""
""" Step 1"""
size = len(input_array)
start = 0
end = size
while end > start:
mid = (start + end) / 2
mid_value = input_array[mid]
if value == mid_value:
return mid
elif value < mid_value:
end = mid
else:
start = mid + 1
""" Step 3"""
return -1
test_list = [1,3,9,11,15,19,29]
test_val1 = 25
test_val2 = 15
print binary_search(test_list, test_val1)
print binary_search(test_list, test_val2)
print binary_search(test_list, 1)
print binary_search(test_list, 3)
print binary_search(test_list, 9)
print binary_search(test_list, 11)
print binary_search(test_list, 19)
print binary_search(test_list, 29)
print binary_search(test_list, 19)
print binary_search(test_list, 2)
|
Add Binary Search source code"""You're going to write a binary search function.
You should use an iterative approach - meaning
using loops.
Your function should take two inputs:
a Python list to search through, and the value
you're searching for.
Assume the list only has distinct elements,
meaning there are no repeated values, and
elements are in a strictly increasing order.
Return the index of value, or -1 if the value
doesn't exist in the list."""
def binary_search(input_array, value):
"""Your code goes here."""
""" Given input is already in sorted order
Here is how it should be done:
1. Find the middle element of the array from range (0...size)
2. Compare it with the given value
2.1 If value == mid-element
then return it and terminate the program
2.2 Else If value < mid-element
then repeat step 1 with range (0...mid-element-index-1)
2.3 Else If value > mid-element
then repeat step 1 with range (mid-element-index + 1, size)
3. If loop finishes normally then return -1"""
""" Step 1"""
size = len(input_array)
start = 0
end = size
while end > start:
mid = (start + end) / 2
mid_value = input_array[mid]
if value == mid_value:
return mid
elif value < mid_value:
end = mid
else:
start = mid + 1
""" Step 3"""
return -1
test_list = [1,3,9,11,15,19,29]
test_val1 = 25
test_val2 = 15
print binary_search(test_list, test_val1)
print binary_search(test_list, test_val2)
print binary_search(test_list, 1)
print binary_search(test_list, 3)
print binary_search(test_list, 9)
print binary_search(test_list, 11)
print binary_search(test_list, 19)
print binary_search(test_list, 29)
print binary_search(test_list, 19)
print binary_search(test_list, 2)
|
<commit_before><commit_msg>Add Binary Search source code<commit_after>"""You're going to write a binary search function.
You should use an iterative approach - meaning
using loops.
Your function should take two inputs:
a Python list to search through, and the value
you're searching for.
Assume the list only has distinct elements,
meaning there are no repeated values, and
elements are in a strictly increasing order.
Return the index of value, or -1 if the value
doesn't exist in the list."""
def binary_search(input_array, value):
"""Your code goes here."""
""" Given input is already in sorted order
Here is how it should be done:
1. Find the middle element of the array from range (0...size)
2. Compare it with the given value
2.1 If value == mid-element
then return it and terminate the program
2.2 Else If value < mid-element
then repeat step 1 with range (0...mid-element-index-1)
2.3 Else If value > mid-element
then repeat step 1 with range (mid-element-index + 1, size)
3. If loop finishes normally then return -1"""
""" Step 1"""
size = len(input_array)
start = 0
end = size
while end > start:
mid = (start + end) / 2
mid_value = input_array[mid]
if value == mid_value:
return mid
elif value < mid_value:
end = mid
else:
start = mid + 1
""" Step 3"""
return -1
test_list = [1,3,9,11,15,19,29]
test_val1 = 25
test_val2 = 15
print binary_search(test_list, test_val1)
print binary_search(test_list, test_val2)
print binary_search(test_list, 1)
print binary_search(test_list, 3)
print binary_search(test_list, 9)
print binary_search(test_list, 11)
print binary_search(test_list, 19)
print binary_search(test_list, 29)
print binary_search(test_list, 19)
print binary_search(test_list, 2)
|
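The record above is Python 2 throughout: the bare print statements, and mid = (start + end) / 2, which stays an int only under Python 2 division (under Python 3 the float mid would raise TypeError when used as a list index). The same iterative search ported to Python 3 with floor division, as a sketch:

def binary_search(input_array, value):
    # Iterative binary search over a sorted list of distinct elements.
    # Returns the index of value, or -1 if it is absent.
    start, end = 0, len(input_array)
    while end > start:
        mid = (start + end) // 2  # floor division keeps mid an int
        if input_array[mid] == value:
            return mid
        if input_array[mid] > value:
            end = mid
        else:
            start = mid + 1
    return -1


if __name__ == '__main__':
    data = [1, 3, 9, 11, 15, 19, 29]
    print(binary_search(data, 25))  # -1: not present
    print(binary_search(data, 15))  # 4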
|
5997adbb07b05c6ef4439d106a80c1342e38b9c7
|
mothermayi/entryway.py
|
mothermayi/entryway.py
|
import pkg_resources
def get_entries(name):
entries = []
for entry in pkg_resources.iter_entry_points(group='mothermayi'):
if entry.name != name:
continue
runner = entry.load()
entries.append(runner)
return entries
|
Add a function for getting plugin entry points
|
Add a function for getting plugin entry points
This will be how we pull up plugins that have been installed in the
environment and execute them
|
Python
|
mit
|
EliRibble/mothermayi
|
Add a function for getting plugin entry points
This will be how we pull up plugins that have been installed in the
environment and execute them
|
import pkg_resources
def get_entries(name):
entries = []
for entry in pkg_resources.iter_entry_points(group='mothermayi'):
if entry.name != name:
continue
runner = entry.load()
entries.append(runner)
return entries
|
<commit_before><commit_msg>Add a function for getting plugin entry points
This will be how we pull up plugins that have been installed in the
environment and execute them<commit_after>
|
import pkg_resources
def get_entries(name):
entries = []
for entry in pkg_resources.iter_entry_points(group='mothermayi'):
if entry.name != name:
continue
runner = entry.load()
entries.append(runner)
return entries
|
Add a function for getting plugin entry points
This will be how we pull up plugins that have been installed in the
environment and execute themimport pkg_resources
def get_entries(name):
entries = []
for entry in pkg_resources.iter_entry_points(group='mothermayi'):
if entry.name != name:
continue
runner = entry.load()
entries.append(runner)
return entries
|
<commit_before><commit_msg>Add a function for getting plugin entry points
This will be how we pull up plugins that have been installed in the
environment and execute them<commit_after>import pkg_resources
def get_entries(name):
entries = []
for entry in pkg_resources.iter_entry_points(group='mothermayi'):
if entry.name != name:
continue
runner = entry.load()
entries.append(runner)
return entries
|
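get_entries above only finds plugins that registered themselves under the mothermayi entry-point group at install time. For context, a sketch of the producing side; the package name mothermayi-isort, the module, and the pre-commit entry are hypothetical examples, not names taken from the project:

# setup.py of a hypothetical plugin package
from setuptools import setup

setup(
    name='mothermayi-isort',
    version='0.1',
    py_modules=['mothermayi_isort'],
    entry_points={
        'mothermayi': [
            # get_entries('pre-commit') would load and return this callable
            'pre-commit = mothermayi_isort:pre_commit',
        ],
    },
)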
|
6e58e0fbf059f137bfcfef070968191f2ea42655
|
tests/libpeas/plugins/extension-python/extension-python.py
|
tests/libpeas/plugins/extension-python/extension-python.py
|
# -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
from gi.repository import GObject, Introspection, Peas
class ExtensionPythonPlugin(GObject.Object, Peas.Activatable,
Introspection.Base, Introspection.Callable,
Introspection.PropertiesPrerequisite,
Introspection.Properties,
Introspection.HasPrerequisite):
object = GObject.property(type=GObject.Object)
construct_only = GObject.property(type=str)
read_only = GObject.property(type=str, default="read-only")
write_only = GObject.property(type=str)
readwrite = GObject.property(type=str, default="readwrite")
prerequisite = GObject.property(type=str)
def do_activate(self):
pass
def do_deactivate(self):
pass
def do_update_state(self):
pass
def do_get_plugin_info(self):
return self.plugin_info
def do_get_settings(self):
return self.plugin_info.get_settings(None)
def do_call_with_return(self):
return "Hello, World!";
def do_call_no_args(self):
pass
def do_call_single_arg(self):
return True
def do_call_multi_args(self, in_, inout):
return (inout, in_)
|
# -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
from gi.repository import GObject, Introspection, Peas
class ExtensionPythonPlugin(GObject.Object, Peas.Activatable,
Introspection.Base, Introspection.Callable,
Introspection.PropertiesPrerequisite,
Introspection.Properties,
Introspection.HasPrerequisite):
object = GObject.property(type=GObject.Object)
construct_only = GObject.property(type=str)
read_only = GObject.property(type=str, default="read-only")
write_only = GObject.property(type=str)
readwrite = GObject.property(type=str, default="readwrite")
prerequisite = GObject.property(type=str)
def do_activate(self):
pass
def do_deactivate(self):
pass
def do_update_state(self):
pass
def do_get_plugin_info(self):
return self.plugin_info
def do_get_settings(self):
return self.plugin_info.get_settings(None)
def do_call_with_return(self):
return "Hello, World!"
def do_call_no_args(self):
pass
def do_call_single_arg(self):
return True
def do_call_multi_args(self, in_, inout):
return (inout, in_)
|
Fix style issues in python test plugin
|
Fix style issues in python test plugin
https://bugzilla.gnome.org/show_bug.cgi?id=678339
|
Python
|
lgpl-2.1
|
Distrotech/libpeas,chergert/libpeas,gregier/libpeas,chergert/libpeas,GNOME/libpeas,gregier/libpeas,gregier/libpeas,gregier/libpeas,Distrotech/libpeas,Distrotech/libpeas,GNOME/libpeas,chergert/libpeas
|
# -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
from gi.repository import GObject, Introspection, Peas
class ExtensionPythonPlugin(GObject.Object, Peas.Activatable,
Introspection.Base, Introspection.Callable,
Introspection.PropertiesPrerequisite,
Introspection.Properties,
Introspection.HasPrerequisite):
object = GObject.property(type=GObject.Object)
construct_only = GObject.property(type=str)
read_only = GObject.property(type=str, default="read-only")
write_only = GObject.property(type=str)
readwrite = GObject.property(type=str, default="readwrite")
prerequisite = GObject.property(type=str)
def do_activate(self):
pass
def do_deactivate(self):
pass
def do_update_state(self):
pass
def do_get_plugin_info(self):
return self.plugin_info
def do_get_settings(self):
return self.plugin_info.get_settings(None)
def do_call_with_return(self):
return "Hello, World!";
def do_call_no_args(self):
pass
def do_call_single_arg(self):
return True
def do_call_multi_args(self, in_, inout):
return (inout, in_)
Fix style issues in python test plugin
https://bugzilla.gnome.org/show_bug.cgi?id=678339
|
# -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
from gi.repository import GObject, Introspection, Peas
class ExtensionPythonPlugin(GObject.Object, Peas.Activatable,
Introspection.Base, Introspection.Callable,
Introspection.PropertiesPrerequisite,
Introspection.Properties,
Introspection.HasPrerequisite):
object = GObject.property(type=GObject.Object)
construct_only = GObject.property(type=str)
read_only = GObject.property(type=str, default="read-only")
write_only = GObject.property(type=str)
readwrite = GObject.property(type=str, default="readwrite")
prerequisite = GObject.property(type=str)
def do_activate(self):
pass
def do_deactivate(self):
pass
def do_update_state(self):
pass
def do_get_plugin_info(self):
return self.plugin_info
def do_get_settings(self):
return self.plugin_info.get_settings(None)
def do_call_with_return(self):
return "Hello, World!"
def do_call_no_args(self):
pass
def do_call_single_arg(self):
return True
def do_call_multi_args(self, in_, inout):
return (inout, in_)
|
<commit_before># -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
from gi.repository import GObject, Introspection, Peas
class ExtensionPythonPlugin(GObject.Object, Peas.Activatable,
Introspection.Base, Introspection.Callable,
Introspection.PropertiesPrerequisite,
Introspection.Properties,
Introspection.HasPrerequisite):
object = GObject.property(type=GObject.Object)
construct_only = GObject.property(type=str)
read_only = GObject.property(type=str, default="read-only")
write_only = GObject.property(type=str)
readwrite = GObject.property(type=str, default="readwrite")
prerequisite = GObject.property(type=str)
def do_activate(self):
pass
def do_deactivate(self):
pass
def do_update_state(self):
pass
def do_get_plugin_info(self):
return self.plugin_info
def do_get_settings(self):
return self.plugin_info.get_settings(None)
def do_call_with_return(self):
return "Hello, World!";
def do_call_no_args(self):
pass
def do_call_single_arg(self):
return True
def do_call_multi_args(self, in_, inout):
return (inout, in_)
<commit_msg>Fix style issues in python test plugin
https://bugzilla.gnome.org/show_bug.cgi?id=678339<commit_after>
|
# -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
from gi.repository import GObject, Introspection, Peas
class ExtensionPythonPlugin(GObject.Object, Peas.Activatable,
Introspection.Base, Introspection.Callable,
Introspection.PropertiesPrerequisite,
Introspection.Properties,
Introspection.HasPrerequisite):
object = GObject.property(type=GObject.Object)
construct_only = GObject.property(type=str)
read_only = GObject.property(type=str, default="read-only")
write_only = GObject.property(type=str)
readwrite = GObject.property(type=str, default="readwrite")
prerequisite = GObject.property(type=str)
def do_activate(self):
pass
def do_deactivate(self):
pass
def do_update_state(self):
pass
def do_get_plugin_info(self):
return self.plugin_info
def do_get_settings(self):
return self.plugin_info.get_settings(None)
def do_call_with_return(self):
return "Hello, World!"
def do_call_no_args(self):
pass
def do_call_single_arg(self):
return True
def do_call_multi_args(self, in_, inout):
return (inout, in_)
|
# -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
from gi.repository import GObject, Introspection, Peas
class ExtensionPythonPlugin(GObject.Object, Peas.Activatable,
Introspection.Base, Introspection.Callable,
Introspection.PropertiesPrerequisite,
Introspection.Properties,
Introspection.HasPrerequisite):
object = GObject.property(type=GObject.Object)
construct_only = GObject.property(type=str)
read_only = GObject.property(type=str, default="read-only")
write_only = GObject.property(type=str)
readwrite = GObject.property(type=str, default="readwrite")
prerequisite = GObject.property(type=str)
def do_activate(self):
pass
def do_deactivate(self):
pass
def do_update_state(self):
pass
def do_get_plugin_info(self):
return self.plugin_info
def do_get_settings(self):
return self.plugin_info.get_settings(None)
def do_call_with_return(self):
return "Hello, World!";
def do_call_no_args(self):
pass
def do_call_single_arg(self):
return True
def do_call_multi_args(self, in_, inout):
return (inout, in_)
Fix style issues in python test plugin
https://bugzilla.gnome.org/show_bug.cgi?id=678339# -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
from gi.repository import GObject, Introspection, Peas
class ExtensionPythonPlugin(GObject.Object, Peas.Activatable,
Introspection.Base, Introspection.Callable,
Introspection.PropertiesPrerequisite,
Introspection.Properties,
Introspection.HasPrerequisite):
object = GObject.property(type=GObject.Object)
construct_only = GObject.property(type=str)
read_only = GObject.property(type=str, default="read-only")
write_only = GObject.property(type=str)
readwrite = GObject.property(type=str, default="readwrite")
prerequisite = GObject.property(type=str)
def do_activate(self):
pass
def do_deactivate(self):
pass
def do_update_state(self):
pass
def do_get_plugin_info(self):
return self.plugin_info
def do_get_settings(self):
return self.plugin_info.get_settings(None)
def do_call_with_return(self):
return "Hello, World!"
def do_call_no_args(self):
pass
def do_call_single_arg(self):
return True
def do_call_multi_args(self, in_, inout):
return (inout, in_)
|
<commit_before># -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
from gi.repository import GObject, Introspection, Peas
class ExtensionPythonPlugin(GObject.Object, Peas.Activatable,
Introspection.Base, Introspection.Callable,
Introspection.PropertiesPrerequisite,
Introspection.Properties,
Introspection.HasPrerequisite):
object = GObject.property(type=GObject.Object)
construct_only = GObject.property(type=str)
read_only = GObject.property(type=str, default="read-only")
write_only = GObject.property(type=str)
readwrite = GObject.property(type=str, default="readwrite")
prerequisite = GObject.property(type=str)
def do_activate(self):
pass
def do_deactivate(self):
pass
def do_update_state(self):
pass
def do_get_plugin_info(self):
return self.plugin_info
def do_get_settings(self):
return self.plugin_info.get_settings(None)
def do_call_with_return(self):
return "Hello, World!";
def do_call_no_args(self):
pass
def do_call_single_arg(self):
return True
def do_call_multi_args(self, in_, inout):
return (inout, in_)
<commit_msg>Fix style issues in python test plugin
https://bugzilla.gnome.org/show_bug.cgi?id=678339<commit_after># -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
from gi.repository import GObject, Introspection, Peas
class ExtensionPythonPlugin(GObject.Object, Peas.Activatable,
Introspection.Base, Introspection.Callable,
Introspection.PropertiesPrerequisite,
Introspection.Properties,
Introspection.HasPrerequisite):
object = GObject.property(type=GObject.Object)
construct_only = GObject.property(type=str)
read_only = GObject.property(type=str, default="read-only")
write_only = GObject.property(type=str)
readwrite = GObject.property(type=str, default="readwrite")
prerequisite = GObject.property(type=str)
def do_activate(self):
pass
def do_deactivate(self):
pass
def do_update_state(self):
pass
def do_get_plugin_info(self):
return self.plugin_info
def do_get_settings(self):
return self.plugin_info.get_settings(None)
def do_call_with_return(self):
return "Hello, World!"
def do_call_no_args(self):
pass
def do_call_single_arg(self):
return True
def do_call_multi_args(self, in_, inout):
return (inout, in_)
|
4faecdf426af4b2d9a14ef65efe3e72fa088cdb3
|
src/examples/tutorial/example_bpmhistogramdescriptors.py
|
src/examples/tutorial/example_bpmhistogramdescriptors.py
|
import sys
from essentia.standard import *
from essentia import Pool
import pylab as plt
try:
input_file = sys.argv[1]
except:
print "usage:", sys.argv[0], "<input_file>"
sys.exit()
audio = MonoLoader(filename=input_file)()
bpm, _, _, _, intervals = RhythmExtractor2013()(audio)
peak1_bpm, peak1_weight, peak1_spread, peak2_bpm, peak2_weight, peak2_spread, histogram = BpmHistogramDescriptors()(intervals)
print "Overall BPM:", bpm
print "First peak:", peak1_bpm, "bpm"
print "Second peak:", peak2_bpm, "bpm"
fig, ax = plt.subplots()
ax.bar(range(len(histogram)), histogram, width=1)
ax.set_xlabel('BPM')
ax.set_ylabel('Frequency')
ax.set_xticks([20 * x + 0.5 for x in range(len(histogram) / 20)])
ax.set_xticklabels([str(20 * x) for x in range(len(histogram) / 20)])
plt.show()
|
Add python examples for BpmHistogramDescriptors
|
Add python examples for BpmHistogramDescriptors
|
Python
|
agpl-3.0
|
MTG/essentia,MTG/essentia,carthach/essentia,MTG/essentia,carthach/essentia,carthach/essentia,carthach/essentia,MTG/essentia,MTG/essentia,carthach/essentia
|
Add python examples for BpmHistogramDescriptors
|
import sys
from essentia.standard import *
from essentia import Pool
import pylab as plt
try:
input_file = sys.argv[1]
except:
print "usage:", sys.argv[0], "<input_file>"
sys.exit()
audio = MonoLoader(filename=input_file)()
bpm, _, _, _, intervals = RhythmExtractor2013()(audio)
peak1_bpm, peak1_weight, peak1_spread, peak2_bpm, peak2_weight, peak2_spread, histogram = BpmHistogramDescriptors()(intervals)
print "Overall BPM:", bpm
print "First peak:", peak1_bpm, "bpm"
print "Second peak:", peak2_bpm, "bpm"
fig, ax = plt.subplots()
ax.bar(range(len(histogram)), histogram, width=1)
ax.set_xlabel('BPM')
ax.set_ylabel('Frequency')
ax.set_xticks([20 * x + 0.5 for x in range(len(histogram) / 20)])
ax.set_xticklabels([str(20 * x) for x in range(len(histogram) / 20)])
plt.show()
|
<commit_before><commit_msg>Add python examples for BpmHistogramDescriptors<commit_after>
|
import sys
from essentia.standard import *
from essentia import Pool
import pylab as plt
try:
input_file = sys.argv[1]
except:
print "usage:", sys.argv[0], "<input_file>"
sys.exit()
audio = MonoLoader(filename=input_file)()
bpm, _, _, _, intervals = RhythmExtractor2013()(audio)
peak1_bpm, peak1_weight, peak1_spread, peak2_bpm, peak2_weight, peak2_spread, histogram = BpmHistogramDescriptors()(intervals)
print "Overall BPM:", bpm
print "First peak:", peak1_bpm, "bpm"
print "Second peak:", peak2_bpm, "bpm"
fig, ax = plt.subplots()
ax.bar(range(len(histogram)), histogram, width=1)
ax.set_xlabel('BPM')
ax.set_ylabel('Frequency')
ax.set_xticks([20 * x + 0.5 for x in range(len(histogram) / 20)])
ax.set_xticklabels([str(20 * x) for x in range(len(histogram) / 20)])
plt.show()
|
Add python examples for BpmHistogramDescriptorsimport sys
from essentia.standard import *
from essentia import Pool
import pylab as plt
try:
input_file = sys.argv[1]
except:
print "usage:", sys.argv[0], "<input_file>"
sys.exit()
audio = MonoLoader(filename=input_file)()
bpm, _, _, _, intervals = RhythmExtractor2013()(audio)
peak1_bpm, peak1_weight, peak1_spread, peak2_bpm, peak2_weight, peak2_spread, histogram = BpmHistogramDescriptors()(intervals)
print "Overall BPM:", bpm
print "First peak:", peak1_bpm, "bpm"
print "Second peak:", peak2_bpm, "bpm"
fig, ax = plt.subplots()
ax.bar(range(len(histogram)), histogram, width=1)
ax.set_xlabel('BPM')
ax.set_ylabel('Frequency')
ax.set_xticks([20 * x + 0.5 for x in range(len(histogram) / 20)])
ax.set_xticklabels([str(20 * x) for x in range(len(histogram) / 20)])
plt.show()
|
<commit_before><commit_msg>Add python examples for BpmHistogramDescriptors<commit_after>import sys
from essentia.standard import *
from essentia import Pool
import pylab as plt
try:
input_file = sys.argv[1]
except:
print "usage:", sys.argv[0], "<input_file>"
sys.exit()
audio = MonoLoader(filename=input_file)()
bpm, _, _, _, intervals = RhythmExtractor2013()(audio)
peak1_bpm, peak1_weight, peak1_spread, peak2_bpm, peak2_weight, peak2_spread, histogram = BpmHistogramDescriptors()(intervals)
print "Overall BPM:", bpm
print "First peak:", peak1_bpm, "bpm"
print "Second peak:", peak2_bpm, "bpm"
fig, ax = plt.subplots()
ax.bar(range(len(histogram)), histogram, width=1)
ax.set_xlabel('BPM')
ax.set_ylabel('Frequency')
ax.set_xticks([20 * x + 0.5 for x in range(len(histogram) / 20)])
ax.set_xticklabels([str(20 * x) for x in range(len(histogram) / 20)])
plt.show()
|
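This example is also Python 2-only: the print statements, the bare except around the argument check, and len(histogram) / 20, which is a float under Python 3 and rejected by range(). A sketch of the same script ported to Python 3; the Essentia calls are taken unchanged from the record, with matplotlib.pyplot standing in for the deprecated pylab:

import sys

import matplotlib.pyplot as plt
from essentia.standard import (MonoLoader, RhythmExtractor2013,
                               BpmHistogramDescriptors)

if len(sys.argv) != 2:
    print("usage:", sys.argv[0], "<input_file>")
    sys.exit(1)

audio = MonoLoader(filename=sys.argv[1])()
bpm, _, _, _, intervals = RhythmExtractor2013()(audio)
(peak1_bpm, _, _, peak2_bpm, _, _,
 histogram) = BpmHistogramDescriptors()(intervals)

print("Overall BPM:", bpm)
print("First peak:", peak1_bpm, "bpm")
print("Second peak:", peak2_bpm, "bpm")

fig, ax = plt.subplots()
ax.bar(range(len(histogram)), histogram, width=1)
ax.set_xlabel('BPM')
ax.set_ylabel('Frequency')
n_ticks = len(histogram) // 20  # floor division: range() needs an int
ax.set_xticks([20 * x + 0.5 for x in range(n_ticks)])
ax.set_xticklabels([str(20 * x) for x in range(n_ticks)])
plt.show()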
|
32e227941d10b6e4ae478d28adb0deb7b7fc463f
|
tools/serialization_time.py
|
tools/serialization_time.py
|
"""
:author: samu
:created: 4/17/13 10:37 PM
"""
import timeit
from smartrpyc.utils import serialization
import msgpack
def run_test(module):
module.unpackb(module.packb({
'string': "This is a string",
'unicode': u"This is a unicode",
'None': None,
'True': True,
'False': False,
'List': ['str', u'unicode', 10, 10.5, None, True, False],
'int': 1000,
'float': 3.141592,
}))
def run_vanilla():
run_test(msgpack)
def run_custom():
run_test(serialization)
if __name__ == '__main__':
time1 = timeit.timeit(stmt='run_vanilla()',
setup='from __main__ import run_vanilla',
number=100000)
time2 = timeit.timeit(stmt='run_custom()',
setup='from __main__ import run_custom',
number=100000)
print time1
print time2
|
Add script to time serialization functions
|
Add script to time serialization functions
|
Python
|
apache-2.0
|
xk0/SmartRPyC,xk0/SmartRPyC
|
Add script to time serialization functions
|
"""
:author: samu
:created: 4/17/13 10:37 PM
"""
import timeit
from smartrpyc.utils import serialization
import msgpack
def run_test(module):
module.unpackb(module.packb({
'string': "This is a string",
'unicode': u"This is a unicode",
'None': None,
'True': True,
'False': False,
'List': ['str', u'unicode', 10, 10.5, None, True, False],
'int': 1000,
'float': 3.141592,
}))
def run_vanilla():
run_test(msgpack)
def run_custom():
run_test(serialization)
if __name__ == '__main__':
time1 = timeit.timeit(stmt='run_vanilla()',
setup='from __main__ import run_vanilla',
number=100000)
time2 = timeit.timeit(stmt='run_custom()',
setup='from __main__ import run_custom',
number=100000)
print time1
print time2
|
<commit_before><commit_msg>Add script to time serialization functions<commit_after>
|
"""
:author: samu
:created: 4/17/13 10:37 PM
"""
import timeit
from smartrpyc.utils import serialization
import msgpack
def run_test(module):
module.unpackb(module.packb({
'string': "This is a string",
'unicode': u"This is a unicode",
'None': None,
'True': True,
'False': False,
'List': ['str', u'unicode', 10, 10.5, None, True, False],
'int': 1000,
'float': 3.141592,
}))
def run_vanilla():
run_test(msgpack)
def run_custom():
run_test(serialization)
if __name__ == '__main__':
time1 = timeit.timeit(stmt='run_vanilla()',
setup='from __main__ import run_vanilla',
number=100000)
time2 = timeit.timeit(stmt='run_custom()',
setup='from __main__ import run_custom',
number=100000)
print time1
print time2
|
Add script to time serialization functions"""
:author: samu
:created: 4/17/13 10:37 PM
"""
import timeit
from smartrpyc.utils import serialization
import msgpack
def run_test(module):
module.unpackb(module.packb({
'string': "This is a string",
'unicode': u"This is a unicode",
'None': None,
'True': True,
'False': False,
'List': ['str', u'unicode', 10, 10.5, None, True, False],
'int': 1000,
'float': 3.141592,
}))
def run_vanilla():
run_test(msgpack)
def run_custom():
run_test(serialization)
if __name__ == '__main__':
time1 = timeit.timeit(stmt='run_vanilla()',
setup='from __main__ import run_vanilla',
number=100000)
time2 = timeit.timeit(stmt='run_custom()',
setup='from __main__ import run_custom',
number=100000)
print time1
print time2
|
<commit_before><commit_msg>Add script to time serialization functions<commit_after>"""
:author: samu
:created: 4/17/13 10:37 PM
"""
import timeit
from smartrpyc.utils import serialization
import msgpack
def run_test(module):
module.unpackb(module.packb({
'string': "This is a string",
'unicode': u"This is a unicode",
'None': None,
'True': True,
'False': False,
'List': ['str', u'unicode', 10, 10.5, None, True, False],
'int': 1000,
'float': 3.141592,
}))
def run_vanilla():
run_test(msgpack)
def run_custom():
run_test(serialization)
if __name__ == '__main__':
time1 = timeit.timeit(stmt='run_vanilla()',
setup='from __main__ import run_vanilla',
number=100000)
time2 = timeit.timeit(stmt='run_custom()',
setup='from __main__ import run_custom',
number=100000)
print time1
print time2
|
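A caveat on the timing above: one timeit.timeit call per packer is a single noisy sample. A sketch using timeit.repeat and taking the minimum of several runs, the usual way to compare implementations; it assumes run_vanilla and run_custom are defined in __main__ exactly as in the record:

import timeit


def compare(number=100000, repeats=5):
    # Best-of-N: the minimum is the reading least inflated by
    # scheduler and cache noise.
    vanilla = min(timeit.repeat('run_vanilla()',
                                setup='from __main__ import run_vanilla',
                                number=number, repeat=repeats))
    custom = min(timeit.repeat('run_custom()',
                               setup='from __main__ import run_custom',
                               number=number, repeat=repeats))
    print('msgpack: %.3fs  custom: %.3fs' % (vanilla, custom))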
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.