| commit (string, 40–40) | old_file (string, 4–118) | new_file (string, 4–118) | old_contents (string, 0–2.94k) | new_contents (string, 1–4.43k) | subject (string, 15–444) | message (string, 16–3.45k) | lang (1 distinct value) | license (13 distinct values) | repos (string, 5–43.2k) | prompt (string, 17–4.58k) | response (string, 1–4.43k) | prompt_tagged (string, 58–4.62k) | response_tagged (string, 1–4.43k) | text (string, 132–7.29k) | text_tagged (string, 173–7.33k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
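The prompt, response, and text columns are not independent fields: as the example rows below show, they appear to be simple concatenations of the base columns, with the `*_tagged` variants wrapping the content in `<commit_before>`, `<commit_msg>`, and `<commit_after>` markers. The sketch below reconstructs that relationship for a single row; it is inferred purely from the visible examples (the column names come from the header above), not from an official builder script.

```python
def build_derived_columns(row):
    """Rebuild the derived columns of one example row (inferred layout, not an official spec)."""
    # Plain prompt/response: commit message in, new file contents out.
    prompt = row["message"]
    response = row["new_contents"]

    # Tagged variants wrap the same content in commit markers;
    # old_contents is empty for the rows shown here.
    prompt_tagged = (
        "<commit_before>" + row["old_contents"]
        + "<commit_msg>" + row["message"]
        + "<commit_after>"
    )
    response_tagged = row["new_contents"]

    # The text columns concatenate prompt and response into one training string.
    return {
        "prompt": prompt,
        "response": response,
        "prompt_tagged": prompt_tagged,
        "response_tagged": response_tagged,
        "text": prompt + response,
        "text_tagged": prompt_tagged + response_tagged,
    }
```

Applied to the first row below, this would reproduce the repeated code blocks visible in its prompt, response, text, and text_tagged cells, which is why each example appears to contain the same file contents several times.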
0d64439bf587bad91d4ec622ef936c1d1fa19352
|
accelerator/migrations/0054_update_eventbrite_organizer_id_field.py
|
accelerator/migrations/0054_update_eventbrite_organizer_id_field.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0053_add_eventbrite_organization_id_field'),
]
operations = [
migrations.RenameField(
model_name='program',
old_name='eventbrite_organization_id',
new_name='eventbrite_organizer_id'
)
]
|
Update the field to eventbrite_organizer_id
|
Update the field to eventbrite_organizer_id
|
Python
|
mit
|
masschallenge/django-accelerator,masschallenge/django-accelerator
|
Update the field to eventbrite_organizer_id
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0053_add_eventbrite_organization_id_field'),
]
operations = [
migrations.RenameField(
model_name='program',
old_name='eventbrite_organization_id',
new_name='eventbrite_organizer_id'
)
]
|
<commit_before><commit_msg>Update the field to eventbrite_organizer_id<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0053_add_eventbrite_organization_id_field'),
]
operations = [
migrations.RenameField(
model_name='program',
old_name='eventbrite_organization_id',
new_name='eventbrite_organizer_id'
)
]
|
Update the field to eventbrite_organizer_id# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0053_add_eventbrite_organization_id_field'),
]
operations = [
migrations.RenameField(
model_name='program',
old_name='eventbrite_organization_id',
new_name='eventbrite_organizer_id'
)
]
|
<commit_before><commit_msg>Update the field to eventbrite_organizer_id<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0053_add_eventbrite_organization_id_field'),
]
operations = [
migrations.RenameField(
model_name='program',
old_name='eventbrite_organization_id',
new_name='eventbrite_organizer_id'
)
]
|
|
7b7ae3bd3ea80af90238edbcec8e6377b30b9d35
|
supriya/tools/nonrealtimetools/test/test_diff.py
|
supriya/tools/nonrealtimetools/test/test_diff.py
|
def p2c_to_c2p(p2c):
c2p = {}
nodes_needing_parents = list(p2c)
for parent, children in p2c.items():
if not children:
continue
for child in children:
c2p[child] = parent
nodes_needing_parents.remove(child)
assert len(nodes_needing_parents) == 1
c2p[nodes_needing_parents[0]] = None
return c2p
def test_diff_01():
source_p2c = {
'A': ('B', 'C', 'D'),
'B': ('E', 'F'),
'C': None,
'D': ('G', 'H'),
'E': ('I',),
'F': ('J',),
'G': None,
'H': None,
'I': None,
'J': None,
}
source_c2p = p2c_to_c2p(source_p2c)
target_p2c = {
'A': ('K', 'D', 'B'),
'B': ('M', 'N'),
'C': None,
'D': ('G',),
'G': ('H',),
'H': None,
'J': None,
'K': ('J', 'L'),
'L': None,
'M': ('C',),
'N': None,
}
target_c2p = p2c_to_c2p(target_p2c)
initial_node = 'A'
requests = []
def recurse(parent, children):
if parent not in target_p2c:
request = ('free', parent)
|
Add sketch for tree-diff test.
|
Add sketch for tree-diff test.
|
Python
|
mit
|
josiah-wolf-oberholtzer/supriya,Pulgama/supriya,Pulgama/supriya,Pulgama/supriya,Pulgama/supriya
|
Add sketch for tree-diff test.
|
def p2c_to_c2p(p2c):
c2p = {}
nodes_needing_parents = list(p2c)
for parent, children in p2c.items():
if not children:
continue
for child in children:
c2p[child] = parent
nodes_needing_parents.remove(child)
assert len(nodes_needing_parents) == 1
c2p[nodes_needing_parents[0]] = None
return c2p
def test_diff_01():
source_p2c = {
'A': ('B', 'C', 'D'),
'B': ('E', 'F'),
'C': None,
'D': ('G', 'H'),
'E': ('I',),
'F': ('J',),
'G': None,
'H': None,
'I': None,
'J': None,
}
source_c2p = p2c_to_c2p(source_p2c)
target_p2c = {
'A': ('K', 'D', 'B'),
'B': ('M', 'N'),
'C': None,
'D': ('G',),
'G': ('H',),
'H': None,
'J': None,
'K': ('J', 'L'),
'L': None,
'M': ('C',),
'N': None,
}
target_c2p = p2c_to_c2p(target_p2c)
initial_node = 'A'
requests = []
def recurse(parent, children):
if parent not in target_p2c:
request = ('free', parent)
|
<commit_before><commit_msg>Add sketch for tree-diff test.<commit_after>
|
def p2c_to_c2p(p2c):
c2p = {}
nodes_needing_parents = list(p2c)
for parent, children in p2c.items():
if not children:
continue
for child in children:
c2p[child] = parent
nodes_needing_parents.remove(child)
assert len(nodes_needing_parents) == 1
c2p[nodes_needing_parents[0]] = None
return c2p
def test_diff_01():
source_p2c = {
'A': ('B', 'C', 'D'),
'B': ('E', 'F'),
'C': None,
'D': ('G', 'H'),
'E': ('I',),
'F': ('J',),
'G': None,
'H': None,
'I': None,
'J': None,
}
source_c2p = p2c_to_c2p(source_p2c)
target_p2c = {
'A': ('K', 'D', 'B'),
'B': ('M', 'N'),
'C': None,
'D': ('G',),
'G': ('H',),
'H': None,
'J': None,
'K': ('J', 'L'),
'L': None,
'M': ('C',),
'N': None,
}
target_c2p = p2c_to_c2p(target_p2c)
initial_node = 'A'
requests = []
def recurse(parent, children):
if parent not in target_p2c:
request = ('free', parent)
|
Add sketch for tree-diff test.
def p2c_to_c2p(p2c):
c2p = {}
nodes_needing_parents = list(p2c)
for parent, children in p2c.items():
if not children:
continue
for child in children:
c2p[child] = parent
nodes_needing_parents.remove(child)
assert len(nodes_needing_parents) == 1
c2p[nodes_needing_parents[0]] = None
return c2p
def test_diff_01():
source_p2c = {
'A': ('B', 'C', 'D'),
'B': ('E', 'F'),
'C': None,
'D': ('G', 'H'),
'E': ('I',),
'F': ('J',),
'G': None,
'H': None,
'I': None,
'J': None,
}
source_c2p = p2c_to_c2p(source_p2c)
target_p2c = {
'A': ('K', 'D', 'B'),
'B': ('M', 'N'),
'C': None,
'D': ('G',),
'G': ('H',),
'H': None,
'J': None,
'K': ('J', 'L'),
'L': None,
'M': ('C',),
'N': None,
}
target_c2p = p2c_to_c2p(target_p2c)
initial_node = 'A'
requests = []
def recurse(parent, children):
if parent not in target_p2c:
request = ('free', parent)
|
<commit_before><commit_msg>Add sketch for tree-diff test.<commit_after>
def p2c_to_c2p(p2c):
c2p = {}
nodes_needing_parents = list(p2c)
for parent, children in p2c.items():
if not children:
continue
for child in children:
c2p[child] = parent
nodes_needing_parents.remove(child)
assert len(nodes_needing_parents) == 1
c2p[nodes_needing_parents[0]] = None
return c2p
def test_diff_01():
source_p2c = {
'A': ('B', 'C', 'D'),
'B': ('E', 'F'),
'C': None,
'D': ('G', 'H'),
'E': ('I',),
'F': ('J',),
'G': None,
'H': None,
'I': None,
'J': None,
}
source_c2p = p2c_to_c2p(source_p2c)
target_p2c = {
'A': ('K', 'D', 'B'),
'B': ('M', 'N'),
'C': None,
'D': ('G',),
'G': ('H',),
'H': None,
'J': None,
'K': ('J', 'L'),
'L': None,
'M': ('C',),
'N': None,
}
target_c2p = p2c_to_c2p(target_p2c)
initial_node = 'A'
requests = []
def recurse(parent, children):
if parent not in target_p2c:
request = ('free', parent)
|
|
7106069d4dc6dd9a55634da419d4c270c43f0849
|
zerver/migrations/0304_remove_default_status_of_default_private_streams.py
|
zerver/migrations/0304_remove_default_status_of_default_private_streams.py
|
# Generated by Django 2.2.14 on 2020-08-10 20:21
from django.db import migrations
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def remove_default_status_of_default_private_streams(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
DefaultStream = apps.get_model('zerver', 'DefaultStream')
DefaultStream.objects.filter(stream__invite_only=True).delete()
class Migration(migrations.Migration):
dependencies = [
('zerver', '0303_realm_wildcard_mention_policy'),
]
operations = [
migrations.RunPython(remove_default_status_of_default_private_streams,
reverse_code=migrations.RunPython.noop,
elidable=True),
]
|
Add migration to remove default status of private streams.
|
migration: Add migration to remove default status of private streams.
This commit adds migration which removes default status of exisitng
default private streams, i.e. private stream exists but they are no
longer default.
|
Python
|
apache-2.0
|
eeshangarg/zulip,andersk/zulip,showell/zulip,rht/zulip,rht/zulip,zulip/zulip,hackerkid/zulip,showell/zulip,hackerkid/zulip,andersk/zulip,zulip/zulip,zulip/zulip,eeshangarg/zulip,andersk/zulip,zulip/zulip,kou/zulip,andersk/zulip,rht/zulip,rht/zulip,rht/zulip,eeshangarg/zulip,punchagan/zulip,showell/zulip,zulip/zulip,punchagan/zulip,showell/zulip,kou/zulip,hackerkid/zulip,punchagan/zulip,showell/zulip,kou/zulip,kou/zulip,punchagan/zulip,eeshangarg/zulip,andersk/zulip,showell/zulip,eeshangarg/zulip,punchagan/zulip,punchagan/zulip,andersk/zulip,punchagan/zulip,hackerkid/zulip,zulip/zulip,eeshangarg/zulip,eeshangarg/zulip,rht/zulip,kou/zulip,rht/zulip,showell/zulip,hackerkid/zulip,hackerkid/zulip,kou/zulip,andersk/zulip,kou/zulip,zulip/zulip,hackerkid/zulip
|
migration: Add migration to remove default status of private streams.
This commit adds migration which removes default status of exisitng
default private streams, i.e. private stream exists but they are no
longer default.
|
# Generated by Django 2.2.14 on 2020-08-10 20:21
from django.db import migrations
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def remove_default_status_of_default_private_streams(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
DefaultStream = apps.get_model('zerver', 'DefaultStream')
DefaultStream.objects.filter(stream__invite_only=True).delete()
class Migration(migrations.Migration):
dependencies = [
('zerver', '0303_realm_wildcard_mention_policy'),
]
operations = [
migrations.RunPython(remove_default_status_of_default_private_streams,
reverse_code=migrations.RunPython.noop,
elidable=True),
]
|
<commit_before><commit_msg>migration: Add migration to remove default status of private streams.
This commit adds migration which removes default status of exisitng
default private streams, i.e. private stream exists but they are no
longer default.<commit_after>
|
# Generated by Django 2.2.14 on 2020-08-10 20:21
from django.db import migrations
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def remove_default_status_of_default_private_streams(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
DefaultStream = apps.get_model('zerver', 'DefaultStream')
DefaultStream.objects.filter(stream__invite_only=True).delete()
class Migration(migrations.Migration):
dependencies = [
('zerver', '0303_realm_wildcard_mention_policy'),
]
operations = [
migrations.RunPython(remove_default_status_of_default_private_streams,
reverse_code=migrations.RunPython.noop,
elidable=True),
]
|
migration: Add migration to remove default status of private streams.
This commit adds migration which removes default status of exisitng
default private streams, i.e. private stream exists but they are no
longer default.# Generated by Django 2.2.14 on 2020-08-10 20:21
from django.db import migrations
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def remove_default_status_of_default_private_streams(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
DefaultStream = apps.get_model('zerver', 'DefaultStream')
DefaultStream.objects.filter(stream__invite_only=True).delete()
class Migration(migrations.Migration):
dependencies = [
('zerver', '0303_realm_wildcard_mention_policy'),
]
operations = [
migrations.RunPython(remove_default_status_of_default_private_streams,
reverse_code=migrations.RunPython.noop,
elidable=True),
]
|
<commit_before><commit_msg>migration: Add migration to remove default status of private streams.
This commit adds migration which removes default status of exisitng
default private streams, i.e. private stream exists but they are no
longer default.<commit_after># Generated by Django 2.2.14 on 2020-08-10 20:21
from django.db import migrations
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def remove_default_status_of_default_private_streams(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
DefaultStream = apps.get_model('zerver', 'DefaultStream')
DefaultStream.objects.filter(stream__invite_only=True).delete()
class Migration(migrations.Migration):
dependencies = [
('zerver', '0303_realm_wildcard_mention_policy'),
]
operations = [
migrations.RunPython(remove_default_status_of_default_private_streams,
reverse_code=migrations.RunPython.noop,
elidable=True),
]
|
|
3d6324080e112857e2cc8fab6a9c0c168ec0ab1f
|
tests/chainer_tests/functions_tests/pooling_tests/test_pooling_nd_kernel.py
|
tests/chainer_tests/functions_tests/pooling_tests/test_pooling_nd_kernel.py
|
import unittest
import mock
import chainer
from chainer import testing
from chainer.testing import attr
from chainer.functions.pooling import pooling_nd_kernel
@testing.parameterize(*testing.product({
'ndim': [2, 3, 4],
}))
@attr.gpu
class TestPoolingNDKernelMemo(unittest.TestCase):
def setUp(self):
chainer.cuda.clear_memo()
def test_pooling_nd_kernel_forward_memo(self):
ndim = self.ndim
with mock.patch('chainer.functions.pooling.pooling_nd_kernel.'
'PoolingNDKernelForward._generate') as m:
pooling_nd_kernel.PoolingNDKernelForward.generate(ndim)
m.assert_called_once_with(ndim)
pooling_nd_kernel.PoolingNDKernelForward.generate(ndim)
m.assert_called_once_with(ndim)
def test_pooling_nd_kernel_backward_memo(self):
ndim = self.ndim
with mock.patch('chainer.functions.pooling.pooling_nd_kernel.'
'PoolingNDKernelBackward._generate') as m:
pooling_nd_kernel.PoolingNDKernelBackward.generate(ndim)
m.assert_called_once_with(ndim)
pooling_nd_kernel.PoolingNDKernelBackward.generate(ndim)
m.assert_called_once_with(ndim)
testing.run_module(__name__, __file__)
|
Add test for memoization in N-dimensional pooling kernel generator.
|
Add test for memoization in N-dimensional pooling kernel generator.
|
Python
|
mit
|
hvy/chainer,keisuke-umezawa/chainer,keisuke-umezawa/chainer,wkentaro/chainer,rezoo/chainer,jnishi/chainer,kashif/chainer,jnishi/chainer,ronekko/chainer,okuta/chainer,chainer/chainer,wkentaro/chainer,ktnyt/chainer,okuta/chainer,niboshi/chainer,chainer/chainer,cupy/cupy,jnishi/chainer,niboshi/chainer,okuta/chainer,niboshi/chainer,aonotas/chainer,cupy/cupy,ktnyt/chainer,anaruse/chainer,ktnyt/chainer,hvy/chainer,wkentaro/chainer,chainer/chainer,hvy/chainer,chainer/chainer,ysekky/chainer,tkerola/chainer,wkentaro/chainer,cupy/cupy,keisuke-umezawa/chainer,niboshi/chainer,keisuke-umezawa/chainer,okuta/chainer,kiyukuta/chainer,hvy/chainer,cupy/cupy,delta2323/chainer,jnishi/chainer,ktnyt/chainer,pfnet/chainer
|
Add test for memoization in N-dimensional pooling kernel generator.
|
import unittest
import mock
import chainer
from chainer import testing
from chainer.testing import attr
from chainer.functions.pooling import pooling_nd_kernel
@testing.parameterize(*testing.product({
'ndim': [2, 3, 4],
}))
@attr.gpu
class TestPoolingNDKernelMemo(unittest.TestCase):
def setUp(self):
chainer.cuda.clear_memo()
def test_pooling_nd_kernel_forward_memo(self):
ndim = self.ndim
with mock.patch('chainer.functions.pooling.pooling_nd_kernel.'
'PoolingNDKernelForward._generate') as m:
pooling_nd_kernel.PoolingNDKernelForward.generate(ndim)
m.assert_called_once_with(ndim)
pooling_nd_kernel.PoolingNDKernelForward.generate(ndim)
m.assert_called_once_with(ndim)
def test_pooling_nd_kernel_backward_memo(self):
ndim = self.ndim
with mock.patch('chainer.functions.pooling.pooling_nd_kernel.'
'PoolingNDKernelBackward._generate') as m:
pooling_nd_kernel.PoolingNDKernelBackward.generate(ndim)
m.assert_called_once_with(ndim)
pooling_nd_kernel.PoolingNDKernelBackward.generate(ndim)
m.assert_called_once_with(ndim)
testing.run_module(__name__, __file__)
|
<commit_before><commit_msg>Add test for memoization in N-dimensional pooling kernel generator.<commit_after>
|
import unittest
import mock
import chainer
from chainer import testing
from chainer.testing import attr
from chainer.functions.pooling import pooling_nd_kernel
@testing.parameterize(*testing.product({
'ndim': [2, 3, 4],
}))
@attr.gpu
class TestPoolingNDKernelMemo(unittest.TestCase):
def setUp(self):
chainer.cuda.clear_memo()
def test_pooling_nd_kernel_forward_memo(self):
ndim = self.ndim
with mock.patch('chainer.functions.pooling.pooling_nd_kernel.'
'PoolingNDKernelForward._generate') as m:
pooling_nd_kernel.PoolingNDKernelForward.generate(ndim)
m.assert_called_once_with(ndim)
pooling_nd_kernel.PoolingNDKernelForward.generate(ndim)
m.assert_called_once_with(ndim)
def test_pooling_nd_kernel_backward_memo(self):
ndim = self.ndim
with mock.patch('chainer.functions.pooling.pooling_nd_kernel.'
'PoolingNDKernelBackward._generate') as m:
pooling_nd_kernel.PoolingNDKernelBackward.generate(ndim)
m.assert_called_once_with(ndim)
pooling_nd_kernel.PoolingNDKernelBackward.generate(ndim)
m.assert_called_once_with(ndim)
testing.run_module(__name__, __file__)
|
Add test for memoization in N-dimensional pooling kernel generator.import unittest
import mock
import chainer
from chainer import testing
from chainer.testing import attr
from chainer.functions.pooling import pooling_nd_kernel
@testing.parameterize(*testing.product({
'ndim': [2, 3, 4],
}))
@attr.gpu
class TestPoolingNDKernelMemo(unittest.TestCase):
def setUp(self):
chainer.cuda.clear_memo()
def test_pooling_nd_kernel_forward_memo(self):
ndim = self.ndim
with mock.patch('chainer.functions.pooling.pooling_nd_kernel.'
'PoolingNDKernelForward._generate') as m:
pooling_nd_kernel.PoolingNDKernelForward.generate(ndim)
m.assert_called_once_with(ndim)
pooling_nd_kernel.PoolingNDKernelForward.generate(ndim)
m.assert_called_once_with(ndim)
def test_pooling_nd_kernel_backward_memo(self):
ndim = self.ndim
with mock.patch('chainer.functions.pooling.pooling_nd_kernel.'
'PoolingNDKernelBackward._generate') as m:
pooling_nd_kernel.PoolingNDKernelBackward.generate(ndim)
m.assert_called_once_with(ndim)
pooling_nd_kernel.PoolingNDKernelBackward.generate(ndim)
m.assert_called_once_with(ndim)
testing.run_module(__name__, __file__)
|
<commit_before><commit_msg>Add test for memoization in N-dimensional pooling kernel generator.<commit_after>import unittest
import mock
import chainer
from chainer import testing
from chainer.testing import attr
from chainer.functions.pooling import pooling_nd_kernel
@testing.parameterize(*testing.product({
'ndim': [2, 3, 4],
}))
@attr.gpu
class TestPoolingNDKernelMemo(unittest.TestCase):
def setUp(self):
chainer.cuda.clear_memo()
def test_pooling_nd_kernel_forward_memo(self):
ndim = self.ndim
with mock.patch('chainer.functions.pooling.pooling_nd_kernel.'
'PoolingNDKernelForward._generate') as m:
pooling_nd_kernel.PoolingNDKernelForward.generate(ndim)
m.assert_called_once_with(ndim)
pooling_nd_kernel.PoolingNDKernelForward.generate(ndim)
m.assert_called_once_with(ndim)
def test_pooling_nd_kernel_backward_memo(self):
ndim = self.ndim
with mock.patch('chainer.functions.pooling.pooling_nd_kernel.'
'PoolingNDKernelBackward._generate') as m:
pooling_nd_kernel.PoolingNDKernelBackward.generate(ndim)
m.assert_called_once_with(ndim)
pooling_nd_kernel.PoolingNDKernelBackward.generate(ndim)
m.assert_called_once_with(ndim)
testing.run_module(__name__, __file__)
|
|
f3a22d8f615710d7f62704dd9769542bad96fecb
|
apps/profile/management/commands/send_notification.py
|
apps/profile/management/commands/send_notification.py
|
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from optparse import make_option
import datetime
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option("-u", "--username", dest="username", nargs=1, help="Specify user id or username"),
)
def handle(self, *args, **options):
username = options.get('username')
user = None
if username:
try:
user = User.objects.get(username__icontains=username)
except User.MultipleObjectsReturned:
user = User.objects.get(username__iexact=username)
except User.DoesNotExist:
user = User.objects.get(email__iexact=username)
except User.DoesNotExist:
print " ---> No user found at: %s" % username
if user:
user.profile.send_notification_email()
else:
# users = User.objects.all()
for user in users:
user.profile.send_notification_email()
print " ---> Mail sent to %s." % user.id
print " ---> All notification sent!"
|
Add a command to send mail
|
Add a command to send mail
|
Python
|
mit
|
bruceyou/NewsBlur,bruceyou/NewsBlur,bruceyou/NewsBlur,bruceyou/NewsBlur,bruceyou/NewsBlur,bruceyou/NewsBlur,bruceyou/NewsBlur,bruceyou/NewsBlur,bruceyou/NewsBlur
|
Add a command to send mail
|
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from optparse import make_option
import datetime
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option("-u", "--username", dest="username", nargs=1, help="Specify user id or username"),
)
def handle(self, *args, **options):
username = options.get('username')
user = None
if username:
try:
user = User.objects.get(username__icontains=username)
except User.MultipleObjectsReturned:
user = User.objects.get(username__iexact=username)
except User.DoesNotExist:
user = User.objects.get(email__iexact=username)
except User.DoesNotExist:
print " ---> No user found at: %s" % username
if user:
user.profile.send_notification_email()
else:
# users = User.objects.all()
for user in users:
user.profile.send_notification_email()
print " ---> Mail sent to %s." % user.id
print " ---> All notification sent!"
|
<commit_before><commit_msg>Add a command to send mail<commit_after>
|
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from optparse import make_option
import datetime
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option("-u", "--username", dest="username", nargs=1, help="Specify user id or username"),
)
def handle(self, *args, **options):
username = options.get('username')
user = None
if username:
try:
user = User.objects.get(username__icontains=username)
except User.MultipleObjectsReturned:
user = User.objects.get(username__iexact=username)
except User.DoesNotExist:
user = User.objects.get(email__iexact=username)
except User.DoesNotExist:
print " ---> No user found at: %s" % username
if user:
user.profile.send_notification_email()
else:
# users = User.objects.all()
for user in users:
user.profile.send_notification_email()
print " ---> Mail sent to %s." % user.id
print " ---> All notification sent!"
|
Add a command to send mailfrom django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from optparse import make_option
import datetime
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option("-u", "--username", dest="username", nargs=1, help="Specify user id or username"),
)
def handle(self, *args, **options):
username = options.get('username')
user = None
if username:
try:
user = User.objects.get(username__icontains=username)
except User.MultipleObjectsReturned:
user = User.objects.get(username__iexact=username)
except User.DoesNotExist:
user = User.objects.get(email__iexact=username)
except User.DoesNotExist:
print " ---> No user found at: %s" % username
if user:
user.profile.send_notification_email()
else:
# users = User.objects.all()
for user in users:
user.profile.send_notification_email()
print " ---> Mail sent to %s." % user.id
print " ---> All notification sent!"
|
<commit_before><commit_msg>Add a command to send mail<commit_after>from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from optparse import make_option
import datetime
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option("-u", "--username", dest="username", nargs=1, help="Specify user id or username"),
)
def handle(self, *args, **options):
username = options.get('username')
user = None
if username:
try:
user = User.objects.get(username__icontains=username)
except User.MultipleObjectsReturned:
user = User.objects.get(username__iexact=username)
except User.DoesNotExist:
user = User.objects.get(email__iexact=username)
except User.DoesNotExist:
print " ---> No user found at: %s" % username
if user:
user.profile.send_notification_email()
else:
# users = User.objects.all()
for user in users:
user.profile.send_notification_email()
print " ---> Mail sent to %s." % user.id
print " ---> All notification sent!"
|
|
60b01109b2504e990b30a70f6cba25dd9d440626
|
CFC_DataCollector/tests/recommenderTests/TestNewPipeline.py
|
CFC_DataCollector/tests/recommenderTests/TestNewPipeline.py
|
import unittest
import json
import logging
import numpy as np
from get_database import get_db, get_mode_db, get_section_db
from recommender import pipeline
import sys
import os
logging.basicConfig(level=logging.DEBUG)
class TestPipeline(unittest.TestCase):
def setUp(self):
self.testUserEmails = ["test@example.com", "best@example.com", "fest@example.com",
"rest@example.com", "nest@example.com"]
self.serverName = 'localhost'
self.testUsers = []
for userEmail in self.testUserEmails:
User.register(userEmail)
self.testUsers += [User.fromEmail(section['user_id'])] # can access uuid with .uuid
# import data from tests/data/testModeInferFiles
self.pipeline = pipeline.ModeRecommendationPipeline()
self.testRecommendationPipeline()
# register each of the users and add sample trips to each user
def tearDown(self):
for testUser in self.testUsersEmails:
purge_database_json.purgeData('localhost', testUser)
def testTargetTripsPipeline(self):
self.testTargetTrips = [pipeline.getTargetTrips(testUser.uuid) for testUser in testUsers]
# series of assertions to check output
def testAugmentTripsPipeline(self):
# call functions in the augment_trips_pipeline file using trips from self.testTargetTrips
# assertions to check various characteristics of output
def testUtilityModelPipeline(self):
# call functions from utility_model_pipeline.py and using augmented trips from testAugmentTripsPipeline and self.testUsers
# assertions to check various characteristics of output
def testRecommendationPipeline(self):
self.testTripsToImprovePipeline();
self.testUtilityModelPipeline();
self.testRecommendationPipeline();
# assertions to check various characteristcs of pipeline overall
if __name__ == '__main__':
unittest.main()
|
Add a simple test script that invokes the pipeline
|
Add a simple test script that invokes the pipeline
So that we can make sure that all the pieces work together.
Modelled on the existing testPipeline code.
Thanks to Gautham (@gaukes)
|
Python
|
bsd-3-clause
|
sunil07t/e-mission-server,yw374cornell/e-mission-server,e-mission/e-mission-server,yw374cornell/e-mission-server,shankari/e-mission-server,sunil07t/e-mission-server,e-mission/e-mission-server,shankari/e-mission-server,e-mission/e-mission-server,joshzarrabi/e-mission-server,joshzarrabi/e-mission-server,sunil07t/e-mission-server,yw374cornell/e-mission-server,joshzarrabi/e-mission-server,sunil07t/e-mission-server,yw374cornell/e-mission-server,joshzarrabi/e-mission-server,shankari/e-mission-server,shankari/e-mission-server,e-mission/e-mission-server
|
Add a simple test script that invokes the pipeline
So that we can make sure that all the pieces work together.
Modelled on the existing testPipeline code.
Thanks to Gautham (@gaukes)
|
import unittest
import json
import logging
import numpy as np
from get_database import get_db, get_mode_db, get_section_db
from recommender import pipeline
import sys
import os
logging.basicConfig(level=logging.DEBUG)
class TestPipeline(unittest.TestCase):
def setUp(self):
self.testUserEmails = ["test@example.com", "best@example.com", "fest@example.com",
"rest@example.com", "nest@example.com"]
self.serverName = 'localhost'
self.testUsers = []
for userEmail in self.testUserEmails:
User.register(userEmail)
self.testUsers += [User.fromEmail(section['user_id'])] # can access uuid with .uuid
# import data from tests/data/testModeInferFiles
self.pipeline = pipeline.ModeRecommendationPipeline()
self.testRecommendationPipeline()
# register each of the users and add sample trips to each user
def tearDown(self):
for testUser in self.testUsersEmails:
purge_database_json.purgeData('localhost', testUser)
def testTargetTripsPipeline(self):
self.testTargetTrips = [pipeline.getTargetTrips(testUser.uuid) for testUser in testUsers]
# series of assertions to check output
def testAugmentTripsPipeline(self):
# call functions in the augment_trips_pipeline file using trips from self.testTargetTrips
# assertions to check various characteristics of output
def testUtilityModelPipeline(self):
# call functions from utility_model_pipeline.py and using augmented trips from testAugmentTripsPipeline and self.testUsers
# assertions to check various characteristics of output
def testRecommendationPipeline(self):
self.testTripsToImprovePipeline();
self.testUtilityModelPipeline();
self.testRecommendationPipeline();
# assertions to check various characteristcs of pipeline overall
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add a simple test script that invokes the pipeline
So that we can make sure that all the pieces work together.
Modelled on the existing testPipeline code.
Thanks to Gautham (@gaukes)<commit_after>
|
import unittest
import json
import logging
import numpy as np
from get_database import get_db, get_mode_db, get_section_db
from recommender import pipeline
import sys
import os
logging.basicConfig(level=logging.DEBUG)
class TestPipeline(unittest.TestCase):
def setUp(self):
self.testUserEmails = ["test@example.com", "best@example.com", "fest@example.com",
"rest@example.com", "nest@example.com"]
self.serverName = 'localhost'
self.testUsers = []
for userEmail in self.testUserEmails:
User.register(userEmail)
self.testUsers += [User.fromEmail(section['user_id'])] # can access uuid with .uuid
# import data from tests/data/testModeInferFiles
self.pipeline = pipeline.ModeRecommendationPipeline()
self.testRecommendationPipeline()
# register each of the users and add sample trips to each user
def tearDown(self):
for testUser in self.testUsersEmails:
purge_database_json.purgeData('localhost', testUser)
def testTargetTripsPipeline(self):
self.testTargetTrips = [pipeline.getTargetTrips(testUser.uuid) for testUser in testUsers]
# series of assertions to check output
def testAugmentTripsPipeline(self):
# call functions in the augment_trips_pipeline file using trips from self.testTargetTrips
# assertions to check various characteristics of output
def testUtilityModelPipeline(self):
# call functions from utility_model_pipeline.py and using augmented trips from testAugmentTripsPipeline and self.testUsers
# assertions to check various characteristics of output
def testRecommendationPipeline(self):
self.testTripsToImprovePipeline();
self.testUtilityModelPipeline();
self.testRecommendationPipeline();
# assertions to check various characteristcs of pipeline overall
if __name__ == '__main__':
unittest.main()
|
Add a simple test script that invokes the pipeline
So that we can make sure that all the pieces work together.
Modelled on the existing testPipeline code.
Thanks to Gautham (@gaukes)import unittest
import json
import logging
import numpy as np
from get_database import get_db, get_mode_db, get_section_db
from recommender import pipeline
import sys
import os
logging.basicConfig(level=logging.DEBUG)
class TestPipeline(unittest.TestCase):
def setUp(self):
self.testUserEmails = ["test@example.com", "best@example.com", "fest@example.com",
"rest@example.com", "nest@example.com"]
self.serverName = 'localhost'
self.testUsers = []
for userEmail in self.testUserEmails:
User.register(userEmail)
self.testUsers += [User.fromEmail(section['user_id'])] # can access uuid with .uuid
# import data from tests/data/testModeInferFiles
self.pipeline = pipeline.ModeRecommendationPipeline()
self.testRecommendationPipeline()
# register each of the users and add sample trips to each user
def tearDown(self):
for testUser in self.testUsersEmails:
purge_database_json.purgeData('localhost', testUser)
def testTargetTripsPipeline(self):
self.testTargetTrips = [pipeline.getTargetTrips(testUser.uuid) for testUser in testUsers]
# series of assertions to check output
def testAugmentTripsPipeline(self):
# call functions in the augment_trips_pipeline file using trips from self.testTargetTrips
# assertions to check various characteristics of output
def testUtilityModelPipeline(self):
# call functions from utility_model_pipeline.py and using augmented trips from testAugmentTripsPipeline and self.testUsers
# assertions to check various characteristics of output
def testRecommendationPipeline(self):
self.testTripsToImprovePipeline();
self.testUtilityModelPipeline();
self.testRecommendationPipeline();
# assertions to check various characteristcs of pipeline overall
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add a simple test script that invokes the pipeline
So that we can make sure that all the pieces work together.
Modelled on the existing testPipeline code.
Thanks to Gautham (@gaukes)<commit_after>import unittest
import json
import logging
import numpy as np
from get_database import get_db, get_mode_db, get_section_db
from recommender import pipeline
import sys
import os
logging.basicConfig(level=logging.DEBUG)
class TestPipeline(unittest.TestCase):
def setUp(self):
self.testUserEmails = ["test@example.com", "best@example.com", "fest@example.com",
"rest@example.com", "nest@example.com"]
self.serverName = 'localhost'
self.testUsers = []
for userEmail in self.testUserEmails:
User.register(userEmail)
self.testUsers += [User.fromEmail(section['user_id'])] # can access uuid with .uuid
# import data from tests/data/testModeInferFiles
self.pipeline = pipeline.ModeRecommendationPipeline()
self.testRecommendationPipeline()
# register each of the users and add sample trips to each user
def tearDown(self):
for testUser in self.testUsersEmails:
purge_database_json.purgeData('localhost', testUser)
def testTargetTripsPipeline(self):
self.testTargetTrips = [pipeline.getTargetTrips(testUser.uuid) for testUser in testUsers]
# series of assertions to check output
def testAugmentTripsPipeline(self):
# call functions in the augment_trips_pipeline file using trips from self.testTargetTrips
# assertions to check various characteristics of output
def testUtilityModelPipeline(self):
# call functions from utility_model_pipeline.py and using augmented trips from testAugmentTripsPipeline and self.testUsers
# assertions to check various characteristics of output
def testRecommendationPipeline(self):
self.testTripsToImprovePipeline();
self.testUtilityModelPipeline();
self.testRecommendationPipeline();
# assertions to check various characteristcs of pipeline overall
if __name__ == '__main__':
unittest.main()
|
|
d0e8a202597d5c28d7dc6efc4762040c83072223
|
pombola/core/management/commands/core_find_stale_elasticsearch_documents.py
|
pombola/core/management/commands/core_find_stale_elasticsearch_documents.py
|
import sys
from django.core.management.base import BaseCommand, CommandError
from haystack import connections as haystack_connections
from haystack.exceptions import NotHandled
from haystack.query import SearchQuerySet
from haystack.utils.app_loading import get_models, load_apps
def get_all_indexed_models():
backends = haystack_connections.connections_info.keys()
available_models = {}
for backend_key in backends:
unified_index = haystack_connections[backend_key].get_unified_index()
for app in load_apps():
for model in get_models(app):
try:
unified_index.get_index(model)
except NotHandled:
continue
model_name = model.__module__ + '.' + model.__name__
available_models[model_name] = {
'backend_key': backend_key,
'app': app,
'model': model,
}
return available_models
def get_models_to_check(model_names, available_models):
models_to_check = []
if model_names:
missing_models = False
for model_name in model_names:
if model_name in available_models:
models_to_check.append(model_name)
else:
missing_models = True
print "There was no model {0} with a search index".format(model_name)
if missing_models:
print "Some models were not found; they must be one of:"
for model in sorted(available_models.keys()):
print " ", model
sys.exit(1)
else:
models_to_check = sorted(available_models.keys())
return models_to_check
class Command(BaseCommand):
args = 'MODEL ...'
help = 'Get all search results for the given models'
def handle(self, *args, **options):
available_models = get_all_indexed_models()
models_to_check = get_models_to_check(args, available_models)
# Now we know which models to check, do that:
for model_name in models_to_check:
model_details = available_models[model_name]
backend_key = model_details['backend_key']
model = model_details['model']
backend = haystack_connections[backend_key].get_backend()
unified_index = haystack_connections[backend_key].get_unified_index()
index = unified_index.get_index(model)
qs = index.build_queryset()
print "Checking {0} ({1} objects in the database)".format(
model_name, qs.count()
)
# Get all the primary keys from the database:
pks_in_database = set(
unicode(pk) for pk in qs.values_list('pk', flat=True)
)
# Then go through every search result for that
# model, and check that the primary key is one
# that's in the database:
for search_result in SearchQuerySet(using=backend.connection_alias).models(model):
if search_result.pk not in pks_in_database:
print " stale search entry for primary key", search_result.pk
|
Add a command to help find out-of-sync objects in Elasticsearch
|
Add a command to help find out-of-sync objects in Elasticsearch
This is to help trying to figure out the problems behind issue #1424.
|
Python
|
agpl-3.0
|
geoffkilpin/pombola,geoffkilpin/pombola,geoffkilpin/pombola,mysociety/pombola,geoffkilpin/pombola,mysociety/pombola,mysociety/pombola,geoffkilpin/pombola,mysociety/pombola,geoffkilpin/pombola,mysociety/pombola,mysociety/pombola
|
Add a command to help find out-of-sync objects in Elasticsearch
This is to help trying to figure out the problems behind issue #1424.
|
import sys
from django.core.management.base import BaseCommand, CommandError
from haystack import connections as haystack_connections
from haystack.exceptions import NotHandled
from haystack.query import SearchQuerySet
from haystack.utils.app_loading import get_models, load_apps
def get_all_indexed_models():
backends = haystack_connections.connections_info.keys()
available_models = {}
for backend_key in backends:
unified_index = haystack_connections[backend_key].get_unified_index()
for app in load_apps():
for model in get_models(app):
try:
unified_index.get_index(model)
except NotHandled:
continue
model_name = model.__module__ + '.' + model.__name__
available_models[model_name] = {
'backend_key': backend_key,
'app': app,
'model': model,
}
return available_models
def get_models_to_check(model_names, available_models):
models_to_check = []
if model_names:
missing_models = False
for model_name in model_names:
if model_name in available_models:
models_to_check.append(model_name)
else:
missing_models = True
print "There was no model {0} with a search index".format(model_name)
if missing_models:
print "Some models were not found; they must be one of:"
for model in sorted(available_models.keys()):
print " ", model
sys.exit(1)
else:
models_to_check = sorted(available_models.keys())
return models_to_check
class Command(BaseCommand):
args = 'MODEL ...'
help = 'Get all search results for the given models'
def handle(self, *args, **options):
available_models = get_all_indexed_models()
models_to_check = get_models_to_check(args, available_models)
# Now we know which models to check, do that:
for model_name in models_to_check:
model_details = available_models[model_name]
backend_key = model_details['backend_key']
model = model_details['model']
backend = haystack_connections[backend_key].get_backend()
unified_index = haystack_connections[backend_key].get_unified_index()
index = unified_index.get_index(model)
qs = index.build_queryset()
print "Checking {0} ({1} objects in the database)".format(
model_name, qs.count()
)
# Get all the primary keys from the database:
pks_in_database = set(
unicode(pk) for pk in qs.values_list('pk', flat=True)
)
# Then go through every search result for that
# model, and check that the primary key is one
# that's in the database:
for search_result in SearchQuerySet(using=backend.connection_alias).models(model):
if search_result.pk not in pks_in_database:
print " stale search entry for primary key", search_result.pk
|
<commit_before><commit_msg>Add a command to help find out-of-sync objects in Elasticsearch
This is to help trying to figure out the problems behind issue #1424.<commit_after>
|
import sys
from django.core.management.base import BaseCommand, CommandError
from haystack import connections as haystack_connections
from haystack.exceptions import NotHandled
from haystack.query import SearchQuerySet
from haystack.utils.app_loading import get_models, load_apps
def get_all_indexed_models():
backends = haystack_connections.connections_info.keys()
available_models = {}
for backend_key in backends:
unified_index = haystack_connections[backend_key].get_unified_index()
for app in load_apps():
for model in get_models(app):
try:
unified_index.get_index(model)
except NotHandled:
continue
model_name = model.__module__ + '.' + model.__name__
available_models[model_name] = {
'backend_key': backend_key,
'app': app,
'model': model,
}
return available_models
def get_models_to_check(model_names, available_models):
models_to_check = []
if model_names:
missing_models = False
for model_name in model_names:
if model_name in available_models:
models_to_check.append(model_name)
else:
missing_models = True
print "There was no model {0} with a search index".format(model_name)
if missing_models:
print "Some models were not found; they must be one of:"
for model in sorted(available_models.keys()):
print " ", model
sys.exit(1)
else:
models_to_check = sorted(available_models.keys())
return models_to_check
class Command(BaseCommand):
args = 'MODEL ...'
help = 'Get all search results for the given models'
def handle(self, *args, **options):
available_models = get_all_indexed_models()
models_to_check = get_models_to_check(args, available_models)
# Now we know which models to check, do that:
for model_name in models_to_check:
model_details = available_models[model_name]
backend_key = model_details['backend_key']
model = model_details['model']
backend = haystack_connections[backend_key].get_backend()
unified_index = haystack_connections[backend_key].get_unified_index()
index = unified_index.get_index(model)
qs = index.build_queryset()
print "Checking {0} ({1} objects in the database)".format(
model_name, qs.count()
)
# Get all the primary keys from the database:
pks_in_database = set(
unicode(pk) for pk in qs.values_list('pk', flat=True)
)
# Then go through every search result for that
# model, and check that the primary key is one
# that's in the database:
for search_result in SearchQuerySet(using=backend.connection_alias).models(model):
if search_result.pk not in pks_in_database:
print " stale search entry for primary key", search_result.pk
|
Add a command to help find out-of-sync objects in Elasticsearch
This is to help trying to figure out the problems behind issue #1424.import sys
from django.core.management.base import BaseCommand, CommandError
from haystack import connections as haystack_connections
from haystack.exceptions import NotHandled
from haystack.query import SearchQuerySet
from haystack.utils.app_loading import get_models, load_apps
def get_all_indexed_models():
backends = haystack_connections.connections_info.keys()
available_models = {}
for backend_key in backends:
unified_index = haystack_connections[backend_key].get_unified_index()
for app in load_apps():
for model in get_models(app):
try:
unified_index.get_index(model)
except NotHandled:
continue
model_name = model.__module__ + '.' + model.__name__
available_models[model_name] = {
'backend_key': backend_key,
'app': app,
'model': model,
}
return available_models
def get_models_to_check(model_names, available_models):
models_to_check = []
if model_names:
missing_models = False
for model_name in model_names:
if model_name in available_models:
models_to_check.append(model_name)
else:
missing_models = True
print "There was no model {0} with a search index".format(model_name)
if missing_models:
print "Some models were not found; they must be one of:"
for model in sorted(available_models.keys()):
print " ", model
sys.exit(1)
else:
models_to_check = sorted(available_models.keys())
return models_to_check
class Command(BaseCommand):
args = 'MODEL ...'
help = 'Get all search results for the given models'
def handle(self, *args, **options):
available_models = get_all_indexed_models()
models_to_check = get_models_to_check(args, available_models)
# Now we know which models to check, do that:
for model_name in models_to_check:
model_details = available_models[model_name]
backend_key = model_details['backend_key']
model = model_details['model']
backend = haystack_connections[backend_key].get_backend()
unified_index = haystack_connections[backend_key].get_unified_index()
index = unified_index.get_index(model)
qs = index.build_queryset()
print "Checking {0} ({1} objects in the database)".format(
model_name, qs.count()
)
# Get all the primary keys from the database:
pks_in_database = set(
unicode(pk) for pk in qs.values_list('pk', flat=True)
)
# Then go through every search result for that
# model, and check that the primary key is one
# that's in the database:
for search_result in SearchQuerySet(using=backend.connection_alias).models(model):
if search_result.pk not in pks_in_database:
print " stale search entry for primary key", search_result.pk
|
<commit_before><commit_msg>Add a command to help find out-of-sync objects in Elasticsearch
This is to help trying to figure out the problems behind issue #1424.<commit_after>import sys
from django.core.management.base import BaseCommand, CommandError
from haystack import connections as haystack_connections
from haystack.exceptions import NotHandled
from haystack.query import SearchQuerySet
from haystack.utils.app_loading import get_models, load_apps
def get_all_indexed_models():
backends = haystack_connections.connections_info.keys()
available_models = {}
for backend_key in backends:
unified_index = haystack_connections[backend_key].get_unified_index()
for app in load_apps():
for model in get_models(app):
try:
unified_index.get_index(model)
except NotHandled:
continue
model_name = model.__module__ + '.' + model.__name__
available_models[model_name] = {
'backend_key': backend_key,
'app': app,
'model': model,
}
return available_models
def get_models_to_check(model_names, available_models):
models_to_check = []
if model_names:
missing_models = False
for model_name in model_names:
if model_name in available_models:
models_to_check.append(model_name)
else:
missing_models = True
print "There was no model {0} with a search index".format(model_name)
if missing_models:
print "Some models were not found; they must be one of:"
for model in sorted(available_models.keys()):
print " ", model
sys.exit(1)
else:
models_to_check = sorted(available_models.keys())
return models_to_check
class Command(BaseCommand):
args = 'MODEL ...'
help = 'Get all search results for the given models'
def handle(self, *args, **options):
available_models = get_all_indexed_models()
models_to_check = get_models_to_check(args, available_models)
# Now we know which models to check, do that:
for model_name in models_to_check:
model_details = available_models[model_name]
backend_key = model_details['backend_key']
model = model_details['model']
backend = haystack_connections[backend_key].get_backend()
unified_index = haystack_connections[backend_key].get_unified_index()
index = unified_index.get_index(model)
qs = index.build_queryset()
print "Checking {0} ({1} objects in the database)".format(
model_name, qs.count()
)
# Get all the primary keys from the database:
pks_in_database = set(
unicode(pk) for pk in qs.values_list('pk', flat=True)
)
# Then go through every search result for that
# model, and check that the primary key is one
# that's in the database:
for search_result in SearchQuerySet(using=backend.connection_alias).models(model):
if search_result.pk not in pks_in_database:
print " stale search entry for primary key", search_result.pk
|
|
3d603d4d86f1822cf50eaee1818bfee7e5a2fe17
|
recipe-server/normandy/recipes/migrations/0042_remove_invalid_signatures.py
|
recipe-server/normandy/recipes/migrations/0042_remove_invalid_signatures.py
|
"""
Removes signatures, so they can be easily recreated during deployment.
This migration is intended to be used between "eras" of signatures. As
the serialization format of recipes changes, the signatures need to
also change. This could be handled automatically, but it is easier to
deploy if we just remove everything in a migration, and allow the
normal processes to regenerate the signatures.
"""
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def remove_signatures(apps, schema_editor):
Recipe = apps.get_model('recipes', 'Recipe')
Signature = apps.get_model('recipes', 'Signature')
for recipe in Recipe.objects.exclude(signature=None):
sig = recipe.signature
recipe.signature = None
recipe.save()
sig.delete()
for sig in Signature.objects.all():
sig.delete()
class Migration(migrations.Migration):
dependencies = [
('recipes', '0041_remove_invalid_signatures'),
]
operations = [
# This function as both a forward and reverse migration
migrations.RunPython(remove_signatures, remove_signatures),
]
|
Add migration to reset signatures
|
recipe-server: Add migration to reset signatures
This is because the change to send only required fields in the
/api/v1/recipe/signed/ in f816fbcfc63e4ce56896c0d9e09ff82930f191b9.
|
Python
|
mpl-2.0
|
mozilla/normandy,Osmose/normandy,Osmose/normandy,mozilla/normandy,mozilla/normandy,Osmose/normandy,Osmose/normandy,mozilla/normandy
|
recipe-server: Add migration to reset signatures
This is because the change to send only required fields in the
/api/v1/recipe/signed/ in f816fbcfc63e4ce56896c0d9e09ff82930f191b9.
|
"""
Removes signatures, so they can be easily recreated during deployment.
This migration is intended to be used between "eras" of signatures. As
the serialization format of recipes changes, the signatures need to
also change. This could be handled automatically, but it is easier to
deploy if we just remove everything in a migration, and allow the
normal processes to regenerate the signatures.
"""
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def remove_signatures(apps, schema_editor):
Recipe = apps.get_model('recipes', 'Recipe')
Signature = apps.get_model('recipes', 'Signature')
for recipe in Recipe.objects.exclude(signature=None):
sig = recipe.signature
recipe.signature = None
recipe.save()
sig.delete()
for sig in Signature.objects.all():
sig.delete()
class Migration(migrations.Migration):
dependencies = [
('recipes', '0041_remove_invalid_signatures'),
]
operations = [
# This function as both a forward and reverse migration
migrations.RunPython(remove_signatures, remove_signatures),
]
|
<commit_before><commit_msg>recipe-server: Add migration to reset signatures
This is because the change to send only required fields in the
/api/v1/recipe/signed/ in f816fbcfc63e4ce56896c0d9e09ff82930f191b9.<commit_after>
|
"""
Removes signatures, so they can be easily recreated during deployment.
This migration is intended to be used between "eras" of signatures. As
the serialization format of recipes changes, the signatures need to
also change. This could be handled automatically, but it is easier to
deploy if we just remove everything in a migration, and allow the
normal processes to regenerate the signatures.
"""
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def remove_signatures(apps, schema_editor):
Recipe = apps.get_model('recipes', 'Recipe')
Signature = apps.get_model('recipes', 'Signature')
for recipe in Recipe.objects.exclude(signature=None):
sig = recipe.signature
recipe.signature = None
recipe.save()
sig.delete()
for sig in Signature.objects.all():
sig.delete()
class Migration(migrations.Migration):
dependencies = [
('recipes', '0041_remove_invalid_signatures'),
]
operations = [
# This function as both a forward and reverse migration
migrations.RunPython(remove_signatures, remove_signatures),
]
|
recipe-server: Add migration to reset signatures
This is because the change to send only required fields in the
/api/v1/recipe/signed/ in f816fbcfc63e4ce56896c0d9e09ff82930f191b9."""
Removes signatures, so they can be easily recreated during deployment.
This migration is intended to be used between "eras" of signatures. As
the serialization format of recipes changes, the signatures need to
also change. This could be handled automatically, but it is easier to
deploy if we just remove everything in a migration, and allow the
normal processes to regenerate the signatures.
"""
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def remove_signatures(apps, schema_editor):
Recipe = apps.get_model('recipes', 'Recipe')
Signature = apps.get_model('recipes', 'Signature')
for recipe in Recipe.objects.exclude(signature=None):
sig = recipe.signature
recipe.signature = None
recipe.save()
sig.delete()
for sig in Signature.objects.all():
sig.delete()
class Migration(migrations.Migration):
dependencies = [
('recipes', '0041_remove_invalid_signatures'),
]
operations = [
# This function as both a forward and reverse migration
migrations.RunPython(remove_signatures, remove_signatures),
]
|
<commit_before><commit_msg>recipe-server: Add migration to reset signatures
This is because the change to send only required fields in the
/api/v1/recipe/signed/ in f816fbcfc63e4ce56896c0d9e09ff82930f191b9.<commit_after>"""
Removes signatures, so they can be easily recreated during deployment.
This migration is intended to be used between "eras" of signatures. As
the serialization format of recipes changes, the signatures need to
also change. This could be handled automatically, but it is easier to
deploy if we just remove everything in a migration, and allow the
normal processes to regenerate the signatures.
"""
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def remove_signatures(apps, schema_editor):
Recipe = apps.get_model('recipes', 'Recipe')
Signature = apps.get_model('recipes', 'Signature')
for recipe in Recipe.objects.exclude(signature=None):
sig = recipe.signature
recipe.signature = None
recipe.save()
sig.delete()
for sig in Signature.objects.all():
sig.delete()
class Migration(migrations.Migration):
dependencies = [
('recipes', '0041_remove_invalid_signatures'),
]
operations = [
# This function as both a forward and reverse migration
migrations.RunPython(remove_signatures, remove_signatures),
]
|
|
0ff4afacf49e75eb9a299c00b5e03ad3da91265c
|
django_openid_provider/signals.py
|
django_openid_provider/signals.py
|
from registration.signals import user_registered
from django.contrib.auth.models import User
def create_openid(sender, user, **kwargs):
user.openid_set.create(openid = user.username, default = True)
user_registered.connect(create_openid, dispatch_uid="authentic.openid_provider")
|
Create an openid url for the new user register
|
[openid-provider] Create an openid url for the new user register
Create an openid url with username of the new user automatically when he
registered.
|
Python
|
agpl-3.0
|
BryceLohr/authentic,incuna/authentic,pu239ppy/authentic2,adieu/authentic2,incuna/authentic,BryceLohr/authentic,adieu/authentic2,pu239ppy/authentic2,BryceLohr/authentic,incuna/authentic,incuna/authentic,pu239ppy/authentic2,pu239ppy/authentic2,adieu/authentic2,incuna/authentic,BryceLohr/authentic,adieu/authentic2
|
[openid-provider] Create an openid url for the new user register
Create an openid url with username of the new user automatically when he
registered.
|
from registration.signals import user_registered
from django.contrib.auth.models import User
def create_openid(sender, user, **kwargs):
user.openid_set.create(openid = user.username, default = True)
user_registered.connect(create_openid, dispatch_uid="authentic.openid_provider")
|
<commit_before><commit_msg>[openid-provider] Create an openid url for the new user register
Create an openid url with username of the new user automatically when he
registered.<commit_after>
|
from registration.signals import user_registered
from django.contrib.auth.models import User
def create_openid(sender, user, **kwargs):
user.openid_set.create(openid = user.username, default = True)
user_registered.connect(create_openid, dispatch_uid="authentic.openid_provider")
|
[openid-provider] Create an openid url for the new user register
Create an openid url with username of the new user automatically when he
registered.from registration.signals import user_registered
from django.contrib.auth.models import User
def create_openid(sender, user, **kwargs):
user.openid_set.create(openid = user.username, default = True)
user_registered.connect(create_openid, dispatch_uid="authentic.openid_provider")
|
<commit_before><commit_msg>[openid-provider] Create an openid url for the new user register
Create an openid url with username of the new user automatically when he
registered.<commit_after>from registration.signals import user_registered
from django.contrib.auth.models import User
def create_openid(sender, user, **kwargs):
user.openid_set.create(openid = user.username, default = True)
user_registered.connect(create_openid, dispatch_uid="authentic.openid_provider")
|
|
337e66b111f012f5faac8abc60256e1cf7513847
|
mezzanine_sermons/migrations/0002_auto_20150606_1059.py
|
mezzanine_sermons/migrations/0002_auto_20150606_1059.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('mezzanine_sermons', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='sermon',
name='preacher',
field=models.CharField(blank=True, max_length=100),
),
]
|
Add migration to allow sermon_preacher to be blank
|
Add migration to allow sermon_preacher to be blank
|
Python
|
bsd-2-clause
|
philipsouthwell/mezzanine-sermons,philipsouthwell/mezzanine-sermons
|
Add migration to allow sermon_preacher to be blank
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('mezzanine_sermons', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='sermon',
name='preacher',
field=models.CharField(blank=True, max_length=100),
),
]
|
<commit_before><commit_msg>Add migration to allow sermon_preacher to be blank<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('mezzanine_sermons', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='sermon',
name='preacher',
field=models.CharField(blank=True, max_length=100),
),
]
|
Add migration to allow sermon_preacher to be blank# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('mezzanine_sermons', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='sermon',
name='preacher',
field=models.CharField(blank=True, max_length=100),
),
]
|
<commit_before><commit_msg>Add migration to allow sermon_preacher to be blank<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('mezzanine_sermons', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='sermon',
name='preacher',
field=models.CharField(blank=True, max_length=100),
),
]
|
|
a1deee1f63aa2271bc40731dfceb7e7852a9c38a
|
experimental/sngp/__init__.py
|
experimental/sngp/__init__.py
|
# coding=utf-8
# Copyright 2020 The Edward2 Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Spectral-normalized neural Gaussian process (SNGP)."""
# pylint: disable=wildcard-import
from experimental.sngp.gaussian_process import * # local file import
from experimental.sngp.normalization import * # local file import
# pylint: enable=wildcard-import
|
Add package-level import for SNGP.
|
Add package-level import for SNGP.
PiperOrigin-RevId: 322276743
|
Python
|
apache-2.0
|
google/edward2
|
Add package-level import for SNGP.
PiperOrigin-RevId: 322276743
|
# coding=utf-8
# Copyright 2020 The Edward2 Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Spectral-normalized neural Gaussian process (SNGP)."""
# pylint: disable=wildcard-import
from experimental.sngp.gaussian_process import * # local file import
from experimental.sngp.normalization import * # local file import
# pylint: enable=wildcard-import
|
<commit_before><commit_msg>Add package-level import for SNGP.
PiperOrigin-RevId: 322276743<commit_after>
|
# coding=utf-8
# Copyright 2020 The Edward2 Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Spectral-normalized neural Gaussian process (SNGP)."""
# pylint: disable=wildcard-import
from experimental.sngp.gaussian_process import * # local file import
from experimental.sngp.normalization import * # local file import
# pylint: enable=wildcard-import
|
Add package-level import for SNGP.
PiperOrigin-RevId: 322276743# coding=utf-8
# Copyright 2020 The Edward2 Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Spectral-normalized neural Gaussian process (SNGP)."""
# pylint: disable=wildcard-import
from experimental.sngp.gaussian_process import * # local file import
from experimental.sngp.normalization import * # local file import
# pylint: enable=wildcard-import
|
<commit_before><commit_msg>Add package-level import for SNGP.
PiperOrigin-RevId: 322276743<commit_after># coding=utf-8
# Copyright 2020 The Edward2 Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Spectral-normalized neural Gaussian process (SNGP)."""
# pylint: disable=wildcard-import
from experimental.sngp.gaussian_process import * # local file import
from experimental.sngp.normalization import * # local file import
# pylint: enable=wildcard-import
|
|
80be0b153aa082fd22d365dc16f6c48b800165cd
|
pymatgen/symmetry/tests/test_spacegroup.py
|
pymatgen/symmetry/tests/test_spacegroup.py
|
#!/usr/bin/env python
'''
Created on Mar 12, 2012
'''
from __future__ import division
__author__="Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyue@mit.edu"
__date__ = "Mar 12, 2012"
import unittest
import os
from pymatgen.core.structure import PeriodicSite
from pymatgen.symmetry.spacegroup import Spacegroup
from pymatgen.io.vaspio import Poscar
from pymatgen.symmetry.spglib_adaptor import SymmetryFinder
import pymatgen
test_dir = os.path.join(os.path.dirname(os.path.abspath(pymatgen.__file__)), '..', 'test_files')
class SpacegroupTest(unittest.TestCase):
def setUp(self):
p = Poscar.from_file(os.path.join(test_dir, 'POSCAR'))
self.structure = p.struct
self.sg1 = SymmetryFinder(self.structure, 0.001).get_spacegroup()
self.sg2 = Spacegroup.from_spacegroup_number(62)
def test_are_symmetrically_equivalent(self):
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [2,3]]
self.assertTrue(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertTrue(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [0,2]]
self.assertFalse(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertFalse(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
Add a unittest for spacegroup. Still very basic.
|
Add a unittest for spacegroup. Still very basic.
Former-commit-id: 515d29a50e2b0abf9889329545a6218ec8dbb707 [formerly ad5f851b7959f7bf09d7cd669d8db126fa962982]
Former-commit-id: 51cd323da532e54c94f8d427ed631c11c9ecfae5
|
Python
|
mit
|
tallakahath/pymatgen,gpetretto/pymatgen,nisse3000/pymatgen,gpetretto/pymatgen,gpetretto/pymatgen,vorwerkc/pymatgen,Bismarrck/pymatgen,blondegeek/pymatgen,xhqu1981/pymatgen,ndardenne/pymatgen,blondegeek/pymatgen,tallakahath/pymatgen,gVallverdu/pymatgen,montoyjh/pymatgen,tschaume/pymatgen,fraricci/pymatgen,dongsenfo/pymatgen,dongsenfo/pymatgen,Bismarrck/pymatgen,setten/pymatgen,richardtran415/pymatgen,fraricci/pymatgen,gVallverdu/pymatgen,gVallverdu/pymatgen,tallakahath/pymatgen,czhengsci/pymatgen,nisse3000/pymatgen,xhqu1981/pymatgen,dongsenfo/pymatgen,Bismarrck/pymatgen,ndardenne/pymatgen,richardtran415/pymatgen,xhqu1981/pymatgen,davidwaroquiers/pymatgen,aykol/pymatgen,matk86/pymatgen,nisse3000/pymatgen,mbkumar/pymatgen,tschaume/pymatgen,matk86/pymatgen,matk86/pymatgen,czhengsci/pymatgen,richardtran415/pymatgen,johnson1228/pymatgen,davidwaroquiers/pymatgen,vorwerkc/pymatgen,johnson1228/pymatgen,montoyjh/pymatgen,fraricci/pymatgen,vorwerkc/pymatgen,mbkumar/pymatgen,blondegeek/pymatgen,czhengsci/pymatgen,davidwaroquiers/pymatgen,johnson1228/pymatgen,ndardenne/pymatgen,fraricci/pymatgen,blondegeek/pymatgen,vorwerkc/pymatgen,montoyjh/pymatgen,tschaume/pymatgen,setten/pymatgen,matk86/pymatgen,richardtran415/pymatgen,davidwaroquiers/pymatgen,montoyjh/pymatgen,dongsenfo/pymatgen,gVallverdu/pymatgen,aykol/pymatgen,mbkumar/pymatgen,setten/pymatgen,gmatteo/pymatgen,Bismarrck/pymatgen,setten/pymatgen,czhengsci/pymatgen,nisse3000/pymatgen,tschaume/pymatgen,mbkumar/pymatgen,johnson1228/pymatgen,gmatteo/pymatgen,aykol/pymatgen,tschaume/pymatgen,Bismarrck/pymatgen,gpetretto/pymatgen
|
Add a unittest for spacegroup. Still very basic.
Former-commit-id: 515d29a50e2b0abf9889329545a6218ec8dbb707 [formerly ad5f851b7959f7bf09d7cd669d8db126fa962982]
Former-commit-id: 51cd323da532e54c94f8d427ed631c11c9ecfae5
|
#!/usr/bin/env python
'''
Created on Mar 12, 2012
'''
from __future__ import division
__author__="Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyue@mit.edu"
__date__ = "Mar 12, 2012"
import unittest
import os
from pymatgen.core.structure import PeriodicSite
from pymatgen.symmetry.spacegroup import Spacegroup
from pymatgen.io.vaspio import Poscar
from pymatgen.symmetry.spglib_adaptor import SymmetryFinder
import pymatgen
test_dir = os.path.join(os.path.dirname(os.path.abspath(pymatgen.__file__)), '..', 'test_files')
class SpacegroupTest(unittest.TestCase):
def setUp(self):
p = Poscar.from_file(os.path.join(test_dir, 'POSCAR'))
self.structure = p.struct
self.sg1 = SymmetryFinder(self.structure, 0.001).get_spacegroup()
self.sg2 = Spacegroup.from_spacegroup_number(62)
def test_are_symmetrically_equivalent(self):
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [2,3]]
self.assertTrue(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertTrue(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [0,2]]
self.assertFalse(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertFalse(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
<commit_before><commit_msg>Add a unittest for spacegroup. Still very basic.
Former-commit-id: 515d29a50e2b0abf9889329545a6218ec8dbb707 [formerly ad5f851b7959f7bf09d7cd669d8db126fa962982]
Former-commit-id: 51cd323da532e54c94f8d427ed631c11c9ecfae5<commit_after>
|
#!/usr/bin/env python
'''
Created on Mar 12, 2012
'''
from __future__ import division
__author__="Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyue@mit.edu"
__date__ = "Mar 12, 2012"
import unittest
import os
from pymatgen.core.structure import PeriodicSite
from pymatgen.symmetry.spacegroup import Spacegroup
from pymatgen.io.vaspio import Poscar
from pymatgen.symmetry.spglib_adaptor import SymmetryFinder
import pymatgen
test_dir = os.path.join(os.path.dirname(os.path.abspath(pymatgen.__file__)), '..', 'test_files')
class SpacegroupTest(unittest.TestCase):
def setUp(self):
p = Poscar.from_file(os.path.join(test_dir, 'POSCAR'))
self.structure = p.struct
self.sg1 = SymmetryFinder(self.structure, 0.001).get_spacegroup()
self.sg2 = Spacegroup.from_spacegroup_number(62)
def test_are_symmetrically_equivalent(self):
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [2,3]]
self.assertTrue(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertTrue(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [0,2]]
self.assertFalse(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertFalse(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
Add a unittest for spacegroup. Still very basic.
Former-commit-id: 515d29a50e2b0abf9889329545a6218ec8dbb707 [formerly ad5f851b7959f7bf09d7cd669d8db126fa962982]
Former-commit-id: 51cd323da532e54c94f8d427ed631c11c9ecfae5#!/usr/bin/env python
'''
Created on Mar 12, 2012
'''
from __future__ import division
__author__="Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyue@mit.edu"
__date__ = "Mar 12, 2012"
import unittest
import os
from pymatgen.core.structure import PeriodicSite
from pymatgen.symmetry.spacegroup import Spacegroup
from pymatgen.io.vaspio import Poscar
from pymatgen.symmetry.spglib_adaptor import SymmetryFinder
import pymatgen
test_dir = os.path.join(os.path.dirname(os.path.abspath(pymatgen.__file__)), '..', 'test_files')
class SpacegroupTest(unittest.TestCase):
def setUp(self):
p = Poscar.from_file(os.path.join(test_dir, 'POSCAR'))
self.structure = p.struct
self.sg1 = SymmetryFinder(self.structure, 0.001).get_spacegroup()
self.sg2 = Spacegroup.from_spacegroup_number(62)
def test_are_symmetrically_equivalent(self):
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [2,3]]
self.assertTrue(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertTrue(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [0,2]]
self.assertFalse(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertFalse(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
<commit_before><commit_msg>Add a unittest for spacegroup. Still very basic.
Former-commit-id: 515d29a50e2b0abf9889329545a6218ec8dbb707 [formerly ad5f851b7959f7bf09d7cd669d8db126fa962982]
Former-commit-id: 51cd323da532e54c94f8d427ed631c11c9ecfae5<commit_after>#!/usr/bin/env python
'''
Created on Mar 12, 2012
'''
from __future__ import division
__author__="Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyue@mit.edu"
__date__ = "Mar 12, 2012"
import unittest
import os
from pymatgen.core.structure import PeriodicSite
from pymatgen.symmetry.spacegroup import Spacegroup
from pymatgen.io.vaspio import Poscar
from pymatgen.symmetry.spglib_adaptor import SymmetryFinder
import pymatgen
test_dir = os.path.join(os.path.dirname(os.path.abspath(pymatgen.__file__)), '..', 'test_files')
class SpacegroupTest(unittest.TestCase):
def setUp(self):
p = Poscar.from_file(os.path.join(test_dir, 'POSCAR'))
self.structure = p.struct
self.sg1 = SymmetryFinder(self.structure, 0.001).get_spacegroup()
self.sg2 = Spacegroup.from_spacegroup_number(62)
def test_are_symmetrically_equivalent(self):
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [2,3]]
self.assertTrue(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertTrue(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [0,2]]
self.assertFalse(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertFalse(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
|
870de29a70255027a97053e089149cc98150c021
|
basic-model-import/src/main/python/basic-model.py
|
basic-model-import/src/main/python/basic-model.py
|
# -*- coding: utf-8 -*-
import tensorflow as tf
x = tf.placeholder("double")
y = tf.placeholder("double")
z = tf.mul(x, y)
with tf.Session() as sess:
tf.train.write_graph(sess.graph_def, '/tmp/my-model', 'basic.pb', as_text=False)
|
Add (python) basic model itself
|
Add (python) basic model itself
|
Python
|
mit
|
vjuranek/tensorflow-snippets,vjuranek/tensorflow-snippets
|
Add (python) basic model itself
|
# -*- coding: utf-8 -*-
import tensorflow as tf
x = tf.placeholder("double")
y = tf.placeholder("double")
z = tf.mul(x, y)
with tf.Session() as sess:
tf.train.write_graph(sess.graph_def, '/tmp/my-model', 'basic.pb', as_text=False)
|
<commit_before><commit_msg>Add (python) basic model itself<commit_after>
|
# -*- coding: utf-8 -*-
import tensorflow as tf
x = tf.placeholder("double")
y = tf.placeholder("double")
z = tf.mul(x, y)
with tf.Session() as sess:
tf.train.write_graph(sess.graph_def, '/tmp/my-model', 'basic.pb', as_text=False)
|
Add (python) basic model itself# -*- coding: utf-8 -*-
import tensorflow as tf
x = tf.placeholder("double")
y = tf.placeholder("double")
z = tf.mul(x, y)
with tf.Session() as sess:
tf.train.write_graph(sess.graph_def, '/tmp/my-model', 'basic.pb', as_text=False)
|
<commit_before><commit_msg>Add (python) basic model itself<commit_after># -*- coding: utf-8 -*-
import tensorflow as tf
x = tf.placeholder("double")
y = tf.placeholder("double")
z = tf.mul(x, y)
with tf.Session() as sess:
tf.train.write_graph(sess.graph_def, '/tmp/my-model', 'basic.pb', as_text=False)
|
|
5d8541f247275e781d6740caa5f24e5b5395dfc9
|
metafunctions/tests/test_imports.py
|
metafunctions/tests/test_imports.py
|
import unittest
import random
import importlib
class TestUnit(unittest.TestCase):
def test_api_imports(self):
expected_names = ['node', 'bind_call_state', 'star', 'store', 'recall', 'concurrent',
'mmap', 'locate_error']
random.shuffle(expected_names)
for name in expected_names:
exec(f'from metafunctions import {name}')
|
Add simple test that api functions are importable
|
Add simple test that api functions are importable
|
Python
|
mit
|
ForeverWintr/metafunctions
|
Add simple test that api functions are importable
|
import unittest
import random
import importlib
class TestUnit(unittest.TestCase):
def test_api_imports(self):
expected_names = ['node', 'bind_call_state', 'star', 'store', 'recall', 'concurrent',
'mmap', 'locate_error']
random.shuffle(expected_names)
for name in expected_names:
exec(f'from metafunctions import {name}')
|
<commit_before><commit_msg>Add simple test that api functions are importable<commit_after>
|
import unittest
import random
import importlib
class TestUnit(unittest.TestCase):
def test_api_imports(self):
expected_names = ['node', 'bind_call_state', 'star', 'store', 'recall', 'concurrent',
'mmap', 'locate_error']
random.shuffle(expected_names)
for name in expected_names:
exec(f'from metafunctions import {name}')
|
Add simple test that api functions are importableimport unittest
import random
import importlib
class TestUnit(unittest.TestCase):
def test_api_imports(self):
expected_names = ['node', 'bind_call_state', 'star', 'store', 'recall', 'concurrent',
'mmap', 'locate_error']
random.shuffle(expected_names)
for name in expected_names:
exec(f'from metafunctions import {name}')
|
<commit_before><commit_msg>Add simple test that api functions are importable<commit_after>import unittest
import random
import importlib
class TestUnit(unittest.TestCase):
def test_api_imports(self):
expected_names = ['node', 'bind_call_state', 'star', 'store', 'recall', 'concurrent',
'mmap', 'locate_error']
random.shuffle(expected_names)
for name in expected_names:
exec(f'from metafunctions import {name}')
|
|
fdcb0da2188e22341170d939a1d92b70189ff8df
|
migrations/versions/0114_another_letter_org.py
|
migrations/versions/0114_another_letter_org.py
|
"""empty message
Revision ID: 0114_another_letter_org
Revises: 0113_job_created_by_nullable
Create Date: 2017-06-29 12:44:16.815039
"""
# revision identifiers, used by Alembic.
revision = '0114_another_letter_org'
down_revision = '0113_job_created_by_nullable'
from alembic import op
def upgrade():
op.execute("""
INSERT INTO dvla_organisation VALUES
('005', 'Companies House')
""")
def downgrade():
# data migration, no downloads
pass
|
Add letter organisation for Companies House
|
Add letter organisation for Companies House
Depends on:
- [ ] https://github.com/alphagov/notifications-template-preview/pull/37
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
Add letter organisation for Companies House
Depends on:
- [ ] https://github.com/alphagov/notifications-template-preview/pull/37
|
"""empty message
Revision ID: 0114_another_letter_org
Revises: 0113_job_created_by_nullable
Create Date: 2017-06-29 12:44:16.815039
"""
# revision identifiers, used by Alembic.
revision = '0114_another_letter_org'
down_revision = '0113_job_created_by_nullable'
from alembic import op
def upgrade():
op.execute("""
INSERT INTO dvla_organisation VALUES
('005', 'Companies House')
""")
def downgrade():
# data migration, no downloads
pass
|
<commit_before><commit_msg>Add letter organisation for Companies House
Depends on:
- [ ] https://github.com/alphagov/notifications-template-preview/pull/37<commit_after>
|
"""empty message
Revision ID: 0114_another_letter_org
Revises: 0113_job_created_by_nullable
Create Date: 2017-06-29 12:44:16.815039
"""
# revision identifiers, used by Alembic.
revision = '0114_another_letter_org'
down_revision = '0113_job_created_by_nullable'
from alembic import op
def upgrade():
op.execute("""
INSERT INTO dvla_organisation VALUES
('005', 'Companies House')
""")
def downgrade():
# data migration, no downloads
pass
|
Add letter organisation for Companies House
Depends on:
- [ ] https://github.com/alphagov/notifications-template-preview/pull/37"""empty message
Revision ID: 0114_another_letter_org
Revises: 0113_job_created_by_nullable
Create Date: 2017-06-29 12:44:16.815039
"""
# revision identifiers, used by Alembic.
revision = '0114_another_letter_org'
down_revision = '0113_job_created_by_nullable'
from alembic import op
def upgrade():
op.execute("""
INSERT INTO dvla_organisation VALUES
('005', 'Companies House')
""")
def downgrade():
# data migration, no downloads
pass
|
<commit_before><commit_msg>Add letter organisation for Companies House
Depends on:
- [ ] https://github.com/alphagov/notifications-template-preview/pull/37<commit_after>"""empty message
Revision ID: 0114_another_letter_org
Revises: 0113_job_created_by_nullable
Create Date: 2017-06-29 12:44:16.815039
"""
# revision identifiers, used by Alembic.
revision = '0114_another_letter_org'
down_revision = '0113_job_created_by_nullable'
from alembic import op
def upgrade():
op.execute("""
INSERT INTO dvla_organisation VALUES
('005', 'Companies House')
""")
def downgrade():
# data migration, no downloads
pass
|
|
2e6d9f5c0d55e6fa8740679e643c2df14ac82134
|
python/pygtk/python_gtk3_pygobject/container_paned.py
|
python/pygtk/python_gtk3_pygobject/container_paned.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
"""
This is the simplest Python GTK+3 Paned snippet.
See: https://lazka.github.io/pgi-docs/Gtk-3.0/classes/Paned.html
http://learngtk.org/tutorials/python_gtk3_tutorial/html/paned.html
"""
from gi.repository import Gtk as gtk
def main():
window = gtk.Window()
paned = gtk.Paned(orientation=gtk.Orientation.VERTICAL) # gtk.Orientation.HORIZONTAL or gtk.Orientation.VERTICAL
paned.set_position(30) # Sets the position in pixels of the divider between the two panes (the divider is at n pixels from the top).
window.add(paned)
button1 = gtk.Button(label="Btn 1")
paned.add1(button1)
#paned.add1(button1, resize=True, shrink=True)
button2 = gtk.Button(label="Btn 2")
paned.add2(button2)
#paned.add2(button2, resize=True, shrink=True)
window.connect("delete-event", gtk.main_quit) # ask to quit the application when the close button is clicked
window.show_all() # display the window
gtk.main() # GTK+ main loop
if __name__ == '__main__':
main()
|
Add a snippet (Python GTK+3).
|
Add a snippet (Python GTK+3).
|
Python
|
mit
|
jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets
|
Add a snippet (Python GTK+3).
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
"""
This is the simplest Python GTK+3 Paned snippet.
See: https://lazka.github.io/pgi-docs/Gtk-3.0/classes/Paned.html
http://learngtk.org/tutorials/python_gtk3_tutorial/html/paned.html
"""
from gi.repository import Gtk as gtk
def main():
window = gtk.Window()
paned = gtk.Paned(orientation=gtk.Orientation.VERTICAL) # gtk.Orientation.HORIZONTAL or gtk.Orientation.VERTICAL
paned.set_position(30) # Sets the position in pixels of the divider between the two panes (the divider is at n pixels from the top).
window.add(paned)
button1 = gtk.Button(label="Btn 1")
paned.add1(button1)
#paned.add1(button1, resize=True, shrink=True)
button2 = gtk.Button(label="Btn 2")
paned.add2(button2)
#paned.add2(button2, resize=True, shrink=True)
window.connect("delete-event", gtk.main_quit) # ask to quit the application when the close button is clicked
window.show_all() # display the window
gtk.main() # GTK+ main loop
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a snippet (Python GTK+3).<commit_after>
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
"""
This is the simplest Python GTK+3 Paned snippet.
See: https://lazka.github.io/pgi-docs/Gtk-3.0/classes/Paned.html
http://learngtk.org/tutorials/python_gtk3_tutorial/html/paned.html
"""
from gi.repository import Gtk as gtk
def main():
window = gtk.Window()
paned = gtk.Paned(orientation=gtk.Orientation.VERTICAL) # gtk.Orientation.HORIZONTAL or gtk.Orientation.VERTICAL
paned.set_position(30) # Sets the position in pixels of the divider between the two panes (the divider is at n pixels from the top).
window.add(paned)
button1 = gtk.Button(label="Btn 1")
paned.add1(button1)
#paned.add1(button1, resize=True, shrink=True)
button2 = gtk.Button(label="Btn 2")
paned.add2(button2)
#paned.add2(button2, resize=True, shrink=True)
window.connect("delete-event", gtk.main_quit) # ask to quit the application when the close button is clicked
window.show_all() # display the window
gtk.main() # GTK+ main loop
if __name__ == '__main__':
main()
|
Add a snippet (Python GTK+3).#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
"""
This is the simplest Python GTK+3 Paned snippet.
See: https://lazka.github.io/pgi-docs/Gtk-3.0/classes/Paned.html
http://learngtk.org/tutorials/python_gtk3_tutorial/html/paned.html
"""
from gi.repository import Gtk as gtk
def main():
window = gtk.Window()
paned = gtk.Paned(orientation=gtk.Orientation.VERTICAL) # gtk.Orientation.HORIZONTAL or gtk.Orientation.VERTICAL
paned.set_position(30) # Sets the position in pixels of the divider between the two panes (the divider is at n pixels from the top).
window.add(paned)
button1 = gtk.Button(label="Btn 1")
paned.add1(button1)
#paned.add1(button1, resize=True, shrink=True)
button2 = gtk.Button(label="Btn 2")
paned.add2(button2)
#paned.add2(button2, resize=True, shrink=True)
window.connect("delete-event", gtk.main_quit) # ask to quit the application when the close button is clicked
window.show_all() # display the window
gtk.main() # GTK+ main loop
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a snippet (Python GTK+3).<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
"""
This is the simplest Python GTK+3 Paned snippet.
See: https://lazka.github.io/pgi-docs/Gtk-3.0/classes/Paned.html
http://learngtk.org/tutorials/python_gtk3_tutorial/html/paned.html
"""
from gi.repository import Gtk as gtk
def main():
window = gtk.Window()
paned = gtk.Paned(orientation=gtk.Orientation.VERTICAL) # gtk.Orientation.HORIZONTAL or gtk.Orientation.VERTICAL
paned.set_position(30) # Sets the position in pixels of the divider between the two panes (the divider is at n pixels from the top).
window.add(paned)
button1 = gtk.Button(label="Btn 1")
paned.add1(button1)
#paned.add1(button1, resize=True, shrink=True)
button2 = gtk.Button(label="Btn 2")
paned.add2(button2)
#paned.add2(button2, resize=True, shrink=True)
window.connect("delete-event", gtk.main_quit) # ask to quit the application when the close button is clicked
window.show_all() # display the window
gtk.main() # GTK+ main loop
if __name__ == '__main__':
main()
|
|
22a318b760d8332be8526fb755d2cf1f154874fc
|
programming/python/installed_packages.py
|
programming/python/installed_packages.py
|
# from http://stackoverflow.com/a/23885252
import pip
installed_packages = pip.get_installed_distributions()
installed_packages_list = sorted(["%s==%s" % (i.key, i.version)
for i in installed_packages])
for package in installed_packages_list:
print package
|
Add script to show what python modules are installed
|
Add script to show what python modules are installed
|
Python
|
mit
|
claremacrae/raspi_code,claremacrae/raspi_code,claremacrae/raspi_code
|
Add script to show what python modules are installed
|
# from http://stackoverflow.com/a/23885252
import pip
installed_packages = pip.get_installed_distributions()
installed_packages_list = sorted(["%s==%s" % (i.key, i.version)
for i in installed_packages])
for package in installed_packages_list:
print package
|
<commit_before><commit_msg>Add script to show what python modules are installed<commit_after>
|
# from http://stackoverflow.com/a/23885252
import pip
installed_packages = pip.get_installed_distributions()
installed_packages_list = sorted(["%s==%s" % (i.key, i.version)
for i in installed_packages])
for package in installed_packages_list:
print package
|
Add script to show what python modules are installed# from http://stackoverflow.com/a/23885252
import pip
installed_packages = pip.get_installed_distributions()
installed_packages_list = sorted(["%s==%s" % (i.key, i.version)
for i in installed_packages])
for package in installed_packages_list:
print package
|
<commit_before><commit_msg>Add script to show what python modules are installed<commit_after># from http://stackoverflow.com/a/23885252
import pip
installed_packages = pip.get_installed_distributions()
installed_packages_list = sorted(["%s==%s" % (i.key, i.version)
for i in installed_packages])
for package in installed_packages_list:
print package
|
|
8d34420d595e7b58220004ecf521cc8cfbc224f1
|
selenium_tests/test_share_application.py
|
selenium_tests/test_share_application.py
|
# -*- coding: utf-8 -*-
from selenium_tests.UserDriverTest import UserDriverTest
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
class TestShareApplication(UserDriverTest):
def test_share_modal(self):
with self.running_container():
self.open_application_settings()
self.click_first_element_located(By.ID, "share-button")
self.click_modal_footer_button("Close")
def test_share_button(self):
with self.running_container():
self.open_application_settings()
self.click_first_element_located(By.ID, "share-button")
self.click_first_element_located(By.ID, "cp-clipboard-button")
# Now the share url should be in the clipboard
input_element = self.driver.find_element_by_id("shared-url")
# Clear the input element and paste what is in the clipboard in
# order to retrieve it (lacking better way to retrieve the clipboard
# value)
input_element.clear()
input_element.send_keys(Keys.CONTROL, 'v')
clipboard_value = input_element.get_attribute("value")
# Go to the shared url
self.driver.get(clipboard_value)
self.wait_until_presence_of_element_located(By.ID, "noVNC_screen")
# Go back to simphony-remote
self.driver.back()
self.wait_until_application_list_loaded()
|
Add tests for the share button
|
Add tests for the share button
|
Python
|
bsd-3-clause
|
simphony/simphony-remote,simphony/simphony-remote,simphony/simphony-remote,simphony/simphony-remote
|
Add tests for the share button
|
# -*- coding: utf-8 -*-
from selenium_tests.UserDriverTest import UserDriverTest
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
class TestShareApplication(UserDriverTest):
def test_share_modal(self):
with self.running_container():
self.open_application_settings()
self.click_first_element_located(By.ID, "share-button")
self.click_modal_footer_button("Close")
def test_share_button(self):
with self.running_container():
self.open_application_settings()
self.click_first_element_located(By.ID, "share-button")
self.click_first_element_located(By.ID, "cp-clipboard-button")
# Now the share url should be in the clipboard
input_element = self.driver.find_element_by_id("shared-url")
# Clear the input element and paste what is in the clipboard in
# order to retrieve it (lacking better way to retrieve the clipboard
# value)
input_element.clear()
input_element.send_keys(Keys.CONTROL, 'v')
clipboard_value = input_element.get_attribute("value")
# Go to the shared url
self.driver.get(clipboard_value)
self.wait_until_presence_of_element_located(By.ID, "noVNC_screen")
# Go back to simphony-remote
self.driver.back()
self.wait_until_application_list_loaded()
|
<commit_before><commit_msg>Add tests for the share button<commit_after>
|
# -*- coding: utf-8 -*-
from selenium_tests.UserDriverTest import UserDriverTest
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
class TestShareApplication(UserDriverTest):
def test_share_modal(self):
with self.running_container():
self.open_application_settings()
self.click_first_element_located(By.ID, "share-button")
self.click_modal_footer_button("Close")
def test_share_button(self):
with self.running_container():
self.open_application_settings()
self.click_first_element_located(By.ID, "share-button")
self.click_first_element_located(By.ID, "cp-clipboard-button")
# Now the share url should be in the clipboard
input_element = self.driver.find_element_by_id("shared-url")
# Clear the input element and paste what is in the clipboard in
# order to retrieve it (lacking better way to retrieve the clipboard
# value)
input_element.clear()
input_element.send_keys(Keys.CONTROL, 'v')
clipboard_value = input_element.get_attribute("value")
# Go to the shared url
self.driver.get(clipboard_value)
self.wait_until_presence_of_element_located(By.ID, "noVNC_screen")
# Go back to simphony-remote
self.driver.back()
self.wait_until_application_list_loaded()
|
Add tests for the share button# -*- coding: utf-8 -*-
from selenium_tests.UserDriverTest import UserDriverTest
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
class TestShareApplication(UserDriverTest):
def test_share_modal(self):
with self.running_container():
self.open_application_settings()
self.click_first_element_located(By.ID, "share-button")
self.click_modal_footer_button("Close")
def test_share_button(self):
with self.running_container():
self.open_application_settings()
self.click_first_element_located(By.ID, "share-button")
self.click_first_element_located(By.ID, "cp-clipboard-button")
# Now the share url should be in the clipboard
input_element = self.driver.find_element_by_id("shared-url")
# Clear the input element and paste what is in the clipboard in
# order to retrieve it (lacking better way to retrieve the clipboard
# value)
input_element.clear()
input_element.send_keys(Keys.CONTROL, 'v')
clipboard_value = input_element.get_attribute("value")
# Go to the shared url
self.driver.get(clipboard_value)
self.wait_until_presence_of_element_located(By.ID, "noVNC_screen")
# Go back to simphony-remote
self.driver.back()
self.wait_until_application_list_loaded()
|
<commit_before><commit_msg>Add tests for the share button<commit_after># -*- coding: utf-8 -*-
from selenium_tests.UserDriverTest import UserDriverTest
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
class TestShareApplication(UserDriverTest):
def test_share_modal(self):
with self.running_container():
self.open_application_settings()
self.click_first_element_located(By.ID, "share-button")
self.click_modal_footer_button("Close")
def test_share_button(self):
with self.running_container():
self.open_application_settings()
self.click_first_element_located(By.ID, "share-button")
self.click_first_element_located(By.ID, "cp-clipboard-button")
# Now the share url should be in the clipboard
input_element = self.driver.find_element_by_id("shared-url")
# Clear the input element and paste what is in the clipboard in
# order to retrieve it (lacking better way to retrieve the clipboard
# value)
input_element.clear()
input_element.send_keys(Keys.CONTROL, 'v')
clipboard_value = input_element.get_attribute("value")
# Go to the shared url
self.driver.get(clipboard_value)
self.wait_until_presence_of_element_located(By.ID, "noVNC_screen")
# Go back to simphony-remote
self.driver.back()
self.wait_until_application_list_loaded()
|
|
0d36640d47c30d8b9cd2b2eff1c8ccf1e97c13c5
|
subscriptions/management/commands/add_missed_call_service_audio_notification_to_active_subscriptions.py
|
subscriptions/management/commands/add_missed_call_service_audio_notification_to_active_subscriptions.py
|
from django.core.exceptions import ObjectDoesNotExist
from django.core.management.base import BaseCommand, CommandError
from subscriptions.models import Subscription
class Command(BaseCommand):
help = ("Active subscription holders need to be informed via audio file "
"about the new missed call service.")
def handle(self, *args, **options):
self.stdout.write("Processing active subscriptions ...")
count = 0
try:
active_subscriptions_list = list(
Subscription.objects.filter(active=True))
except ObjectDoesNotExist:
self.stdout.write("No active subscriptions found")
if len(active_subscriptions_list) > 0:
for active_subscription in active_subscriptions_list:
# Add audio file to subscription meta_data. Not sure how we'll
# handle translations here.
if (active_subscription.metadata is not None and
"welcome_message" not in active_subscription.metadata):
active_subscription["audo_file_url"] = "audio_file_url"
count += 1
if count > 0:
self.stdout.write(
"Update {} subscriptions with voice notes".format(count))
else:
self.stdout.write(
"No subscriptions updated with audio file notes")
|
Add missed call service audio notification to active subscriptions
|
Add missed call service audio notification to active subscriptions
|
Python
|
bsd-3-clause
|
praekelt/seed-staged-based-messaging,praekelt/seed-stage-based-messaging,praekelt/seed-stage-based-messaging
|
Add missed call service audio notification to active subscriptions
|
from django.core.exceptions import ObjectDoesNotExist
from django.core.management.base import BaseCommand, CommandError
from subscriptions.models import Subscription
class Command(BaseCommand):
help = ("Active subscription holders need to be informed via audio file "
"about the new missed call service.")
def handle(self, *args, **options):
self.stdout.write("Processing active subscriptions ...")
count = 0
try:
active_subscriptions_list = list(
Subscription.objects.filter(active=True))
except ObjectDoesNotExist:
self.stdout.write("No active subscriptions found")
if len(active_subscriptions_list) > 0:
for active_subscription in active_subscriptions_list:
# Add audio file to subscription meta_data. Not sure how we'll
# handle translations here.
if (active_subscription.metadata is not None and
"welcome_message" not in active_subscription.metadata):
active_subscription["audo_file_url"] = "audio_file_url"
count += 1
if count > 0:
self.stdout.write(
"Update {} subscriptions with voice notes".format(count))
else:
self.stdout.write(
"No subscriptions updated with audio file notes")
|
<commit_before><commit_msg>Add missed call service audio notification to active subscriptions<commit_after>
|
from django.core.exceptions import ObjectDoesNotExist
from django.core.management.base import BaseCommand, CommandError
from subscriptions.models import Subscription
class Command(BaseCommand):
help = ("Active subscription holders need to be informed via audio file "
"about the new missed call service.")
def handle(self, *args, **options):
self.stdout.write("Processing active subscriptions ...")
count = 0
try:
active_subscriptions_list = list(
Subscription.objects.filter(active=True))
except ObjectDoesNotExist:
self.stdout.write("No active subscriptions found")
if len(active_subscriptions_list) > 0:
for active_subscription in active_subscriptions_list:
# Add audio file to subscription meta_data. Not sure how we'll
# handle translations here.
if (active_subscription.metadata is not None and
"welcome_message" not in active_subscription.metadata):
active_subscription["audo_file_url"] = "audio_file_url"
count += 1
if count > 0:
self.stdout.write(
"Update {} subscriptions with voice notes".format(count))
else:
self.stdout.write(
"No subscriptions updated with audio file notes")
|
Add missed call service audio notification to active subscriptionsfrom django.core.exceptions import ObjectDoesNotExist
from django.core.management.base import BaseCommand, CommandError
from subscriptions.models import Subscription
class Command(BaseCommand):
help = ("Active subscription holders need to be informed via audio file "
"about the new missed call service.")
def handle(self, *args, **options):
self.stdout.write("Processing active subscriptions ...")
count = 0
try:
active_subscriptions_list = list(
Subscription.objects.filter(active=True))
except ObjectDoesNotExist:
self.stdout.write("No active subscriptions found")
if len(active_subscriptions_list) > 0:
for active_subscription in active_subscriptions_list:
# Add audio file to subscription meta_data. Not sure how we'll
# handle translations here.
if (active_subscription.metadata is not None and
"welcome_message" not in active_subscription.metadata):
active_subscription["audo_file_url"] = "audio_file_url"
count += 1
if count > 0:
self.stdout.write(
"Update {} subscriptions with voice notes".format(count))
else:
self.stdout.write(
"No subscriptions updated with audio file notes")
|
<commit_before><commit_msg>Add missed call service audio notification to active subscriptions<commit_after>from django.core.exceptions import ObjectDoesNotExist
from django.core.management.base import BaseCommand, CommandError
from subscriptions.models import Subscription
class Command(BaseCommand):
help = ("Active subscription holders need to be informed via audio file "
"about the new missed call service.")
def handle(self, *args, **options):
self.stdout.write("Processing active subscriptions ...")
count = 0
try:
active_subscriptions_list = list(
Subscription.objects.filter(active=True))
except ObjectDoesNotExist:
self.stdout.write("No active subscriptions found")
if len(active_subscriptions_list) > 0:
for active_subscription in active_subscriptions_list:
# Add audio file to subscription meta_data. Not sure how we'll
# handle translations here.
if (active_subscription.metadata is not None and
"welcome_message" not in active_subscription.metadata):
active_subscription["audo_file_url"] = "audio_file_url"
count += 1
if count > 0:
self.stdout.write(
"Update {} subscriptions with voice notes".format(count))
else:
self.stdout.write(
"No subscriptions updated with audio file notes")
|
|
7319b0efbe0bb5515d1a244db4dadebab9a4e3ec
|
tests/app/soc/modules/gci/views/test_student_forms.py
|
tests/app/soc/modules/gci/views/test_student_forms.py
|
# Copyright 2012 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests the view for GCI student form uploads.
"""
from tests.profile_utils import GCIProfileHelper
from tests.test_utils import GCIDjangoTestCase
class StudentFormUploadTest(GCIDjangoTestCase):
"""Tests the Student form upload page.
"""
def setUp(self):
self.init()
self.url = '/gci/student/forms/' + self.gci.key().name()
def assertStudentFormUploadTemplatesUsed(self, response):
"""Asserts that all the templates from student form upload page are used.
"""
self.assertGCITemplatesUsed(response)
self.assertTemplateUsed(response, 'v2/modules/gci/student_forms/base.html')
def testStudentFormUpload(self):
"""Tests the studentsInfoList component of the dashboard.
"""
profile_helper = GCIProfileHelper(self.gci, self.dev_test)
student = profile_helper.createStudent()
response = self.get(self.url)
self.assertStudentFormUploadTemplatesUsed(response)
self.assertResponseOK(response)
self.assertContains(
response, 'To download the sample form or one of its translations')
|
Implement basic tests for GCI student form uploads.
|
Implement basic tests for GCI student form uploads.
|
Python
|
apache-2.0
|
rhyolight/nupic.son,rhyolight/nupic.son,rhyolight/nupic.son
|
Implement basic tests for GCI student form uploads.
|
# Copyright 2012 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests the view for GCI student form uploads.
"""
from tests.profile_utils import GCIProfileHelper
from tests.test_utils import GCIDjangoTestCase
class StudentFormUploadTest(GCIDjangoTestCase):
"""Tests the Student form upload page.
"""
def setUp(self):
self.init()
self.url = '/gci/student/forms/' + self.gci.key().name()
def assertStudentFormUploadTemplatesUsed(self, response):
"""Asserts that all the templates from student form upload page are used.
"""
self.assertGCITemplatesUsed(response)
self.assertTemplateUsed(response, 'v2/modules/gci/student_forms/base.html')
def testStudentFormUpload(self):
"""Tests the studentsInfoList component of the dashboard.
"""
profile_helper = GCIProfileHelper(self.gci, self.dev_test)
student = profile_helper.createStudent()
response = self.get(self.url)
self.assertStudentFormUploadTemplatesUsed(response)
self.assertResponseOK(response)
self.assertContains(
response, 'To download the sample form or one of its translations')
|
<commit_before><commit_msg>Implement basic tests for GCI student form uploads.<commit_after>
|
# Copyright 2012 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests the view for GCI student form uploads.
"""
from tests.profile_utils import GCIProfileHelper
from tests.test_utils import GCIDjangoTestCase
class StudentFormUploadTest(GCIDjangoTestCase):
"""Tests the Student form upload page.
"""
def setUp(self):
self.init()
self.url = '/gci/student/forms/' + self.gci.key().name()
def assertStudentFormUploadTemplatesUsed(self, response):
"""Asserts that all the templates from student form upload page are used.
"""
self.assertGCITemplatesUsed(response)
self.assertTemplateUsed(response, 'v2/modules/gci/student_forms/base.html')
def testStudentFormUpload(self):
"""Tests the studentsInfoList component of the dashboard.
"""
profile_helper = GCIProfileHelper(self.gci, self.dev_test)
student = profile_helper.createStudent()
response = self.get(self.url)
self.assertStudentFormUploadTemplatesUsed(response)
self.assertResponseOK(response)
self.assertContains(
response, 'To download the sample form or one of its translations')
|
Implement basic tests for GCI student form uploads.# Copyright 2012 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests the view for GCI student form uploads.
"""
from tests.profile_utils import GCIProfileHelper
from tests.test_utils import GCIDjangoTestCase
class StudentFormUploadTest(GCIDjangoTestCase):
"""Tests the Student form upload page.
"""
def setUp(self):
self.init()
self.url = '/gci/student/forms/' + self.gci.key().name()
def assertStudentFormUploadTemplatesUsed(self, response):
"""Asserts that all the templates from student form upload page are used.
"""
self.assertGCITemplatesUsed(response)
self.assertTemplateUsed(response, 'v2/modules/gci/student_forms/base.html')
def testStudentFormUpload(self):
"""Tests the studentsInfoList component of the dashboard.
"""
profile_helper = GCIProfileHelper(self.gci, self.dev_test)
student = profile_helper.createStudent()
response = self.get(self.url)
self.assertStudentFormUploadTemplatesUsed(response)
self.assertResponseOK(response)
self.assertContains(
response, 'To download the sample form or one of its translations')
|
<commit_before><commit_msg>Implement basic tests for GCI student form uploads.<commit_after># Copyright 2012 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests the view for GCI student form uploads.
"""
from tests.profile_utils import GCIProfileHelper
from tests.test_utils import GCIDjangoTestCase
class StudentFormUploadTest(GCIDjangoTestCase):
"""Tests the Student form upload page.
"""
def setUp(self):
self.init()
self.url = '/gci/student/forms/' + self.gci.key().name()
def assertStudentFormUploadTemplatesUsed(self, response):
"""Asserts that all the templates from student form upload page are used.
"""
self.assertGCITemplatesUsed(response)
self.assertTemplateUsed(response, 'v2/modules/gci/student_forms/base.html')
def testStudentFormUpload(self):
"""Tests the studentsInfoList component of the dashboard.
"""
profile_helper = GCIProfileHelper(self.gci, self.dev_test)
student = profile_helper.createStudent()
response = self.get(self.url)
self.assertStudentFormUploadTemplatesUsed(response)
self.assertResponseOK(response)
self.assertContains(
response, 'To download the sample form or one of its translations')
|
|
0869c28f6f8cd5a6ddd636a4850d1609a60f90a7
|
bluebottle/funding/migrations/0044_auto_20191108_1008.py
|
bluebottle/funding/migrations/0044_auto_20191108_1008.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-11-08 09:08
from __future__ import unicode_literals
from django.db import migrations
def fix_funding_matching_currencies(apps, schema_editor):
Funding = apps.get_model('funding', 'Funding')
for funding in Funding.objects.filter(amount_matching__gt=0):
if funding.amount_matching.currency != funding.target.currency:
funding.amount_matching.currency = funding.target.currency
funding.save()
class Migration(migrations.Migration):
dependencies = [
('funding', '0043_auto_20191108_0819'),
]
operations = [
migrations.RunPython(fix_funding_matching_currencies, migrations.RunPython.noop)
]
|
Fix amount matching in wrong currency
|
Fix amount matching in wrong currency
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
Fix amount matching in wrong currency
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-11-08 09:08
from __future__ import unicode_literals
from django.db import migrations
def fix_funding_matching_currencies(apps, schema_editor):
Funding = apps.get_model('funding', 'Funding')
for funding in Funding.objects.filter(amount_matching__gt=0):
if funding.amount_matching.currency != funding.target.currency:
funding.amount_matching.currency = funding.target.currency
funding.save()
class Migration(migrations.Migration):
dependencies = [
('funding', '0043_auto_20191108_0819'),
]
operations = [
migrations.RunPython(fix_funding_matching_currencies, migrations.RunPython.noop)
]
|
<commit_before><commit_msg>Fix amount matching in wrong currency<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-11-08 09:08
from __future__ import unicode_literals
from django.db import migrations
def fix_funding_matching_currencies(apps, schema_editor):
Funding = apps.get_model('funding', 'Funding')
for funding in Funding.objects.filter(amount_matching__gt=0):
if funding.amount_matching.currency != funding.target.currency:
funding.amount_matching.currency = funding.target.currency
funding.save()
class Migration(migrations.Migration):
dependencies = [
('funding', '0043_auto_20191108_0819'),
]
operations = [
migrations.RunPython(fix_funding_matching_currencies, migrations.RunPython.noop)
]
|
Fix amount matching in wrong currency# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-11-08 09:08
from __future__ import unicode_literals
from django.db import migrations
def fix_funding_matching_currencies(apps, schema_editor):
Funding = apps.get_model('funding', 'Funding')
for funding in Funding.objects.filter(amount_matching__gt=0):
if funding.amount_matching.currency != funding.target.currency:
funding.amount_matching.currency = funding.target.currency
funding.save()
class Migration(migrations.Migration):
dependencies = [
('funding', '0043_auto_20191108_0819'),
]
operations = [
migrations.RunPython(fix_funding_matching_currencies, migrations.RunPython.noop)
]
|
<commit_before><commit_msg>Fix amount matching in wrong currency<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-11-08 09:08
from __future__ import unicode_literals
from django.db import migrations
def fix_funding_matching_currencies(apps, schema_editor):
Funding = apps.get_model('funding', 'Funding')
for funding in Funding.objects.filter(amount_matching__gt=0):
if funding.amount_matching.currency != funding.target.currency:
funding.amount_matching.currency = funding.target.currency
funding.save()
class Migration(migrations.Migration):
dependencies = [
('funding', '0043_auto_20191108_0819'),
]
operations = [
migrations.RunPython(fix_funding_matching_currencies, migrations.RunPython.noop)
]
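For context, the forward function above only touches rows whose matching amount carries a different currency from the target, and simply re-labels the amount with the target's currency. A rough standalone sketch of that check using py-moneyed Money values (the import, values, and construction are illustrative assumptions, not taken from the project):
from moneyed import Money

target = Money('1000', 'EUR')           # hypothetical campaign target
amount_matching = Money('200', 'USD')   # hypothetical matching amount stored in the wrong currency
if amount_matching.currency != target.currency:
    # keep the numeric amount, switch the currency to match the target
    amount_matching = Money(amount_matching.amount, target.currency)
assert amount_matching.currency == target.currency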
|
|
999262374abb57dcb9d5b48db155f180f95015b0
|
count_labelsets.py
|
count_labelsets.py
|
"""Count labelsets that occur in the multilabel data.
Input: name of the directory that contains the multilabel data in text files
(one text file per text).
Usage: python count_labelsets.py <input dir>
"""
import argparse
import codecs
import os
from collections import Counter
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('input_dir', help='the name of the directory '
'containing the multilabel data text files to count label sets in.')
args = parser.parse_args()
input_dir = args.input_dir
labelsets = Counter()
len_labelset = Counter()
print 'texts'
text_files = [fi for fi in os.listdir(input_dir) if fi.endswith('.txt')]
for text_file in text_files:
print text_file
in_file = os.path.join(input_dir, text_file)
with codecs.open(in_file, 'rb', 'utf-8') as f:
lines = f.readlines()
for line in lines:
parts = line.split('\t')
labels = parts[2].strip()
labelsets[labels] += 1
# count labelset lengths
if labels != 'None':
length = len(labels.split('_'))
len_labelset[str(length).zfill(3)] += 1
print '\n# different labelsets\t{}'.format(str(len(labelsets)))
# print lengths
print '\n# labels\tfrequency'
for le, freq in len_labelset.most_common():
print '{}\t{}'.format(le, freq)
# print labelsets
print '\nLabelset\tfrequency'
for ls, freq in labelsets.most_common():
print '{}\t{}'.format(ls, freq)
|
Add script to count label set statistics
|
Add script to count label set statistics
Added a script that outputs statistics about label sets in the data.
|
Python
|
apache-2.0
|
NLeSC/embodied-emotions-scripts,NLeSC/embodied-emotions-scripts
|
Add script to count label set statistics
Added a script that outputs statistics about label sets in the data.
|
"""Count labelsets that occur in the multilabel data.
Input: name of the directory that contains the multilabel data in text files
(one text file per text).
Usage: python count_labelsets.py <input dir>
"""
import argparse
import codecs
import os
from collections import Counter
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('input_dir', help='the name of the directory '
'containing the multilabel data text files to count label sets in.')
args = parser.parse_args()
input_dir = args.input_dir
labelsets = Counter()
len_labelset = Counter()
print 'texts'
text_files = [fi for fi in os.listdir(input_dir) if fi.endswith('.txt')]
for text_file in text_files:
print text_file
in_file = os.path.join(input_dir, text_file)
with codecs.open(in_file, 'rb', 'utf-8') as f:
lines = f.readlines()
for line in lines:
parts = line.split('\t')
labels = parts[2].strip()
labelsets[labels] += 1
# count labelset lengths
if labels != 'None':
length = len(labels.split('_'))
len_labelset[str(length).zfill(3)] += 1
print '\n# different labelsets\t{}'.format(str(len(labelsets)))
# print lengths
print '\n# labels\tfrequency'
for le, freq in len_labelset.most_common():
print '{}\t{}'.format(le, freq)
# print labelsets
print '\nLabelset\tfrequency'
for ls, freq in labelsets.most_common():
print '{}\t{}'.format(ls, freq)
|
<commit_before><commit_msg>Add script to count label set statistics
Added a script that outputs statistics about label sets in the data.<commit_after>
|
"""Count labelsets that occur in the multilabel data.
Input: name of the directory that contains the multilabel data in text files
(one text file per text).
Usage: python count_labelsets.py <input dir>
"""
import argparse
import codecs
import os
from collections import Counter
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('input_dir', help='the name of the directory '
'containing the multilabel data text files to count label sets in.')
args = parser.parse_args()
input_dir = args.input_dir
labelsets = Counter()
len_labelset = Counter()
print 'texts'
text_files = [fi for fi in os.listdir(input_dir) if fi.endswith('.txt')]
for text_file in text_files:
print text_file
in_file = os.path.join(input_dir, text_file)
with codecs.open(in_file, 'rb', 'utf-8') as f:
lines = f.readlines()
for line in lines:
parts = line.split('\t')
labels = parts[2].strip()
labelsets[labels] += 1
# count labelset lengths
if labels != 'None':
length = len(labels.split('_'))
len_labelset[str(length).zfill(3)] += 1
print '\n# different labelsets\t{}'.format(str(len(labelsets)))
# print lengths
print '\n# labels\tfrequency'
for le, freq in len_labelset.most_common():
print '{}\t{}'.format(le, freq)
# print labelsets
print '\nLabelset\tfrequency'
for ls, freq in labelsets.most_common():
print '{}\t{}'.format(ls, freq)
|
Add script to count label set statistics
Added a script that outputs statistics about label sets in the data."""Count labelsets that occur in the multilabel data.
Input: name of the directory that contains the multilabel data in text files
(one text file per text).
Usage: python count_labelsets.py <input dir>
"""
import argparse
import codecs
import os
from collections import Counter
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('input_dir', help='the name of the directory '
'containing the multilabel data text files to count label sets in.')
args = parser.parse_args()
input_dir = args.input_dir
labelsets = Counter()
len_labelset = Counter()
print 'texts'
text_files = [fi for fi in os.listdir(input_dir) if fi.endswith('.txt')]
for text_file in text_files:
print text_file
in_file = os.path.join(input_dir, text_file)
with codecs.open(in_file, 'rb', 'utf-8') as f:
lines = f.readlines()
for line in lines:
parts = line.split('\t')
labels = parts[2].strip()
labelsets[labels] += 1
# count labelset lengths
if labels != 'None':
length = len(labels.split('_'))
len_labelset[str(length).zfill(3)] += 1
print '\n# different labelsets\t{}'.format(str(len(labelsets)))
# print lengths
print '\n# labels\tfrequency'
for le, freq in len_labelset.most_common():
print '{}\t{}'.format(le, freq)
# print labelsets
print '\nLabelset\tfrequency'
for ls, freq in labelsets.most_common():
print '{}\t{}'.format(ls, freq)
|
<commit_before><commit_msg>Add script to count label set statistics
Added a script that outputs statistics about label sets in the data.<commit_after>"""Count labelsets that occur in the multilabel data.
Input: name of the directory that contains the multilabel data in text files
(one text file per text).
Usage: python count_labelsets.py <input dir>
"""
import argparse
import codecs
import os
from collections import Counter
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('input_dir', help='the name of the directory '
'containing the multilabel data text files to count label sets in.')
args = parser.parse_args()
input_dir = args.input_dir
labelsets = Counter()
len_labelset = Counter()
print 'texts'
text_files = [fi for fi in os.listdir(input_dir) if fi.endswith('.txt')]
for text_file in text_files:
print text_file
in_file = os.path.join(input_dir, text_file)
with codecs.open(in_file, 'rb', 'utf-8') as f:
lines = f.readlines()
for line in lines:
parts = line.split('\t')
labels = parts[2].strip()
labelsets[labels] += 1
# count labelset lengths
if labels != 'None':
length = len(labels.split('_'))
len_labelset[str(length).zfill(3)] += 1
print '\n# different labelsets\t{}'.format(str(len(labelsets)))
# print lengths
print '\n# labels\tfrequency'
for le, freq in len_labelset.most_common():
print '{}\t{}'.format(le, freq)
# print labelsets
print '\nLabelset\tfrequency'
for ls, freq in labelsets.most_common():
print '{}\t{}'.format(ls, freq)
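As the parsing above implies, each input line is expected to be tab-separated with the label set in the third column, individual labels joined by underscores (or the literal string None when a sentence has no labels). A small illustrative sketch of that format (the sample line is made up):
line = 'sent_001\tsome sentence text\tAnger_Fear\n'   # hypothetical data line
parts = line.split('\t')
labels = parts[2].strip()
print(labels)                   # Anger_Fear
print(len(labels.split('_')))   # 2 -> a label set of size two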
|
|
2c3281754bd0e57a263a85f518eb49fbe6a8d72b
|
corehq/apps/importer/management/commands/import_cases.py
|
corehq/apps/importer/management/commands/import_cases.py
|
import json
from datetime import datetime
from django.core.management import BaseCommand, CommandError
from corehq.apps.importer.tasks import do_import
from corehq.apps.importer.util import ImporterConfig, ExcelFile
from corehq.apps.users.models import WebUser
class Command(BaseCommand):
help = "import cases from excel manually."
args = '<import_file> <config_file> <domain> <user>'
label = "import cases from excel manually."
def handle(self, *args, **options):
if len(args) != 4:
raise CommandError('Usage is import_cases %s' % self.args)
start = datetime.now()
export_file, config_file, domain, user_id = args
if '@' in user_id:
user = WebUser.get_by_username(user_id)
else:
user = WebUser.get(user_id)
if not user.is_member_of(domain):
raise CommandError("%s can't access %s" % (user, domain))
with open(config_file, 'r') as f:
config = ImporterConfig.from_json(f.read())
config.couch_user_id = user._id
spreadsheet = ExcelFile(export_file, True)
print json.dumps(do_import(spreadsheet, config, domain))
print 'finished in %s seconds' % (datetime.now() - start).seconds
|
import json
from datetime import datetime
from django.core.management import BaseCommand, CommandError
from dimagi.utils.web import json_handler
from corehq.apps.importer.tasks import do_import
from corehq.apps.importer.util import ImporterConfig, ExcelFile
from corehq.apps.users.models import WebUser
class Command(BaseCommand):
help = "import cases from excel manually."
args = '<import_file> <config_file> <domain> <user>'
label = "import cases from excel manually."
def handle(self, *args, **options):
if len(args) != 4:
raise CommandError('Usage is import_cases %s' % self.args)
start = datetime.now()
export_file, config_file, domain, user_id = args
if '@' in user_id:
user = WebUser.get_by_username(user_id)
else:
user = WebUser.get(user_id)
if not user.is_member_of(domain):
raise CommandError("%s can't access %s" % (user, domain))
with open(config_file, 'r') as f:
config = ImporterConfig.from_json(f.read())
config.couch_user_id = user._id
spreadsheet = ExcelFile(export_file, True)
print json.dumps(do_import(spreadsheet, config, domain),
default=json_handler)
print 'finished in %s seconds' % (datetime.now() - start).seconds
|
Use json_handler to force ugettext_lazy
|
Use json_handler to force ugettext_lazy
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq
|
import json
from datetime import datetime
from django.core.management import BaseCommand, CommandError
from corehq.apps.importer.tasks import do_import
from corehq.apps.importer.util import ImporterConfig, ExcelFile
from corehq.apps.users.models import WebUser
class Command(BaseCommand):
help = "import cases from excel manually."
args = '<import_file> <config_file> <domain> <user>'
label = "import cases from excel manually."
def handle(self, *args, **options):
if len(args) != 4:
raise CommandError('Usage is import_cases %s' % self.args)
start = datetime.now()
export_file, config_file, domain, user_id = args
if '@' in user_id:
user = WebUser.get_by_username(user_id)
else:
user = WebUser.get(user_id)
if not user.is_member_of(domain):
raise CommandError("%s can't access %s" % (user, domain))
with open(config_file, 'r') as f:
config = ImporterConfig.from_json(f.read())
config.couch_user_id = user._id
spreadsheet = ExcelFile(export_file, True)
print json.dumps(do_import(spreadsheet, config, domain))
print 'finished in %s seconds' % (datetime.now() - start).seconds
Use json_handler to force ugettext_lazy
|
import json
from datetime import datetime
from django.core.management import BaseCommand, CommandError
from dimagi.utils.web import json_handler
from corehq.apps.importer.tasks import do_import
from corehq.apps.importer.util import ImporterConfig, ExcelFile
from corehq.apps.users.models import WebUser
class Command(BaseCommand):
help = "import cases from excel manually."
args = '<import_file> <config_file> <domain> <user>'
label = "import cases from excel manually."
def handle(self, *args, **options):
if len(args) != 4:
raise CommandError('Usage is import_cases %s' % self.args)
start = datetime.now()
export_file, config_file, domain, user_id = args
if '@' in user_id:
user = WebUser.get_by_username(user_id)
else:
user = WebUser.get(user_id)
if not user.is_member_of(domain):
raise CommandError("%s can't access %s" % (user, domain))
with open(config_file, 'r') as f:
config = ImporterConfig.from_json(f.read())
config.couch_user_id = user._id
spreadsheet = ExcelFile(export_file, True)
print json.dumps(do_import(spreadsheet, config, domain),
default=json_handler)
print 'finished in %s seconds' % (datetime.now() - start).seconds
|
<commit_before>import json
from datetime import datetime
from django.core.management import BaseCommand, CommandError
from corehq.apps.importer.tasks import do_import
from corehq.apps.importer.util import ImporterConfig, ExcelFile
from corehq.apps.users.models import WebUser
class Command(BaseCommand):
help = "import cases from excel manually."
args = '<import_file> <config_file> <domain> <user>'
label = "import cases from excel manually."
def handle(self, *args, **options):
if len(args) != 4:
raise CommandError('Usage is import_cases %s' % self.args)
start = datetime.now()
export_file, config_file, domain, user_id = args
if '@' in user_id:
user = WebUser.get_by_username(user_id)
else:
user = WebUser.get(user_id)
if not user.is_member_of(domain):
raise CommandError("%s can't access %s" % (user, domain))
with open(config_file, 'r') as f:
config = ImporterConfig.from_json(f.read())
config.couch_user_id = user._id
spreadsheet = ExcelFile(export_file, True)
print json.dumps(do_import(spreadsheet, config, domain))
print 'finished in %s seconds' % (datetime.now() - start).seconds
<commit_msg>Use json_handler to force ugettext_lazy<commit_after>
|
import json
from datetime import datetime
from django.core.management import BaseCommand, CommandError
from dimagi.utils.web import json_handler
from corehq.apps.importer.tasks import do_import
from corehq.apps.importer.util import ImporterConfig, ExcelFile
from corehq.apps.users.models import WebUser
class Command(BaseCommand):
help = "import cases from excel manually."
args = '<import_file> <config_file> <domain> <user>'
label = "import cases from excel manually."
def handle(self, *args, **options):
if len(args) != 4:
raise CommandError('Usage is import_cases %s' % self.args)
start = datetime.now()
export_file, config_file, domain, user_id = args
if '@' in user_id:
user = WebUser.get_by_username(user_id)
else:
user = WebUser.get(user_id)
if not user.is_member_of(domain):
raise CommandError("%s can't access %s" % (user, domain))
with open(config_file, 'r') as f:
config = ImporterConfig.from_json(f.read())
config.couch_user_id = user._id
spreadsheet = ExcelFile(export_file, True)
print json.dumps(do_import(spreadsheet, config, domain),
default=json_handler)
print 'finished in %s seconds' % (datetime.now() - start).seconds
|
import json
from datetime import datetime
from django.core.management import BaseCommand, CommandError
from corehq.apps.importer.tasks import do_import
from corehq.apps.importer.util import ImporterConfig, ExcelFile
from corehq.apps.users.models import WebUser
class Command(BaseCommand):
help = "import cases from excel manually."
args = '<import_file> <config_file> <domain> <user>'
label = "import cases from excel manually."
def handle(self, *args, **options):
if len(args) != 4:
raise CommandError('Usage is import_cases %s' % self.args)
start = datetime.now()
export_file, config_file, domain, user_id = args
if '@' in user_id:
user = WebUser.get_by_username(user_id)
else:
user = WebUser.get(user_id)
if not user.is_member_of(domain):
raise CommandError("%s can't access %s" % (user, domain))
with open(config_file, 'r') as f:
config = ImporterConfig.from_json(f.read())
config.couch_user_id = user._id
spreadsheet = ExcelFile(export_file, True)
print json.dumps(do_import(spreadsheet, config, domain))
print 'finished in %s seconds' % (datetime.now() - start).seconds
Use json_handler to force ugettext_lazyimport json
from datetime import datetime
from django.core.management import BaseCommand, CommandError
from dimagi.utils.web import json_handler
from corehq.apps.importer.tasks import do_import
from corehq.apps.importer.util import ImporterConfig, ExcelFile
from corehq.apps.users.models import WebUser
class Command(BaseCommand):
help = "import cases from excel manually."
args = '<import_file> <config_file> <domain> <user>'
label = "import cases from excel manually."
def handle(self, *args, **options):
if len(args) != 4:
raise CommandError('Usage is import_cases %s' % self.args)
start = datetime.now()
export_file, config_file, domain, user_id = args
if '@' in user_id:
user = WebUser.get_by_username(user_id)
else:
user = WebUser.get(user_id)
if not user.is_member_of(domain):
raise CommandError("%s can't access %s" % (user, domain))
with open(config_file, 'r') as f:
config = ImporterConfig.from_json(f.read())
config.couch_user_id = user._id
spreadsheet = ExcelFile(export_file, True)
print json.dumps(do_import(spreadsheet, config, domain),
default=json_handler)
print 'finished in %s seconds' % (datetime.now() - start).seconds
|
<commit_before>import json
from datetime import datetime
from django.core.management import BaseCommand, CommandError
from corehq.apps.importer.tasks import do_import
from corehq.apps.importer.util import ImporterConfig, ExcelFile
from corehq.apps.users.models import WebUser
class Command(BaseCommand):
help = "import cases from excel manually."
args = '<import_file> <config_file> <domain> <user>'
label = "import cases from excel manually."
def handle(self, *args, **options):
if len(args) != 4:
raise CommandError('Usage is import_cases %s' % self.args)
start = datetime.now()
export_file, config_file, domain, user_id = args
if '@' in user_id:
user = WebUser.get_by_username(user_id)
else:
user = WebUser.get(user_id)
if not user.is_member_of(domain):
raise CommandError("%s can't access %s" % (user, domain))
with open(config_file, 'r') as f:
config = ImporterConfig.from_json(f.read())
config.couch_user_id = user._id
spreadsheet = ExcelFile(export_file, True)
print json.dumps(do_import(spreadsheet, config, domain))
print 'finished in %s seconds' % (datetime.now() - start).seconds
<commit_msg>Use json_handler to force ugettext_lazy<commit_after>import json
from datetime import datetime
from django.core.management import BaseCommand, CommandError
from dimagi.utils.web import json_handler
from corehq.apps.importer.tasks import do_import
from corehq.apps.importer.util import ImporterConfig, ExcelFile
from corehq.apps.users.models import WebUser
class Command(BaseCommand):
help = "import cases from excel manually."
args = '<import_file> <config_file> <domain> <user>'
label = "import cases from excel manually."
def handle(self, *args, **options):
if len(args) != 4:
raise CommandError('Usage is import_cases %s' % self.args)
start = datetime.now()
export_file, config_file, domain, user_id = args
if '@' in user_id:
user = WebUser.get_by_username(user_id)
else:
user = WebUser.get(user_id)
if not user.is_member_of(domain):
raise CommandError("%s can't access %s" % (user, domain))
with open(config_file, 'r') as f:
config = ImporterConfig.from_json(f.read())
config.couch_user_id = user._id
spreadsheet = ExcelFile(export_file, True)
print json.dumps(do_import(spreadsheet, config, domain),
default=json_handler)
print 'finished in %s seconds' % (datetime.now() - start).seconds
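The reason for default=json_handler above: json.dumps raises TypeError on Django's lazy translation proxies, which the commit subject says need to be forced, so a fallback encoder has to coerce them to text. A minimal illustration of the failure mode with a generic default (the dict structure is hypothetical, and ugettext_lazy stands in for whatever lazy strings do_import returns; json_handler itself lives in dimagi.utils.web):
import json
from django.utils.translation import ugettext_lazy as _

result = {'errors': [_('Case lookup failed')]}   # hypothetical result structure
try:
    json.dumps(result)
except TypeError as exc:
    print(exc)                                   # the lazy proxy is not JSON serializable
print(json.dumps(result, default=str))           # a fallback encoder forces the proxy to text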
|
2e766d9b7010cb8d3fefaf785102e869e69b4a95
|
math/transpose_of_matrix/python/transpose_of_matrix.py
|
math/transpose_of_matrix/python/transpose_of_matrix.py
|
def transpose_matrix(matrix):
matrix_rows_quantity = len(matrix)
matrix_column_quantity = len(matrix[0])
transposed_matrix = [[0] * matrix_rows_quantity for _ in range(matrix_column_quantity)]
for row in range(matrix_rows_quantity):
for column in range(matrix_column_quantity):
transposed_matrix[column][row] = matrix[row][column]
return transposed_matrix
|
Create function in python to transpose matrix
|
Create function in python to transpose matrix
|
Python
|
cc0-1.0
|
ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms
|
Create function in python to transpose matrix
|
def transpose_matrix(matrix):
matrix_rows_quantity = len(matrix)
matrix_column_quantity = len(matrix[0])
transposed_matrix = [[0] * matrix_rows_quantity for _ in range(matrix_column_quantity)]
for row in range(matrix_rows_quantity):
for column in range(matrix_column_quantity):
transposed_matrix[column][row] = matrix[row][column]
return transposed_matrix
|
<commit_before><commit_msg>Create function in python to transpose matrix<commit_after>
|
def transpose_matrix(matrix):
matrix_rows_quantity = len(matrix)
matrix_column_quantity = len(matrix[0])
transposed_matrix = [[0] * matrix_rows_quantity for _ in range(matrix_column_quantity)]
for row in range(matrix_rows_quantity):
for column in range(matrix_column_quantity):
transposed_matrix[column][row] = matrix[row][column]
return transposed_matrix
|
Create function in python to transpose matrixdef transpose_matrix(matrix):
matrix_rows_quantity = len(matrix)
matrix_column_quantity = len(matrix[0])
transposed_matrix = [[0] * matrix_rows_quantity for _ in range(matrix_column_quantity)]
for row in range(matrix_rows_quantity):
for column in range(matrix_column_quantity):
transposed_matrix[column][row] = matrix[row][column]
return transposed_matrix
|
<commit_before><commit_msg>Create function in python to transpose matrix<commit_after>def transpose_matrix(matrix):
matrix_rows_quantity = len(matrix)
matrix_column_quantity = len(matrix[0])
transposed_matrix = [[0] * matrix_rows_quantity for _ in range(matrix_column_quantity)]
for row in range(matrix_rows_quantity):
for column in range(matrix_column_quantity):
transposed_matrix[column][row] = matrix[row][column]
return transposed_matrix
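A quick usage sketch of the helper above (input values are illustrative); for lists of lists the built-in zip gives the same result in one line:
matrix = [[1, 2, 3],
          [4, 5, 6]]
print(transpose_matrix(matrix))              # [[1, 4], [2, 5], [3, 6]]
print([list(row) for row in zip(*matrix)])   # same result via zip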
|
|
8a4c8802ef8744b6e691e69adf256008adaddcea
|
migrations/versions/0130_service_email_reply_to_row.py
|
migrations/versions/0130_service_email_reply_to_row.py
|
"""empty message
Revision ID: 0130_service_email_reply_to_row
Revises: 0129_add_email_auth_permission
Create Date: 2017-08-29 14:09:41.042061
"""
# revision identifiers, used by Alembic.
revision = '0130_service_email_reply_to_row'
down_revision = '0129_add_email_auth_permission'
from alembic import op
NOTIFY_SERVICE_ID = 'd6aa2c68-a2d9-4437-ab19-3ae8eb202553'
EMAIL_REPLY_TO_ID = 'b3a58d57-2337-662a-4cba-40792a9322f2'
def upgrade():
op.execute("""
INSERT INTO service_email_reply_to
(id, service_id, email_address, is_default, created_at)
VALUES
('{}','{}', 'notify+1@digital.cabinet-office.gov.uk', 'f', NOW())
""".format(EMAIL_REPLY_TO_ID, NOTIFY_SERVICE_ID))
def downgrade():
op.execute("""
DELETE FROM service_email_reply_to
WHERE id = '{}'
""".format(EMAIL_REPLY_TO_ID))
|
Add notification email reply_to script
|
Add notification email reply_to script
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
Add notification email reply_to script
|
"""empty message
Revision ID: 0130_service_email_reply_to_row
Revises: 0129_add_email_auth_permission
Create Date: 2017-08-29 14:09:41.042061
"""
# revision identifiers, used by Alembic.
revision = '0130_service_email_reply_to_row'
down_revision = '0129_add_email_auth_permission'
from alembic import op
NOTIFY_SERVICE_ID = 'd6aa2c68-a2d9-4437-ab19-3ae8eb202553'
EMAIL_REPLY_TO_ID = 'b3a58d57-2337-662a-4cba-40792a9322f2'
def upgrade():
op.execute("""
INSERT INTO service_email_reply_to
(id, service_id, email_address, is_default, created_at)
VALUES
('{}','{}', 'notify+1@digital.cabinet-office.gov.uk', 'f', NOW())
""".format(EMAIL_REPLY_TO_ID, NOTIFY_SERVICE_ID))
def downgrade():
op.execute("""
DELETE FROM service_email_reply_to
WHERE id = '{}'
""".format(EMAIL_REPLY_TO_ID))
|
<commit_before><commit_msg>Add notification email reply_to script<commit_after>
|
"""empty message
Revision ID: 0130_service_email_reply_to_row
Revises: 0129_add_email_auth_permission
Create Date: 2017-08-29 14:09:41.042061
"""
# revision identifiers, used by Alembic.
revision = '0130_service_email_reply_to_row'
down_revision = '0129_add_email_auth_permission'
from alembic import op
NOTIFY_SERVICE_ID = 'd6aa2c68-a2d9-4437-ab19-3ae8eb202553'
EMAIL_REPLY_TO_ID = 'b3a58d57-2337-662a-4cba-40792a9322f2'
def upgrade():
op.execute("""
INSERT INTO service_email_reply_to
(id, service_id, email_address, is_default, created_at)
VALUES
('{}','{}', 'notify+1@digital.cabinet-office.gov.uk', 'f', NOW())
""".format(EMAIL_REPLY_TO_ID, NOTIFY_SERVICE_ID))
def downgrade():
op.execute("""
DELETE FROM service_email_reply_to
WHERE id = '{}'
""".format(EMAIL_REPLY_TO_ID))
|
Add notification email reply_to script"""empty message
Revision ID: 0130_service_email_reply_to_row
Revises: 0129_add_email_auth_permission
Create Date: 2017-08-29 14:09:41.042061
"""
# revision identifiers, used by Alembic.
revision = '0130_service_email_reply_to_row'
down_revision = '0129_add_email_auth_permission'
from alembic import op
NOTIFY_SERVICE_ID = 'd6aa2c68-a2d9-4437-ab19-3ae8eb202553'
EMAIL_REPLY_TO_ID = 'b3a58d57-2337-662a-4cba-40792a9322f2'
def upgrade():
op.execute("""
INSERT INTO service_email_reply_to
(id, service_id, email_address, is_default, created_at)
VALUES
('{}','{}', 'notify+1@digital.cabinet-office.gov.uk', 'f', NOW())
""".format(EMAIL_REPLY_TO_ID, NOTIFY_SERVICE_ID))
def downgrade():
op.execute("""
DELETE FROM service_email_reply_to
WHERE id = '{}'
""".format(EMAIL_REPLY_TO_ID))
|
<commit_before><commit_msg>Add notification email reply_to script<commit_after>"""empty message
Revision ID: 0130_service_email_reply_to_row
Revises: 0129_add_email_auth_permission
Create Date: 2017-08-29 14:09:41.042061
"""
# revision identifiers, used by Alembic.
revision = '0130_service_email_reply_to_row'
down_revision = '0129_add_email_auth_permission'
from alembic import op
NOTIFY_SERVICE_ID = 'd6aa2c68-a2d9-4437-ab19-3ae8eb202553'
EMAIL_REPLY_TO_ID = 'b3a58d57-2337-662a-4cba-40792a9322f2'
def upgrade():
op.execute("""
INSERT INTO service_email_reply_to
(id, service_id, email_address, is_default, created_at)
VALUES
('{}','{}', 'notify+1@digital.cabinet-office.gov.uk', 'f', NOW())
""".format(EMAIL_REPLY_TO_ID, NOTIFY_SERVICE_ID))
def downgrade():
op.execute("""
DELETE FROM service_email_reply_to
WHERE id = '{}'
""".format(EMAIL_REPLY_TO_ID))
|
|
b2d7740cf1e328342b76f744f17e91029ee33061
|
corehq/apps/users/management/commands/clear_bad_user_data.py
|
corehq/apps/users/management/commands/clear_bad_user_data.py
|
from django.core.management.base import BaseCommand
from corehq.apps.es import UserES
from corehq.apps.users.models import CommCareUser
from corehq.util.couch import iter_update, DocUpdate
from corehq.util.log import with_progress_bar
class Command(BaseCommand):
args = ""
help = ("Clears commcare_location_id and commtrack-supply-point on any "
"users without location_id set")
def handle(self, *args, **options):
clean_users()
def get_bad_user_ids():
res = (UserES()
.mobile_users()
.empty("location_id")
.fields(["_id", "domain", "username", "user_data.commcare_location_id"])
.run().hits)
return [u['_id'] for u in res
if u.get('user_data', {}).get('commcare_location_id')
or u.get('user_data', {}).get('commtrack-supply-point')]
def clean_user(doc):
"""Take any users with no location_id and clear the location user_data"""
if doc['location_id']:
return
if (
doc.get('user_data', {}).pop('commcare_location_id', False)
or doc.get('user_data', {}).pop('commtrack-supply-point', False)
):
return DocUpdate(doc)
def clean_users():
all_ids = with_progress_bar(get_bad_user_ids())
iter_update(CommCareUser.get_db(), clean_user, all_ids, verbose=True)
|
Add mgmt cmd to clear bad location user data
|
Add mgmt cmd to clear bad location user data
|
Python
|
bsd-3-clause
|
qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq
|
Add mgmt cmd to clear bad location user data
|
from django.core.management.base import BaseCommand
from corehq.apps.es import UserES
from corehq.apps.users.models import CommCareUser
from corehq.util.couch import iter_update, DocUpdate
from corehq.util.log import with_progress_bar
class Command(BaseCommand):
args = ""
help = ("Clears commcare_location_id and commtrack-supply-point on any "
"users without location_id set")
def handle(self, *args, **options):
clean_users()
def get_bad_user_ids():
res = (UserES()
.mobile_users()
.empty("location_id")
.fields(["_id", "domain", "username", "user_data.commcare_location_id"])
.run().hits)
return [u['_id'] for u in res
if u.get('user_data', {}).get('commcare_location_id')
or u.get('user_data', {}).get('commtrack-supply-point')]
def clean_user(doc):
"""Take any users with no location_id and clear the location user_data"""
if doc['location_id']:
return
if (
doc.get('user_data', {}).pop('commcare_location_id', False)
or doc.get('user_data', {}).pop('commtrack-supply-point', False)
):
return DocUpdate(doc)
def clean_users():
all_ids = with_progress_bar(get_bad_user_ids())
iter_update(CommCareUser.get_db(), clean_user, all_ids, verbose=True)
|
<commit_before><commit_msg>Add mgmt cmd to clear bad location user data<commit_after>
|
from django.core.management.base import BaseCommand
from corehq.apps.es import UserES
from corehq.apps.users.models import CommCareUser
from corehq.util.couch import iter_update, DocUpdate
from corehq.util.log import with_progress_bar
class Command(BaseCommand):
args = ""
help = ("Clears commcare_location_id and commtrack-supply-point on any "
"users without location_id set")
def handle(self, *args, **options):
clean_users()
def get_bad_user_ids():
res = (UserES()
.mobile_users()
.empty("location_id")
.fields(["_id", "domain", "username", "user_data.commcare_location_id"])
.run().hits)
return [u['_id'] for u in res
if u.get('user_data', {}).get('commcare_location_id')
or u.get('user_data', {}).get('commtrack-supply-point')]
def clean_user(doc):
"""Take any users with no location_id and clear the location user_data"""
if doc['location_id']:
return
if (
doc.get('user_data', {}).pop('commcare_location_id', False)
or doc.get('user_data', {}).pop('commtrack-supply-point', False)
):
return DocUpdate(doc)
def clean_users():
all_ids = with_progress_bar(get_bad_user_ids())
iter_update(CommCareUser.get_db(), clean_user, all_ids, verbose=True)
|
Add mgmt cmd to clear bad location user datafrom django.core.management.base import BaseCommand
from corehq.apps.es import UserES
from corehq.apps.users.models import CommCareUser
from corehq.util.couch import iter_update, DocUpdate
from corehq.util.log import with_progress_bar
class Command(BaseCommand):
args = ""
help = ("Clears commcare_location_id and commtrack-supply-point on any "
"users without location_id set")
def handle(self, *args, **options):
clean_users()
def get_bad_user_ids():
res = (UserES()
.mobile_users()
.empty("location_id")
.fields(["_id", "domain", "username", "user_data.commcare_location_id"])
.run().hits)
return [u['_id'] for u in res
if u.get('user_data', {}).get('commcare_location_id')
or u.get('user_data', {}).get('commtrack-supply-point')]
def clean_user(doc):
"""Take any users with no location_id and clear the location user_data"""
if doc['location_id']:
return
if (
doc.get('user_data', {}).pop('commcare_location_id', False)
or doc.get('user_data', {}).pop('commtrack-supply-point', False)
):
return DocUpdate(doc)
def clean_users():
all_ids = with_progress_bar(get_bad_user_ids())
iter_update(CommCareUser.get_db(), clean_user, all_ids, verbose=True)
|
<commit_before><commit_msg>Add mgmt cmd to clear bad location user data<commit_after>from django.core.management.base import BaseCommand
from corehq.apps.es import UserES
from corehq.apps.users.models import CommCareUser
from corehq.util.couch import iter_update, DocUpdate
from corehq.util.log import with_progress_bar
class Command(BaseCommand):
args = ""
help = ("Clears commcare_location_id and commtrack-supply-point on any "
"users without location_id set")
def handle(self, *args, **options):
clean_users()
def get_bad_user_ids():
res = (UserES()
.mobile_users()
.empty("location_id")
.fields(["_id", "domain", "username", "user_data.commcare_location_id"])
.run().hits)
return [u['_id'] for u in res
if u.get('user_data', {}).get('commcare_location_id')
or u.get('user_data', {}).get('commtrack-supply-point')]
def clean_user(doc):
"""Take any users with no location_id and clear the location user_data"""
if doc['location_id']:
return
if (
doc.get('user_data', {}).pop('commcare_location_id', False)
or doc.get('user_data', {}).pop('commtrack-supply-point', False)
):
return DocUpdate(doc)
def clean_users():
all_ids = with_progress_bar(get_bad_user_ids())
iter_update(CommCareUser.get_db(), clean_user, all_ids, verbose=True)
|
|
adefe5d762da042cfa6589d6cfcbc337f98921da
|
sphinxcontrib/openstreetmap.py
|
sphinxcontrib/openstreetmap.py
|
# -*- coding: utf-8 -*-
"""
sphinxcontrib.openstreetmap
===========================
Embed OpenStreetMap on your documentation.
:copyright: Copyright 2015 HAYASHI Kentaro <kenhys@gmail.com>
:license: BSD, see LICENSE for details.
"""
from docutils import nodes
from docutils.parsers.rst import directives
from sphinx.util.compat import Directive
class openstreetmap(nodes.General, nodes.Element):
pass
class OpenStreetMapDirective(Directive):
"""Directive for embedding OpenStreetMap"""
has_content = False
option_spec = {
'name': directives.unchanged,
'label': directives.unchanged
}
def run(self):
node = openstreetmap()
return [node]
def visit_openstreetmap_node(self, node):
self.body.append("<div id='openstreetmap' style='color:red'>OpenStreetMap directive</div>")
def depart_openstreetmap_node(self, node):
pass
def setup(app):
app.add_node(openstreetmap,
html=(visit_openstreetmap_node, depart_openstreetmap_node))
app.add_directive('openstreetmap', OpenStreetMapDirective)
|
Add template script for supporting OpenStreetMap
|
Add template script for supporting OpenStreetMap
|
Python
|
bsd-2-clause
|
kenhys/sphinxcontrib-openstreetmap,kenhys/sphinxcontrib-openstreetmap
|
Add template script for supporting OpenStreetMap
|
# -*- coding: utf-8 -*-
"""
sphinxcontrib.openstreetmap
===========================
Embed OpenStreetMap on your documentation.
:copyright: Copyright 2015 HAYASHI Kentaro <kenhys@gmail.com>
:license: BSD, see LICENSE for details.
"""
from docutils import nodes
from docutils.parsers.rst import directives
from sphinx.util.compat import Directive
class openstreetmap(nodes.General, nodes.Element):
pass
class OpenStreetMapDirective(Directive):
"""Directive for embedding OpenStreetMap"""
has_content = False
option_spec = {
'name': directives.unchanged,
'label': directives.unchanged
}
def run(self):
node = openstreetmap()
return [node]
def visit_openstreetmap_node(self, node):
self.body.append("<div id='openstreetmap' style='color:red'>OpenStreetMap directive</div>")
def depart_openstreetmap_node(self, node):
pass
def setup(app):
app.add_node(openstreetmap,
html=(visit_openstreetmap_node, depart_openstreetmap_node))
app.add_directive('openstreetmap', OpenStreetMapDirective)
|
<commit_before><commit_msg>Add template script for supporting OpenStreetMap<commit_after>
|
# -*- coding: utf-8 -*-
"""
sphinxcontrib.openstreetmap
===========================
Embed OpenStreetMap on your documentation.
:copyright: Copyright 2015 HAYASHI Kentaro <kenhys@gmail.com>
:license: BSD, see LICENSE for details.
"""
from docutils import nodes
from docutils.parsers.rst import directives
from sphinx.util.compat import Directive
class openstreetmap(nodes.General, nodes.Element):
pass
class OpenStreetMapDirective(Directive):
"""Directive for embedding OpenStreetMap"""
has_content = False
option_spec = {
'name': directives.unchanged,
'label': directives.unchanged
}
def run(self):
node = openstreetmap()
return [node]
def visit_openstreetmap_node(self, node):
self.body.append("<div id='openstreetmap' style='color:red'>OpenStreetMap directive</div>")
def depart_openstreetmap_node(self, node):
pass
def setup(app):
app.add_node(openstreetmap,
html=(visit_openstreetmap_node, depart_openstreetmap_node))
app.add_directive('openstreetmap', OpenStreetMapDirective)
|
Add template script for supporting OpenStreetMap# -*- coding: utf-8 -*-
"""
sphinxcontrib.openstreetmap
===========================
Embed OpenStreetMap on your documentation.
:copyright: Copyright 2015 HAYASHI Kentaro <kenhys@gmail.com>
:license: BSD, see LICENSE for details.
"""
from docutils import nodes
from docutils.parsers.rst import directives
from sphinx.util.compat import Directive
class openstreetmap(nodes.General, nodes.Element):
pass
class OpenStreetMapDirective(Directive):
"""Directive for embedding OpenStreetMap"""
has_content = False
option_spec = {
'name': directives.unchanged,
'label': directives.unchanged
}
def run(self):
node = openstreetmap()
return [node]
def visit_openstreetmap_node(self, node):
self.body.append("<div id='openstreetmap' style='color:red'>OpenStreetMap directive</div>")
def depart_openstreetmap_node(self, node):
pass
def setup(app):
app.add_node(openstreetmap,
html=(visit_openstreetmap_node, depart_openstreetmap_node))
app.add_directive('openstreetmap', OpenStreetMapDirective)
|
<commit_before><commit_msg>Add template script for supporting OpenStreetMap<commit_after># -*- coding: utf-8 -*-
"""
sphinxcontrib.openstreetmap
===========================
Embed OpenStreetMap on your documentation.
:copyright: Copyright 2015 HAYASHI Kentaro <kenhys@gmail.com>
:license: BSD, see LICENSE for details.
"""
from docutils import nodes
from docutils.parsers.rst import directives
from sphinx.util.compat import Directive
class openstreetmap(nodes.General, nodes.Element):
pass
class OpenStreetMapDirective(Directive):
"""Directive for embedding OpenStreetMap"""
has_content = False
option_spec = {
'name': directives.unchanged,
'label': directives.unchanged
}
def run(self):
node = openstreetmap()
return [node]
def visit_openstreetmap_node(self, node):
self.body.append("<div id='openstreetmap' style='color:red'>OpenStreetMap directive</div>")
def depart_openstreetmap_node(self, node):
pass
def setup(app):
app.add_node(openstreetmap,
html=(visit_openstreetmap_node, depart_openstreetmap_node))
app.add_directive('openstreetmap', OpenStreetMapDirective)
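For the directive to be usable in a documentation project, the extension module also has to be listed in the Sphinx configuration; a minimal conf.py sketch (assuming the package is importable as sphinxcontrib.openstreetmap):
# conf.py
extensions = [
    'sphinxcontrib.openstreetmap',
]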
|
|
926fc398b24170d418e0836b8ea23320dc7ff193
|
umibukela/migrations/0012_surveykoboproject.py
|
umibukela/migrations/0012_surveykoboproject.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('umibukela', '0011_cycleresultset_published'),
]
operations = [
migrations.CreateModel(
name='SurveyKoboProject',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('url', models.TextField(unique=True)),
('survey', models.ForeignKey(to='umibukela.Survey')),
],
),
]
|
Add SurveyKoboProject which optionally indicates a form/submission origin
|
Add SurveyKoboProject which optionally indicates a form/submission origin
|
Python
|
mit
|
Code4SA/umibukela,Code4SA/umibukela,Code4SA/umibukela,Code4SA/umibukela
|
Add SurveyKoboProject which optionally indicates a form/submission origin
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('umibukela', '0011_cycleresultset_published'),
]
operations = [
migrations.CreateModel(
name='SurveyKoboProject',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('url', models.TextField(unique=True)),
('survey', models.ForeignKey(to='umibukela.Survey')),
],
),
]
|
<commit_before><commit_msg>Add SurveyKoboProject which optionally indicates a form/submission origin<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('umibukela', '0011_cycleresultset_published'),
]
operations = [
migrations.CreateModel(
name='SurveyKoboProject',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('url', models.TextField(unique=True)),
('survey', models.ForeignKey(to='umibukela.Survey')),
],
),
]
|
Add SurveyKoboProject which optionally indicates a form/submission origin# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('umibukela', '0011_cycleresultset_published'),
]
operations = [
migrations.CreateModel(
name='SurveyKoboProject',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('url', models.TextField(unique=True)),
('survey', models.ForeignKey(to='umibukela.Survey')),
],
),
]
|
<commit_before><commit_msg>Add SurveyKoboProject which optionally indicates a form/submission origin<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('umibukela', '0011_cycleresultset_published'),
]
operations = [
migrations.CreateModel(
name='SurveyKoboProject',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('url', models.TextField(unique=True)),
('survey', models.ForeignKey(to='umibukela.Survey')),
],
),
]
|
|
2a61cdcad1de2d4b080a91e7eaca714a3e2ec68b
|
test/lib/environment_test.py
|
test/lib/environment_test.py
|
# Run the following command to test:
#
# (in /usr/local/googkit)
# $ python -m {test_module_name}
#
# See also: http://docs.python.org/3.3/library/unittest.html#command-line-interface
#
# We cannot use unittest.mock on python 2.x!
# Please install the Mock module when you use Python 2.x.
#
# $ easy_install -U Mock
#
# See also: http://www.voidspace.org.uk/python/mock/#installing
import unittest
import os
try:
# Python 3.3 or later
import unittest.mock as mock
except ImportError:
# Python 2.x or 3.2-
import mock
from lib.environment import Environment
class TestEnvironment(unittest.TestCase):
# run {{{
def test_init(self):
args = mock.MagicMock()
config = mock.MagicMock()
env = Environment(args, config)
self.assertEqual(env.args, args)
self.assertEqual(env.config, config)
# }}}
if __name__ == '__main__':
unittest.main()
# vim: fdm=marker
|
Add a test for lib.environment
|
Add a test for lib.environment
|
Python
|
mit
|
googkit/googkit,googkit/googkit,googkit/googkit
|
Add a test for lib.environment
|
# Run the following command to test:
#
# (in /usr/local/googkit)
# $ python -m {test_module_name}
#
# See also: http://docs.python.org/3.3/library/unittest.html#command-line-interface
#
# We cannot use unittest.mock on python 2.x!
# Please install the Mock module when you use Python 2.x.
#
# $ easy_install -U Mock
#
# See also: http://www.voidspace.org.uk/python/mock/#installing
import unittest
import os
try:
# Python 3.3 or later
import unittest.mock as mock
except ImportError:
# Python 2.x or 3.2-
import mock
from lib.environment import Environment
class TestEnvironment(unittest.TestCase):
# run {{{
def test_init(self):
args = mock.MagicMock()
config = mock.MagicMock()
env = Environment(args, config)
self.assertEqual(env.args, args)
self.assertEqual(env.config, config)
# }}}
if __name__ == '__main__':
unittest.main()
# vim: fdm=marker
|
<commit_before><commit_msg>Add a test for lib.environment<commit_after>
|
# Run the following command to test:
#
# (in /usr/local/googkit)
# $ python -m {test_module_name}
#
# See also: http://docs.python.org/3.3/library/unittest.html#command-line-interface
#
# We cannot use unittest.mock on python 2.x!
# Please install the Mock module when you use Python 2.x.
#
# $ easy_install -U Mock
#
# See also: http://www.voidspace.org.uk/python/mock/#installing
import unittest
import os
try:
# Python 3.3 or later
import unittest.mock as mock
except ImportError:
# Python 2.x or 3.2-
import mock
from lib.environment import Environment
class TestEnvironment(unittest.TestCase):
# run {{{
def test_init(self):
args = mock.MagicMock()
config = mock.MagicMock()
env = Environment(args, config)
self.assertEqual(env.args, args)
self.assertEqual(env.config, config)
# }}}
if __name__ == '__main__':
unittest.main()
# vim: fdm=marker
|
Add a test for lib.environment# Run the following command to test:
#
# (in /usr/local/googkit)
# $ python -m {test_module_name}
#
# See also: http://docs.python.org/3.3/library/unittest.html#command-line-interface
#
# We cannot use unittest.mock on python 2.x!
# Please install the Mock module when you use Python 2.x.
#
# $ easy_install -U Mock
#
# See also: http://www.voidspace.org.uk/python/mock/#installing
import unittest
import os
try:
# Python 3.3 or later
import unittest.mock as mock
except ImportError:
# Python 2.x or 3.2-
import mock
from lib.environment import Environment
class TestEnvironment(unittest.TestCase):
# run {{{
def test_init(self):
args = mock.MagicMock()
config = mock.MagicMock()
env = Environment(args, config)
self.assertEqual(env.args, args)
self.assertEqual(env.config, config)
# }}}
if __name__ == '__main__':
unittest.main()
# vim: fdm=marker
|
<commit_before><commit_msg>Add a test for lib.environment<commit_after># Run the following command to test:
#
# (in /usr/local/googkit)
# $ python -m {test_module_name}
#
# See also: http://docs.python.org/3.3/library/unittest.html#command-line-interface
#
# We cannot use unittest.mock on python 2.x!
# Please install the Mock module when you use Python 2.x.
#
# $ easy_install -U Mock
#
# See also: http://www.voidspace.org.uk/python/mock/#installing
import unittest
import os
try:
# Python 3.3 or later
import unittest.mock as mock
except ImportError:
# Python 2.x or 3.2-
import mock
from lib.environment import Environment
class TestEnvironment(unittest.TestCase):
# run {{{
def test_init(self):
args = mock.MagicMock()
config = mock.MagicMock()
env = Environment(args, config)
self.assertEqual(env.args, args)
self.assertEqual(env.config, config)
# }}}
if __name__ == '__main__':
unittest.main()
# vim: fdm=marker
|
|
11f1e4b8f12759e0492626e8d75c79c760d6ffcb
|
tests/basics/tests/types1.py
|
tests/basics/tests/types1.py
|
# basic types
print(bool)
print(int)
print(float)
print(complex)
print(tuple)
print(list)
print(set)
print(dict)
print(type(bool()) == bool)
print(type(int()) == int)
print(type(float()) == float)
print(type(complex()) == complex)
print(type(tuple()) == tuple)
print(type(list()) == list)
print(type(set()) == set)
print(type(dict()) == dict)
print(type(False) == bool)
print(type(0) == int)
print(type(0.0) == float)
print(type(1j) == complex)
print(type(()) == tuple)
print(type([]) == list)
print(type({None}) == set)
print(type({}) == dict)
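One detail the test above relies on: an empty {} literal is a dict in Python, so the set case has to use a non-empty literal such as {None}. A quick check:
print(type({}) == dict)      # True: {} is an empty dict, not a set
print(type({None}) == set)   # True: a non-empty brace literal of values is a set
print(type(set()) == set)    # True: set() is how an empty set is written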
|
Add test for basic builtin types.
|
Add test for basic builtin types.
|
Python
|
mit
|
alex-robbins/micropython,supergis/micropython,stonegithubs/micropython,blmorris/micropython,Peetz0r/micropython-esp32,ganshun666/micropython,emfcamp/micropython,slzatz/micropython,blazewicz/micropython,jmarcelino/pycom-micropython,noahchense/micropython,vriera/micropython,blmorris/micropython,ahotam/micropython,dhylands/micropython,neilh10/micropython,blmorris/micropython,dinau/micropython,ceramos/micropython,supergis/micropython,cnoviello/micropython,firstval/micropython,chrisdearman/micropython,mpalomer/micropython,galenhz/micropython,dinau/micropython,puuu/micropython,martinribelotta/micropython,henriknelson/micropython,lowRISC/micropython,chrisdearman/micropython,dxxb/micropython,feilongfl/micropython,ruffy91/micropython,mhoffma/micropython,mgyenik/micropython,micropython/micropython-esp32,tuc-osg/micropython,ernesto-g/micropython,paul-xxx/micropython,heisewangluo/micropython,torwag/micropython,noahwilliamsson/micropython,cnoviello/micropython,jlillest/micropython,aitjcize/micropython,xuxiaoxin/micropython,hiway/micropython,pramasoul/micropython,rubencabrera/micropython,jimkmc/micropython,omtinez/micropython,noahwilliamsson/micropython,SHA2017-badge/micropython-esp32,xhat/micropython,adafruit/circuitpython,jlillest/micropython,aitjcize/micropython,henriknelson/micropython,jlillest/micropython,SungEun-Steve-Kim/test-mp,pozetroninc/micropython,HenrikSolver/micropython,ganshun666/micropython,Timmenem/micropython,utopiaprince/micropython,alex-march/micropython,ceramos/micropython,blazewicz/micropython,oopy/micropython,firstval/micropython,pfalcon/micropython,chrisdearman/micropython,SHA2017-badge/micropython-esp32,kostyll/micropython,bvernoux/micropython,dinau/micropython,feilongfl/micropython,ruffy91/micropython,lbattraw/micropython,neilh10/micropython,redbear/micropython,danicampora/micropython,alex-robbins/micropython,aethaniel/micropython,lbattraw/micropython,henriknelson/micropython,infinnovation/micropython,TDAbboud/micropython,cwyark/micropython,TDAbboud/micropython,jmarcelino/pycom-micropython,ganshun666/micropython,praemdonck/micropython,ernesto-g/micropython,ahotam/micropython,blmorris/micropython,stonegithubs/micropython,misterdanb/micropython,vitiral/micropython,matthewelse/micropython,infinnovation/micropython,puuu/micropython,slzatz/micropython,matthewelse/micropython,hosaka/micropython,suda/micropython,adafruit/circuitpython,lowRISC/micropython,dxxb/micropython,pramasoul/micropython,adamkh/micropython,skybird6672/micropython,methoxid/micropystat,misterdanb/micropython,mpalomer/micropython,MrSurly/micropython,ceramos/micropython,tralamazza/micropython,MrSurly/micropython,MrSurly/micropython-esp32,ryannathans/micropython,swegener/micropython,cwyark/micropython,xyb/micropython,ahotam/micropython,jimkmc/micropython,bvernoux/micropython,deshipu/micropython,tobbad/micropython,pramasoul/micropython,danicampora/micropython,praemdonck/micropython,orionrobots/micropython,omtinez/micropython,tobbad/micropython,rubencabrera/micropython,paul-xxx/micropython,HenrikSolver/micropython,selste/micropython,mpalomer/micropython,trezor/micropython,ceramos/micropython,aethaniel/micropython,slzatz/micropython,ericsnowcurrently/micropython,infinnovation/micropython,ericsnowcurrently/micropython,micropython/micropython-esp32,toolmacher/micropython,puuu/micropython,vriera/micropython,danicampora/micropython,lowRISC/micropython,pfalcon/micropython,martinribelotta/micropython,skybird6672/micropython,turbinenreiter/micropython,pfalcon/micropython,slzatz/micropython,ericsnowcurrently/micropython,galenhz/m
icropython,trezor/micropython,trezor/micropython,MrSurly/micropython,vriera/micropython,jmarcelino/pycom-micropython,martinribelotta/micropython,tobbad/micropython,hosaka/micropython,ganshun666/micropython,galenhz/micropython,micropython/micropython-esp32,ceramos/micropython,pozetroninc/micropython,adafruit/circuitpython,lowRISC/micropython,warner83/micropython,turbinenreiter/micropython,chrisdearman/micropython,lbattraw/micropython,MrSurly/micropython,adafruit/micropython,misterdanb/micropython,misterdanb/micropython,stonegithubs/micropython,Vogtinator/micropython,neilh10/micropython,toolmacher/micropython,Timmenem/micropython,puuu/micropython,orionrobots/micropython,redbear/micropython,Timmenem/micropython,drrk/micropython,adamkh/micropython,redbear/micropython,alex-march/micropython,xuxiaoxin/micropython,deshipu/micropython,adamkh/micropython,hiway/micropython,orionrobots/micropython,ruffy91/micropython,trezor/micropython,rubencabrera/micropython,AriZuu/micropython,paul-xxx/micropython,adafruit/micropython,alex-robbins/micropython,redbear/micropython,Peetz0r/micropython-esp32,AriZuu/micropython,firstval/micropython,jlillest/micropython,trezor/micropython,SungEun-Steve-Kim/test-mp,AriZuu/micropython,tralamazza/micropython,ahotam/micropython,mgyenik/micropython,xuxiaoxin/micropython,warner83/micropython,bvernoux/micropython,firstval/micropython,Timmenem/micropython,emfcamp/micropython,neilh10/micropython,pozetroninc/micropython,Peetz0r/micropython-esp32,suda/micropython,toolmacher/micropython,suda/micropython,turbinenreiter/micropython,mhoffma/micropython,adafruit/circuitpython,ryannathans/micropython,heisewangluo/micropython,swegener/micropython,ChuckM/micropython,matthewelse/micropython,HenrikSolver/micropython,adamkh/micropython,noahchense/micropython,infinnovation/micropython,micropython/micropython-esp32,swegener/micropython,cloudformdesign/micropython,ericsnowcurrently/micropython,alex-robbins/micropython,jimkmc/micropython,aitjcize/micropython,alex-march/micropython,EcmaXp/micropython,mianos/micropython,adafruit/circuitpython,matthewelse/micropython,rubencabrera/micropython,hosaka/micropython,selste/micropython,adafruit/micropython,MrSurly/micropython-esp32,Vogtinator/micropython,xuxiaoxin/micropython,ernesto-g/micropython,deshipu/micropython,oopy/micropython,TDAbboud/micropython,hiway/micropython,tralamazza/micropython,KISSMonX/micropython,mianos/micropython,xhat/micropython,bvernoux/micropython,blazewicz/micropython,noahchense/micropython,ruffy91/micropython,omtinez/micropython,feilongfl/micropython,xyb/micropython,dhylands/micropython,PappaPeppar/micropython,xyb/micropython,kerneltask/micropython,omtinez/micropython,ganshun666/micropython,redbear/micropython,xhat/micropython,pozetroninc/micropython,swegener/micropython,tobbad/micropython,ryannathans/micropython,vitiral/micropython,emfcamp/micropython,praemdonck/micropython,dinau/micropython,pramasoul/micropython,EcmaXp/micropython,blazewicz/micropython,tdautc19841202/micropython,tobbad/micropython,mhoffma/micropython,warner83/micropython,ChuckM/micropython,KISSMonX/micropython,cloudformdesign/micropython,SungEun-Steve-Kim/test-mp,vitiral/micropython,oopy/micropython,Vogtinator/micropython,cwyark/micropython,SHA2017-badge/micropython-esp32,mhoffma/micropython,lbattraw/micropython,methoxid/micropystat,hiway/micropython,dmazzella/micropython,praemdonck/micropython,cnoviello/micropython,hosaka/micropython,KISSMonX/micropython,Peetz0r/micropython-esp32,ernesto-g/micropython,dxxb/micropython,kerneltask/micropython,KISSMonX/micropython,
drrk/micropython,praemdonck/micropython,matthewelse/micropython,adamkh/micropython,deshipu/micropython,deshipu/micropython,torwag/micropython,toolmacher/micropython,EcmaXp/micropython,neilh10/micropython,tuc-osg/micropython,cnoviello/micropython,MrSurly/micropython-esp32,martinribelotta/micropython,heisewangluo/micropython,drrk/micropython,dxxb/micropython,tuc-osg/micropython,ahotam/micropython,xhat/micropython,KISSMonX/micropython,skybird6672/micropython,SungEun-Steve-Kim/test-mp,dinau/micropython,ernesto-g/micropython,oopy/micropython,cloudformdesign/micropython,matthewelse/micropython,mpalomer/micropython,Peetz0r/micropython-esp32,skybird6672/micropython,cwyark/micropython,heisewangluo/micropython,lowRISC/micropython,ryannathans/micropython,stonegithubs/micropython,henriknelson/micropython,ChuckM/micropython,selste/micropython,tuc-osg/micropython,dmazzella/micropython,noahwilliamsson/micropython,PappaPeppar/micropython,tdautc19841202/micropython,vitiral/micropython,cnoviello/micropython,orionrobots/micropython,heisewangluo/micropython,PappaPeppar/micropython,kostyll/micropython,kerneltask/micropython,misterdanb/micropython,aethaniel/micropython,xyb/micropython,dmazzella/micropython,adafruit/micropython,galenhz/micropython,drrk/micropython,EcmaXp/micropython,skybird6672/micropython,methoxid/micropystat,alex-march/micropython,mgyenik/micropython,HenrikSolver/micropython,pfalcon/micropython,SHA2017-badge/micropython-esp32,tralamazza/micropython,EcmaXp/micropython,cloudformdesign/micropython,jmarcelino/pycom-micropython,ruffy91/micropython,suda/micropython,ChuckM/micropython,kostyll/micropython,blazewicz/micropython,cloudformdesign/micropython,supergis/micropython,kostyll/micropython,TDAbboud/micropython,TDAbboud/micropython,emfcamp/micropython,tdautc19841202/micropython,aethaniel/micropython,ChuckM/micropython,toolmacher/micropython,aitjcize/micropython,stonegithubs/micropython,noahwilliamsson/micropython,omtinez/micropython,vitiral/micropython,cwyark/micropython,galenhz/micropython,aethaniel/micropython,alex-robbins/micropython,micropython/micropython-esp32,adafruit/micropython,suda/micropython,oopy/micropython,swegener/micropython,Vogtinator/micropython,adafruit/circuitpython,vriera/micropython,torwag/micropython,pfalcon/micropython,paul-xxx/micropython,selste/micropython,emfcamp/micropython,torwag/micropython,rubencabrera/micropython,mgyenik/micropython,drrk/micropython,PappaPeppar/micropython,warner83/micropython,dmazzella/micropython,infinnovation/micropython,henriknelson/micropython,mpalomer/micropython,noahchense/micropython,orionrobots/micropython,lbattraw/micropython,pramasoul/micropython,AriZuu/micropython,feilongfl/micropython,chrisdearman/micropython,Vogtinator/micropython,slzatz/micropython,HenrikSolver/micropython,jlillest/micropython,selste/micropython,PappaPeppar/micropython,kerneltask/micropython,tdautc19841202/micropython,tuc-osg/micropython,warner83/micropython,martinribelotta/micropython,dhylands/micropython,mgyenik/micropython,paul-xxx/micropython,noahwilliamsson/micropython,methoxid/micropystat,turbinenreiter/micropython,MrSurly/micropython,danicampora/micropython,xyb/micropython,turbinenreiter/micropython,kostyll/micropython,utopiaprince/micropython,SungEun-Steve-Kim/test-mp,pozetroninc/micropython,feilongfl/micropython,mianos/micropython,SHA2017-badge/micropython-esp32,vriera/micropython,bvernoux/micropython,MrSurly/micropython-esp32,firstval/micropython,danicampora/micropython,tdautc19841202/micropython,noahchense/micropython,supergis/micropython,alex-march/micropyt
hon,xhat/micropython,mhoffma/micropython,jimkmc/micropython,puuu/micropython,xuxiaoxin/micropython,ericsnowcurrently/micropython,dxxb/micropython,dhylands/micropython,supergis/micropython,torwag/micropython,ryannathans/micropython,MrSurly/micropython-esp32,AriZuu/micropython,kerneltask/micropython,utopiaprince/micropython,mianos/micropython,hiway/micropython,utopiaprince/micropython,Timmenem/micropython,jmarcelino/pycom-micropython,mianos/micropython,blmorris/micropython,jimkmc/micropython,dhylands/micropython,utopiaprince/micropython,hosaka/micropython,methoxid/micropystat
|
Add test for basic builtin types.
|
# basic types
print(bool)
print(int)
print(float)
print(complex)
print(tuple)
print(list)
print(set)
print(dict)
print(type(bool()) == bool)
print(type(int()) == int)
print(type(float()) == float)
print(type(complex()) == complex)
print(type(tuple()) == tuple)
print(type(list()) == list)
print(type(set()) == set)
print(type(dict()) == dict)
print(type(False) == bool)
print(type(0) == int)
print(type(0.0) == float)
print(type(1j) == complex)
print(type(()) == tuple)
print(type([]) == list)
print(type({None}) == set)
print(type({}) == dict)
|
<commit_before><commit_msg>Add test for basic builtin types.<commit_after>
|
# basic types
print(bool)
print(int)
print(float)
print(complex)
print(tuple)
print(list)
print(set)
print(dict)
print(type(bool()) == bool)
print(type(int()) == int)
print(type(float()) == float)
print(type(complex()) == complex)
print(type(tuple()) == tuple)
print(type(list()) == list)
print(type(set()) == set)
print(type(dict()) == dict)
print(type(False) == bool)
print(type(0) == int)
print(type(0.0) == float)
print(type(1j) == complex)
print(type(()) == tuple)
print(type([]) == list)
print(type({None}) == set)
print(type({}) == dict)
|
Add test for basic builtin types.
# basic types
print(bool)
print(int)
print(float)
print(complex)
print(tuple)
print(list)
print(set)
print(dict)
print(type(bool()) == bool)
print(type(int()) == int)
print(type(float()) == float)
print(type(complex()) == complex)
print(type(tuple()) == tuple)
print(type(list()) == list)
print(type(set()) == set)
print(type(dict()) == dict)
print(type(False) == bool)
print(type(0) == int)
print(type(0.0) == float)
print(type(1j) == complex)
print(type(()) == tuple)
print(type([]) == list)
print(type({None}) == set)
print(type({}) == dict)
|
<commit_before><commit_msg>Add test for basic builtin types.<commit_after># basic types
print(bool)
print(int)
print(float)
print(complex)
print(tuple)
print(list)
print(set)
print(dict)
print(type(bool()) == bool)
print(type(int()) == int)
print(type(float()) == float)
print(type(complex()) == complex)
print(type(tuple()) == tuple)
print(type(list()) == list)
print(type(set()) == set)
print(type(dict()) == dict)
print(type(False) == bool)
print(type(0) == int)
print(type(0.0) == float)
print(type(1j) == complex)
print(type(()) == tuple)
print(type([]) == list)
print(type({None}) == set)
print(type({}) == dict)
|
|
bed2210dd4e705b8574f2d1974f43b3a139a6477
|
scripts/remove_after_use/verify_node_wiki_page_counts.py
|
scripts/remove_after_use/verify_node_wiki_page_counts.py
|
# -*- coding: utf-8 -*-
import sys
import logging
from website.app import setup_django, init_app
from scripts import utils as script_utils
from django.db import transaction
setup_django()
from osf.models import AbstractNode
from addons.wiki.models import WikiPage, WikiVersion
logger = logging.getLogger(__name__)
def count_node_wiki_pages():
"""
Assert that counts for created WikiPages and WikiVersions are correct.
"""
nodes_with_wikis = AbstractNode.objects.exclude(wiki_pages_versions={}).exclude(type='osf.collection').exclude(type='osf.quickfilesnode')
wiki_page_count = 0
wiki_version_counts = 0
for node in nodes_with_wikis:
for wiki_key, version_list in node.wiki_pages_versions.iteritems():
wiki_page_count += 1
wiki_version_counts += len(version_list)
print "{} wiki pages expected".format(wiki_page_count)
print "{} wiki versions expected".format(wiki_version_counts)
assert wiki_page_count == WikiPage.objects.count()
assert wiki_version_counts == WikiVersion.objects.count()
def main(dry=True):
init_app(routes=False)
count_node_wiki_pages()
if __name__ == '__main__':
dry = '--dry' in sys.argv
if not dry:
script_utils.add_file_logger(logger, __file__)
# Finally run the migration
main(dry=dry)
|
Add test script for verifying counts.
|
Add test script for verifying counts.
|
Python
|
apache-2.0
|
felliott/osf.io,aaxelb/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,erinspace/osf.io,binoculars/osf.io,felliott/osf.io,saradbowman/osf.io,felliott/osf.io,pattisdr/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,erinspace/osf.io,mattclark/osf.io,mfraezz/osf.io,mfraezz/osf.io,baylee-d/osf.io,adlius/osf.io,pattisdr/osf.io,felliott/osf.io,brianjgeiger/osf.io,mattclark/osf.io,cslzchen/osf.io,cslzchen/osf.io,aaxelb/osf.io,adlius/osf.io,mattclark/osf.io,icereval/osf.io,Johnetordoff/osf.io,caseyrollins/osf.io,baylee-d/osf.io,brianjgeiger/osf.io,sloria/osf.io,caseyrollins/osf.io,baylee-d/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,erinspace/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,icereval/osf.io,aaxelb/osf.io,CenterForOpenScience/osf.io,sloria/osf.io,binoculars/osf.io,adlius/osf.io,adlius/osf.io,HalcyonChimera/osf.io,binoculars/osf.io,pattisdr/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,sloria/osf.io,aaxelb/osf.io,icereval/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,mfraezz/osf.io,saradbowman/osf.io
|
Add test script for verifying counts.
|
# -*- coding: utf-8 -*-
import sys
import logging
from website.app import setup_django, init_app
from scripts import utils as script_utils
from django.db import transaction
setup_django()
from osf.models import AbstractNode
from addons.wiki.models import WikiPage, WikiVersion
logger = logging.getLogger(__name__)
def count_node_wiki_pages():
"""
Assert that counts for created WikiPages and WikiVersions are correct.
"""
nodes_with_wikis = AbstractNode.objects.exclude(wiki_pages_versions={}).exclude(type='osf.collection').exclude(type='osf.quickfilesnode')
wiki_page_count = 0
wiki_version_counts = 0
for node in nodes_with_wikis:
for wiki_key, version_list in node.wiki_pages_versions.iteritems():
wiki_page_count += 1
wiki_version_counts += len(version_list)
print "{} wiki pages expected".format(wiki_page_count)
print "{} wiki versions expected".format(wiki_version_counts)
assert wiki_page_count == WikiPage.objects.count()
assert wiki_version_counts == WikiVersion.objects.count()
def main(dry=True):
init_app(routes=False)
count_node_wiki_pages()
if __name__ == '__main__':
dry = '--dry' in sys.argv
if not dry:
script_utils.add_file_logger(logger, __file__)
# Finally run the migration
main(dry=dry)
|
<commit_before><commit_msg>Add test script for verifying counts.<commit_after>
|
# -*- coding: utf-8 -*-
import sys
import logging
from website.app import setup_django, init_app
from scripts import utils as script_utils
from django.db import transaction
setup_django()
from osf.models import AbstractNode
from addons.wiki.models import WikiPage, WikiVersion
logger = logging.getLogger(__name__)
def count_node_wiki_pages():
"""
Assert that counts for created WikiPages and WikiVersions are correct.
"""
nodes_with_wikis = AbstractNode.objects.exclude(wiki_pages_versions={}).exclude(type='osf.collection').exclude(type='osf.quickfilesnode')
wiki_page_count = 0
wiki_version_counts = 0
for node in nodes_with_wikis:
for wiki_key, version_list in node.wiki_pages_versions.iteritems():
wiki_page_count += 1
wiki_version_counts += len(version_list)
print "{} wiki pages expected".format(wiki_page_count)
print "{} wiki versions expected".format(wiki_version_counts)
assert wiki_page_count == WikiPage.objects.count()
assert wiki_version_counts == WikiVersion.objects.count()
def main(dry=True):
init_app(routes=False)
count_node_wiki_pages()
if __name__ == '__main__':
dry = '--dry' in sys.argv
if not dry:
script_utils.add_file_logger(logger, __file__)
# Finally run the migration
main(dry=dry)
|
Add test script for verifying counts.
# -*- coding: utf-8 -*-
import sys
import logging
from website.app import setup_django, init_app
from scripts import utils as script_utils
from django.db import transaction
setup_django()
from osf.models import AbstractNode
from addons.wiki.models import WikiPage, WikiVersion
logger = logging.getLogger(__name__)
def count_node_wiki_pages():
"""
Assert that counts for created WikiPages and WikiVersions are correct.
"""
nodes_with_wikis = AbstractNode.objects.exclude(wiki_pages_versions={}).exclude(type='osf.collection').exclude(type='osf.quickfilesnode')
wiki_page_count = 0
wiki_version_counts = 0
for node in nodes_with_wikis:
for wiki_key, version_list in node.wiki_pages_versions.iteritems():
wiki_page_count += 1
wiki_version_counts += len(version_list)
print "{} wiki pages expected".format(wiki_page_count)
print "{} wiki versions expected".format(wiki_version_counts)
assert wiki_page_count == WikiPage.objects.count()
assert wiki_version_counts == WikiVersion.objects.count()
def main(dry=True):
init_app(routes=False)
count_node_wiki_pages()
if __name__ == '__main__':
dry = '--dry' in sys.argv
if not dry:
script_utils.add_file_logger(logger, __file__)
# Finally run the migration
main(dry=dry)
|
<commit_before><commit_msg>Add test script for verifying counts.<commit_after># -*- coding: utf-8 -*-
import sys
import logging
from website.app import setup_django, init_app
from scripts import utils as script_utils
from django.db import transaction
setup_django()
from osf.models import AbstractNode
from addons.wiki.models import WikiPage, WikiVersion
logger = logging.getLogger(__name__)
def count_node_wiki_pages():
"""
Assert that counts for created WikiPages and WikiVersions are correct.
"""
nodes_with_wikis = AbstractNode.objects.exclude(wiki_pages_versions={}).exclude(type='osf.collection').exclude(type='osf.quickfilesnode')
wiki_page_count = 0
wiki_version_counts = 0
for node in nodes_with_wikis:
for wiki_key, version_list in node.wiki_pages_versions.iteritems():
wiki_page_count += 1
wiki_version_counts += len(version_list)
print "{} wiki pages expected".format(wiki_page_count)
print "{} wiki versions expected".format(wiki_version_counts)
assert wiki_page_count == WikiPage.objects.count()
assert wiki_version_counts == WikiVersion.objects.count()
def main(dry=True):
init_app(routes=False)
count_node_wiki_pages()
if __name__ == '__main__':
dry = '--dry' in sys.argv
if not dry:
script_utils.add_file_logger(logger, __file__)
# Finally run the migration
main(dry=dry)
|
|
63b8222a1b75aef2f78bdce41bc18b46cba5a2b7
|
Charts/Testing/Python/TestLinePlot.py
|
Charts/Testing/Python/TestLinePlot.py
|
#!/usr/bin/env python
# Run this test like so:
# vtkpython TestLinePlot.py -D $VTK_DATA_ROOT \
# -B $VTK_DATA_ROOT/Baseline/Charts/
import os
import vtk
import vtk.test.Testing
import math
class TestLinePlot(vtk.test.Testing.vtkTest):
def testLinePlot(self):
"Test if line plots can be built with python"
# Set up a 2D scene, add an XY chart to it
view = vtk.vtkContextView()
view.GetRenderer().SetBackground(1.0,1.0,1.0)
view.GetRenderWindow().SetSize(400,300)
chart = vtk.vtkChartXY()
view.GetScene().AddItem(chart)
# Create a table with some points in it
table = vtk.vtkTable()
arrX = vtk.vtkFloatArray()
arrX.SetName("X Axis")
arrC = vtk.vtkFloatArray()
arrC.SetName("Cosine")
arrS = vtk.vtkFloatArray()
arrS.SetName("Sine")
arrS2 = vtk.vtkFloatArray()
arrS2.SetName("Sine2")
numPoints = 69
inc = 7.5 / (numPoints - 1)
for i in range(0,numPoints):
arrX.InsertNextValue(i*inc)
arrC.InsertNextValue(math.cos(i * inc) + 0.0)
arrS.InsertNextValue(math.sin(i * inc) + 0.0)
arrS2.InsertNextValue(math.sin(i * inc) + 0.5)
table.AddColumn(arrX)
table.AddColumn(arrC)
table.AddColumn(arrS)
table.AddColumn(arrS2)
# Now add the line plots with appropriate colors
line = chart.AddPlot(0)
line.SetInput(table,0,1)
line.SetColor(0,255,0,255)
line.SetWidth(1.0)
line = chart.AddPlot(0)
line.SetInput(table,0,2)
line.SetColor(255,0,0,255);
line.SetWidth(5.0)
line = chart.AddPlot(0)
line.SetInput(table,0,3)
line.SetColor(0,0,255,255);
line.SetWidth(4.0)
view.GetRenderWindow().SetMultiSamples(0)
img_file = "TestLinePlot.png"
vtk.test.Testing.compareImage(view.GetRenderWindow(),vtk.test.Testing.getAbsImagePath(img_file),threshold=25)
vtk.test.Testing.interact()
if __name__ == "__main__":
vtk.test.Testing.main([(TestLinePlot, 'test')])
|
Add Python LinePlot Charts Test
|
Add Python LinePlot Charts Test
Largely a clone of the Cxx version save that the data arrays must
be manipulated directly since the vtkVariant based methods on vtkTable
aren't available to Python.
|
Python
|
bsd-3-clause
|
sankhesh/VTK,msmolens/VTK,Wuteyan/VTK,cjh1/VTK,candy7393/VTK,keithroe/vtkoptix,biddisco/VTK,aashish24/VTK-old,msmolens/VTK,candy7393/VTK,demarle/VTK,arnaudgelas/VTK,demarle/VTK,SimVascular/VTK,berendkleinhaneveld/VTK,sumedhasingla/VTK,mspark93/VTK,biddisco/VTK,msmolens/VTK,hendradarwin/VTK,naucoin/VTKSlicerWidgets,naucoin/VTKSlicerWidgets,naucoin/VTKSlicerWidgets,Wuteyan/VTK,mspark93/VTK,sumedhasingla/VTK,sumedhasingla/VTK,biddisco/VTK,collects/VTK,demarle/VTK,collects/VTK,sumedhasingla/VTK,sankhesh/VTK,spthaolt/VTK,mspark93/VTK,gram526/VTK,johnkit/vtk-dev,daviddoria/PointGraphsPhase1,gram526/VTK,hendradarwin/VTK,hendradarwin/VTK,sankhesh/VTK,daviddoria/PointGraphsPhase1,sankhesh/VTK,jeffbaumes/jeffbaumes-vtk,mspark93/VTK,arnaudgelas/VTK,cjh1/VTK,ashray/VTK-EVM,aashish24/VTK-old,cjh1/VTK,naucoin/VTKSlicerWidgets,collects/VTK,SimVascular/VTK,keithroe/vtkoptix,SimVascular/VTK,spthaolt/VTK,SimVascular/VTK,jmerkow/VTK,Wuteyan/VTK,jeffbaumes/jeffbaumes-vtk,johnkit/vtk-dev,johnkit/vtk-dev,sumedhasingla/VTK,ashray/VTK-EVM,hendradarwin/VTK,spthaolt/VTK,jeffbaumes/jeffbaumes-vtk,SimVascular/VTK,johnkit/vtk-dev,hendradarwin/VTK,keithroe/vtkoptix,collects/VTK,Wuteyan/VTK,johnkit/vtk-dev,jmerkow/VTK,aashish24/VTK-old,msmolens/VTK,demarle/VTK,aashish24/VTK-old,candy7393/VTK,collects/VTK,mspark93/VTK,jmerkow/VTK,berendkleinhaneveld/VTK,sumedhasingla/VTK,naucoin/VTKSlicerWidgets,msmolens/VTK,keithroe/vtkoptix,arnaudgelas/VTK,arnaudgelas/VTK,Wuteyan/VTK,jeffbaumes/jeffbaumes-vtk,jmerkow/VTK,jmerkow/VTK,biddisco/VTK,candy7393/VTK,cjh1/VTK,aashish24/VTK-old,spthaolt/VTK,spthaolt/VTK,sankhesh/VTK,sankhesh/VTK,demarle/VTK,msmolens/VTK,SimVascular/VTK,sankhesh/VTK,hendradarwin/VTK,arnaudgelas/VTK,sumedhasingla/VTK,keithroe/vtkoptix,gram526/VTK,ashray/VTK-EVM,keithroe/vtkoptix,ashray/VTK-EVM,demarle/VTK,johnkit/vtk-dev,hendradarwin/VTK,jeffbaumes/jeffbaumes-vtk,cjh1/VTK,biddisco/VTK,ashray/VTK-EVM,spthaolt/VTK,daviddoria/PointGraphsPhase1,mspark93/VTK,gram526/VTK,berendkleinhaneveld/VTK,msmolens/VTK,daviddoria/PointGraphsPhase1,SimVascular/VTK,berendkleinhaneveld/VTK,naucoin/VTKSlicerWidgets,johnkit/vtk-dev,Wuteyan/VTK,gram526/VTK,mspark93/VTK,Wuteyan/VTK,daviddoria/PointGraphsPhase1,candy7393/VTK,sankhesh/VTK,jmerkow/VTK,sumedhasingla/VTK,gram526/VTK,ashray/VTK-EVM,candy7393/VTK,demarle/VTK,jeffbaumes/jeffbaumes-vtk,mspark93/VTK,daviddoria/PointGraphsPhase1,gram526/VTK,berendkleinhaneveld/VTK,aashish24/VTK-old,SimVascular/VTK,cjh1/VTK,arnaudgelas/VTK,ashray/VTK-EVM,demarle/VTK,candy7393/VTK,collects/VTK,candy7393/VTK,gram526/VTK,keithroe/vtkoptix,berendkleinhaneveld/VTK,biddisco/VTK,berendkleinhaneveld/VTK,ashray/VTK-EVM,jmerkow/VTK,biddisco/VTK,keithroe/vtkoptix,msmolens/VTK,jmerkow/VTK,spthaolt/VTK
|
Add Python LinePlot Charts Test
Largely a clone of the Cxx version save that the data arrays must
be manipulated directly since the vtkVariant based methods on vtkTable
aren't available to Python.
|
#!/usr/bin/env python
# Run this test like so:
# vtkpython TestLinePlot.py -D $VTK_DATA_ROOT \
# -B $VTK_DATA_ROOT/Baseline/Charts/
import os
import vtk
import vtk.test.Testing
import math
class TestLinePlot(vtk.test.Testing.vtkTest):
def testLinePlot(self):
"Test if line plots can be built with python"
# Set up a 2D scene, add an XY chart to it
view = vtk.vtkContextView()
view.GetRenderer().SetBackground(1.0,1.0,1.0)
view.GetRenderWindow().SetSize(400,300)
chart = vtk.vtkChartXY()
view.GetScene().AddItem(chart)
# Create a table with some points in it
table = vtk.vtkTable()
arrX = vtk.vtkFloatArray()
arrX.SetName("X Axis")
arrC = vtk.vtkFloatArray()
arrC.SetName("Cosine")
arrS = vtk.vtkFloatArray()
arrS.SetName("Sine")
arrS2 = vtk.vtkFloatArray()
arrS2.SetName("Sine2")
numPoints = 69
inc = 7.5 / (numPoints - 1)
for i in range(0,numPoints):
arrX.InsertNextValue(i*inc)
arrC.InsertNextValue(math.cos(i * inc) + 0.0)
arrS.InsertNextValue(math.sin(i * inc) + 0.0)
arrS2.InsertNextValue(math.sin(i * inc) + 0.5)
table.AddColumn(arrX)
table.AddColumn(arrC)
table.AddColumn(arrS)
table.AddColumn(arrS2)
# Now add the line plots with appropriate colors
line = chart.AddPlot(0)
line.SetInput(table,0,1)
line.SetColor(0,255,0,255)
line.SetWidth(1.0)
line = chart.AddPlot(0)
line.SetInput(table,0,2)
line.SetColor(255,0,0,255);
line.SetWidth(5.0)
line = chart.AddPlot(0)
line.SetInput(table,0,3)
line.SetColor(0,0,255,255);
line.SetWidth(4.0)
view.GetRenderWindow().SetMultiSamples(0)
img_file = "TestLinePlot.png"
vtk.test.Testing.compareImage(view.GetRenderWindow(),vtk.test.Testing.getAbsImagePath(img_file),threshold=25)
vtk.test.Testing.interact()
if __name__ == "__main__":
vtk.test.Testing.main([(TestLinePlot, 'test')])
|
<commit_before><commit_msg>Add Python LinePlot Charts Test
Largely a clone of the Cxx version save that the data arrays must
be manipulated directly since the vtkVariant based methods on vtkTable
aren't available to Python.<commit_after>
|
#!/usr/bin/env python
# Run this test like so:
# vtkpython TestLinePlot.py -D $VTK_DATA_ROOT \
# -B $VTK_DATA_ROOT/Baseline/Charts/
import os
import vtk
import vtk.test.Testing
import math
class TestLinePlot(vtk.test.Testing.vtkTest):
def testLinePlot(self):
"Test if line plots can be built with python"
# Set up a 2D scene, add an XY chart to it
view = vtk.vtkContextView()
view.GetRenderer().SetBackground(1.0,1.0,1.0)
view.GetRenderWindow().SetSize(400,300)
chart = vtk.vtkChartXY()
view.GetScene().AddItem(chart)
# Create a table with some points in it
table = vtk.vtkTable()
arrX = vtk.vtkFloatArray()
arrX.SetName("X Axis")
arrC = vtk.vtkFloatArray()
arrC.SetName("Cosine")
arrS = vtk.vtkFloatArray()
arrS.SetName("Sine")
arrS2 = vtk.vtkFloatArray()
arrS2.SetName("Sine2")
numPoints = 69
inc = 7.5 / (numPoints - 1)
for i in range(0,numPoints):
arrX.InsertNextValue(i*inc)
arrC.InsertNextValue(math.cos(i * inc) + 0.0)
arrS.InsertNextValue(math.sin(i * inc) + 0.0)
arrS2.InsertNextValue(math.sin(i * inc) + 0.5)
table.AddColumn(arrX)
table.AddColumn(arrC)
table.AddColumn(arrS)
table.AddColumn(arrS2)
# Now add the line plots with appropriate colors
line = chart.AddPlot(0)
line.SetInput(table,0,1)
line.SetColor(0,255,0,255)
line.SetWidth(1.0)
line = chart.AddPlot(0)
line.SetInput(table,0,2)
line.SetColor(255,0,0,255);
line.SetWidth(5.0)
line = chart.AddPlot(0)
line.SetInput(table,0,3)
line.SetColor(0,0,255,255);
line.SetWidth(4.0)
view.GetRenderWindow().SetMultiSamples(0)
img_file = "TestLinePlot.png"
vtk.test.Testing.compareImage(view.GetRenderWindow(),vtk.test.Testing.getAbsImagePath(img_file),threshold=25)
vtk.test.Testing.interact()
if __name__ == "__main__":
vtk.test.Testing.main([(TestLinePlot, 'test')])
|
Add Python LinePlot Charts Test
Largely a clone of the Cxx version save that the data arrays must
be manipulated directly since the vtkVariant based methods on vtkTable
aren't available to Python.
#!/usr/bin/env python
# Run this test like so:
# vtkpython TestLinePlot.py -D $VTK_DATA_ROOT \
# -B $VTK_DATA_ROOT/Baseline/Charts/
import os
import vtk
import vtk.test.Testing
import math
class TestLinePlot(vtk.test.Testing.vtkTest):
def testLinePlot(self):
"Test if line plots can be built with python"
# Set up a 2D scene, add an XY chart to it
view = vtk.vtkContextView()
view.GetRenderer().SetBackground(1.0,1.0,1.0)
view.GetRenderWindow().SetSize(400,300)
chart = vtk.vtkChartXY()
view.GetScene().AddItem(chart)
# Create a table with some points in it
table = vtk.vtkTable()
arrX = vtk.vtkFloatArray()
arrX.SetName("X Axis")
arrC = vtk.vtkFloatArray()
arrC.SetName("Cosine")
arrS = vtk.vtkFloatArray()
arrS.SetName("Sine")
arrS2 = vtk.vtkFloatArray()
arrS2.SetName("Sine2")
numPoints = 69
inc = 7.5 / (numPoints - 1)
for i in range(0,numPoints):
arrX.InsertNextValue(i*inc)
arrC.InsertNextValue(math.cos(i * inc) + 0.0)
arrS.InsertNextValue(math.sin(i * inc) + 0.0)
arrS2.InsertNextValue(math.sin(i * inc) + 0.5)
table.AddColumn(arrX)
table.AddColumn(arrC)
table.AddColumn(arrS)
table.AddColumn(arrS2)
# Now add the line plots with appropriate colors
line = chart.AddPlot(0)
line.SetInput(table,0,1)
line.SetColor(0,255,0,255)
line.SetWidth(1.0)
line = chart.AddPlot(0)
line.SetInput(table,0,2)
line.SetColor(255,0,0,255);
line.SetWidth(5.0)
line = chart.AddPlot(0)
line.SetInput(table,0,3)
line.SetColor(0,0,255,255);
line.SetWidth(4.0)
view.GetRenderWindow().SetMultiSamples(0)
img_file = "TestLinePlot.png"
vtk.test.Testing.compareImage(view.GetRenderWindow(),vtk.test.Testing.getAbsImagePath(img_file),threshold=25)
vtk.test.Testing.interact()
if __name__ == "__main__":
vtk.test.Testing.main([(TestLinePlot, 'test')])
|
<commit_before><commit_msg>Add Python LinePlot Charts Test
Largely a clone of the Cxx version save that the data arrays must
be manipulated directly since the vtkVariant based methods on vtkTable
aren't available to Python.<commit_after>#!/usr/bin/env python
# Run this test like so:
# vtkpython TestLinePlot.py -D $VTK_DATA_ROOT \
# -B $VTK_DATA_ROOT/Baseline/Charts/
import os
import vtk
import vtk.test.Testing
import math
class TestLinePlot(vtk.test.Testing.vtkTest):
def testLinePlot(self):
"Test if line plots can be built with python"
# Set up a 2D scene, add an XY chart to it
view = vtk.vtkContextView()
view.GetRenderer().SetBackground(1.0,1.0,1.0)
view.GetRenderWindow().SetSize(400,300)
chart = vtk.vtkChartXY()
view.GetScene().AddItem(chart)
# Create a table with some points in it
table = vtk.vtkTable()
arrX = vtk.vtkFloatArray()
arrX.SetName("X Axis")
arrC = vtk.vtkFloatArray()
arrC.SetName("Cosine")
arrS = vtk.vtkFloatArray()
arrS.SetName("Sine")
arrS2 = vtk.vtkFloatArray()
arrS2.SetName("Sine2")
numPoints = 69
inc = 7.5 / (numPoints - 1)
for i in range(0,numPoints):
arrX.InsertNextValue(i*inc)
arrC.InsertNextValue(math.cos(i * inc) + 0.0)
arrS.InsertNextValue(math.sin(i * inc) + 0.0)
arrS2.InsertNextValue(math.sin(i * inc) + 0.5)
table.AddColumn(arrX)
table.AddColumn(arrC)
table.AddColumn(arrS)
table.AddColumn(arrS2)
# Now add the line plots with appropriate colors
line = chart.AddPlot(0)
line.SetInput(table,0,1)
line.SetColor(0,255,0,255)
line.SetWidth(1.0)
line = chart.AddPlot(0)
line.SetInput(table,0,2)
line.SetColor(255,0,0,255);
line.SetWidth(5.0)
line = chart.AddPlot(0)
line.SetInput(table,0,3)
line.SetColor(0,0,255,255);
line.SetWidth(4.0)
view.GetRenderWindow().SetMultiSamples(0)
img_file = "TestLinePlot.png"
vtk.test.Testing.compareImage(view.GetRenderWindow(),vtk.test.Testing.getAbsImagePath(img_file),threshold=25)
vtk.test.Testing.interact()
if __name__ == "__main__":
vtk.test.Testing.main([(TestLinePlot, 'test')])
|
|
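As an aside to the note above about vtkTable and Python: a common way to fill vtkFloatArray columns without a per-value loop is to convert NumPy arrays, assuming NumPy and vtk.util.numpy_support are available (neither is used in the original test; this is only an illustrative sketch).
import numpy as np
import vtk
from vtk.util import numpy_support
# Build the same X/Cosine columns as the test, but from NumPy arrays.
x = np.linspace(0.0, 7.5, 69).astype(np.float32)
arr_x = numpy_support.numpy_to_vtk(x, deep=True)
arr_x.SetName("X Axis")
arr_cos = numpy_support.numpy_to_vtk(np.cos(x), deep=True)
arr_cos.SetName("Cosine")
table = vtk.vtkTable()
table.AddColumn(arr_x)
table.AddColumn(arr_cos)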
def7caf595c4d995673f2fb8b997fc4f6b563d09
|
tests/integration-test/test_cis_splice_effects_main.py
|
tests/integration-test/test_cis_splice_effects_main.py
|
#!/usr/bin/env python
'''
test_cis_splice_effects_main.py -- Integration test for `regtools cis-splice-effects`
Copyright (c) 2015, The Griffith Lab
Author: Avinash Ramu <aramu@genome.wustl.edu>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
'''
from integrationtest import IntegrationTest, main
import unittest
class TestCisSpliceEffectsHelp(IntegrationTest, unittest.TestCase):
def test_junctions_help(self):
params = ["cis-splice-effects", "-h"]
rv, err = self.execute(params)
self.assertEqual(rv, 0)
if __name__ == "__main__":
main()
|
Add test for cis-splice-effects main
|
Add test for cis-splice-effects main
|
Python
|
mit
|
griffithlab/regtools,griffithlab/regtools,griffithlab/regtools,griffithlab/regtools,griffithlab/regtools,gatoravi/regtools,gatoravi/regtools,griffithlab/regtools,gatoravi/regtools,gatoravi/regtools,gatoravi/regtools
|
Add test for cis-splice-effects main
|
#!/usr/bin/env python
'''
test_cis_splice_effects_main.py -- Integration test for `regtools cis-splice-effects`
Copyright (c) 2015, The Griffith Lab
Author: Avinash Ramu <aramu@genome.wustl.edu>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
'''
from integrationtest import IntegrationTest, main
import unittest
class TestCisSpliceEffectsHelp(IntegrationTest, unittest.TestCase):
def test_junctions_help(self):
params = ["cis-splice-effects", "-h"]
rv, err = self.execute(params)
self.assertEqual(rv, 0)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add test for cis-splice-effects main<commit_after>
|
#!/usr/bin/env python
'''
test_cis_splice_effects_main.py -- Integration test for `regtools cis-splice-effects`
Copyright (c) 2015, The Griffith Lab
Author: Avinash Ramu <aramu@genome.wustl.edu>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
'''
from integrationtest import IntegrationTest, main
import unittest
class TestCisSpliceEffectsHelp(IntegrationTest, unittest.TestCase):
def test_junctions_help(self):
params = ["cis-splice-effects", "-h"]
rv, err = self.execute(params)
self.assertEqual(rv, 0)
if __name__ == "__main__":
main()
|
Add test for cis-splice-effects main
#!/usr/bin/env python
'''
test_cis_splice_effects_main.py -- Integration test for `regtools cis-splice-effects`
Copyright (c) 2015, The Griffith Lab
Author: Avinash Ramu <aramu@genome.wustl.edu>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
'''
from integrationtest import IntegrationTest, main
import unittest
class TestCisSpliceEffectsHelp(IntegrationTest, unittest.TestCase):
def test_junctions_help(self):
params = ["cis-splice-effects", "-h"]
rv, err = self.execute(params)
self.assertEqual(rv, 0)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add test for cis-splice-effects main<commit_after>#!/usr/bin/env python
'''
test_cis_splice_effects_main.py -- Integration test for `regtools cis-splice-effects`
Copyright (c) 2015, The Griffith Lab
Author: Avinash Ramu <aramu@genome.wustl.edu>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
'''
from integrationtest import IntegrationTest, main
import unittest
class TestCisSpliceEffectsHelp(IntegrationTest, unittest.TestCase):
def test_junctions_help(self):
params = ["cis-splice-effects", "-h"]
rv, err = self.execute(params)
self.assertEqual(rv, 0)
if __name__ == "__main__":
main()
|
|
465aec2bf42d285b1707e0dbd8913856e6859e9d
|
tests/startsymbol_tests/NonterminalNotInGrammarTest.py
|
tests/startsymbol_tests/NonterminalNotInGrammarTest.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 10.08.2017 23:20
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
from grammpy.exceptions import NonterminalDoesNotExistsException
class NonterminalNotInGrammarTest(TestCase):
pass
if __name__ == '__main__':
main()
|
Add file for tests of setting invalid nonterminal as start symbol
|
Add file for tests of setting invalid nonterminal as start symbol
|
Python
|
mit
|
PatrikValkovic/grammpy
|
Add file for tests of setting invalid nonterminal as start symbol
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 10.08.2017 23:20
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
from grammpy.exceptions import NonterminalDoesNotExistsException
class NonterminalNotInGrammarTest(TestCase):
pass
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add file for tests of setting invalid nonterminal as start symbol<commit_after>
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 10.08.2017 23:20
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
from grammpy.exceptions import NonterminalDoesNotExistsException
class NonterminalNotInGrammarTest(TestCase):
pass
if __name__ == '__main__':
main()
|
Add file for tests of setting invalid nonterminal as start symbol
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 10.08.2017 23:20
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
from grammpy.exceptions import NonterminalDoesNotExistsException
class NonterminalNotInGrammarTest(TestCase):
pass
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add file for tests of setting invalid nonterminal as start symbol<commit_after>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 10.08.2017 23:20
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
from grammpy.exceptions import NonterminalDoesNotExistsException
class NonterminalNotInGrammarTest(TestCase):
pass
if __name__ == '__main__':
main()
|
|
2d1e8de13ce20c9e05e9e584d045431406af31c0
|
conman/routes/migrations/0003_add_validators.py
|
conman/routes/migrations/0003_add_validators.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import conman.routes.validators
class Migration(migrations.Migration):
dependencies = [
('routes', '0002_remove_slug_parent'),
]
operations = [
migrations.AlterField(
model_name='route',
name='url',
field=models.TextField(db_index=True, validators=[conman.routes.validators.validate_end_in_slash, conman.routes.validators.validate_start_in_slash, conman.routes.validators.validate_no_dotty_subpaths, conman.routes.validators.validate_no_double_slashes, conman.routes.validators.validate_no_hash_symbol, conman.routes.validators.validate_no_questionmark], unique=True),
),
]
|
Add missing migration (validators on Route.url)
|
Add missing migration (validators on Route.url)
|
Python
|
bsd-2-clause
|
Ian-Foote/django-conman,meshy/django-conman,meshy/django-conman
|
Add missing migration (validators on Route.url)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import conman.routes.validators
class Migration(migrations.Migration):
dependencies = [
('routes', '0002_remove_slug_parent'),
]
operations = [
migrations.AlterField(
model_name='route',
name='url',
field=models.TextField(db_index=True, validators=[conman.routes.validators.validate_end_in_slash, conman.routes.validators.validate_start_in_slash, conman.routes.validators.validate_no_dotty_subpaths, conman.routes.validators.validate_no_double_slashes, conman.routes.validators.validate_no_hash_symbol, conman.routes.validators.validate_no_questionmark], unique=True),
),
]
|
<commit_before><commit_msg>Add missing migration (validators on Route.url)<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import conman.routes.validators
class Migration(migrations.Migration):
dependencies = [
('routes', '0002_remove_slug_parent'),
]
operations = [
migrations.AlterField(
model_name='route',
name='url',
field=models.TextField(db_index=True, validators=[conman.routes.validators.validate_end_in_slash, conman.routes.validators.validate_start_in_slash, conman.routes.validators.validate_no_dotty_subpaths, conman.routes.validators.validate_no_double_slashes, conman.routes.validators.validate_no_hash_symbol, conman.routes.validators.validate_no_questionmark], unique=True),
),
]
|
Add missing migration (validators on Route.url)
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import conman.routes.validators
class Migration(migrations.Migration):
dependencies = [
('routes', '0002_remove_slug_parent'),
]
operations = [
migrations.AlterField(
model_name='route',
name='url',
field=models.TextField(db_index=True, validators=[conman.routes.validators.validate_end_in_slash, conman.routes.validators.validate_start_in_slash, conman.routes.validators.validate_no_dotty_subpaths, conman.routes.validators.validate_no_double_slashes, conman.routes.validators.validate_no_hash_symbol, conman.routes.validators.validate_no_questionmark], unique=True),
),
]
|
<commit_before><commit_msg>Add missing migration (validators on Route.url)<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import conman.routes.validators
class Migration(migrations.Migration):
dependencies = [
('routes', '0002_remove_slug_parent'),
]
operations = [
migrations.AlterField(
model_name='route',
name='url',
field=models.TextField(db_index=True, validators=[conman.routes.validators.validate_end_in_slash, conman.routes.validators.validate_start_in_slash, conman.routes.validators.validate_no_dotty_subpaths, conman.routes.validators.validate_no_double_slashes, conman.routes.validators.validate_no_hash_symbol, conman.routes.validators.validate_no_questionmark], unique=True),
),
]
|
|
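The migration above only references the validator callables by name. For readers unfamiliar with Django validators, they are plain functions that raise ValidationError; a minimal sketch of what two of them could look like follows (the real implementations live in conman.routes.validators and may differ).
from django.core.exceptions import ValidationError
def validate_end_in_slash(value):
    # Illustrative only; the actual conman implementation may differ.
    if not value.endswith('/'):
        raise ValidationError('URL must end with a slash.')
def validate_no_double_slashes(value):
    # Illustrative only.
    if '//' in value:
        raise ValidationError('URL must not contain consecutive slashes.')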
76607bfd2b909eb004a897d9b4c78c93690e0f32
|
press_releases/migrations/0009_auto_20170519_1308.py
|
press_releases/migrations/0009_auto_20170519_1308.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('icekit_press_releases', '0008_auto_20161128_1049'),
]
operations = [
migrations.AddField(
model_name='pressreleaselisting',
name='admin_notes',
field=models.TextField(help_text=b"Administrator's notes about this item", blank=True),
),
migrations.AddField(
model_name='pressreleaselisting',
name='brief',
field=models.TextField(help_text=b'A document brief describing the purpose of this item', blank=True),
),
]
|
Update DB migrations following upstream change in ICEkit
|
Update DB migrations following upstream change in ICEkit
The `WorkflowStateMixin` model in django-icekit --
which is used as a basis for the `PressReleaseListing`
model -- was updated with two new fields: `brief`,
and `admin_notes`.
This change updates the model in this project to
comply with the upstream changes.
And will hopefully make ICEkit unit tests pass again
in Travis.
|
Python
|
mit
|
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/icekit-press-releases,ic-labs/django-icekit,ic-labs/icekit-press-releases,ic-labs/django-icekit
|
Update DB migrations following upstream change in ICEkit
The `WorkflowStateMixin` model in django-icekit --
which is used as a basis for the `PressReleaseListing`
model -- was updated with two new fields: `brief`,
and `admin_notes`.
This change updates the model in this project to
comply with the upstream changes.
And will hopefully make ICEkit unit tests pass again
in Travis.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('icekit_press_releases', '0008_auto_20161128_1049'),
]
operations = [
migrations.AddField(
model_name='pressreleaselisting',
name='admin_notes',
field=models.TextField(help_text=b"Administrator's notes about this item", blank=True),
),
migrations.AddField(
model_name='pressreleaselisting',
name='brief',
field=models.TextField(help_text=b'A document brief describing the purpose of this item', blank=True),
),
]
|
<commit_before><commit_msg>Update DB migrations following upstream change in ICEkit
The `WorkflowStateMixin` model in django-icekit --
which is used as a basis for the `PressReleaseListing`
model -- was updated with two new fields: `brief`,
and `admin_notes`.
This change updates the model in this project to
comply with the upstream changes.
And will hopefully make ICEkit unit tests pass again
in Travis.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('icekit_press_releases', '0008_auto_20161128_1049'),
]
operations = [
migrations.AddField(
model_name='pressreleaselisting',
name='admin_notes',
field=models.TextField(help_text=b"Administrator's notes about this item", blank=True),
),
migrations.AddField(
model_name='pressreleaselisting',
name='brief',
field=models.TextField(help_text=b'A document brief describing the purpose of this item', blank=True),
),
]
|
Update DB migrations following upstream change in ICEkit
The `WorkflowStateMixin` model in django-icekit --
which is used as a basis for the `PressReleaseListing`
model -- was updated with two new fields: `brief`,
and `admin_notes`.
This change updates the model in this project to
comply with the upstream changes.
And will hopefully make ICEkit unit tests pass again
in Travis.
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('icekit_press_releases', '0008_auto_20161128_1049'),
]
operations = [
migrations.AddField(
model_name='pressreleaselisting',
name='admin_notes',
field=models.TextField(help_text=b"Administrator's notes about this item", blank=True),
),
migrations.AddField(
model_name='pressreleaselisting',
name='brief',
field=models.TextField(help_text=b'A document brief describing the purpose of this item', blank=True),
),
]
|
<commit_before><commit_msg>Update DB migrations following upstream change in ICEkit
The `WorkflowStateMixin` model in django-icekit --
which is used as a basis for the `PressReleaseListing`
model -- was updated with two new fields: `brief`,
and `admin_notes`.
This change updates the model in this project to
comply with the upstream changes.
And will hopefully make ICEkit unit tests pass again
in Travis.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('icekit_press_releases', '0008_auto_20161128_1049'),
]
operations = [
migrations.AddField(
model_name='pressreleaselisting',
name='admin_notes',
field=models.TextField(help_text=b"Administrator's notes about this item", blank=True),
),
migrations.AddField(
model_name='pressreleaselisting',
name='brief',
field=models.TextField(help_text=b'A document brief describing the purpose of this item', blank=True),
),
]
|
|
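The message above describes the upstream change only in prose. As a rough sketch of the kind of abstract mixin it refers to (the field definitions mirror the migration above; everything else is assumed rather than copied from django-icekit):
from django.db import models
class WorkflowStateMixin(models.Model):
    # Sketch only: mirrors the two fields added by the migration above.
    brief = models.TextField(
        blank=True,
        help_text="A document brief describing the purpose of this item",
    )
    admin_notes = models.TextField(
        blank=True,
        help_text="Administrator's notes about this item",
    )
    class Meta:
        abstract = True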
9d4b8961cb1129cf02dffdbe987babe0942a4f9b
|
migrations/versions/0136_notification_template_hist.py
|
migrations/versions/0136_notification_template_hist.py
|
"""
Revision ID: 0136_notification_template_hist
Revises: 0135_stats_template_usage
Create Date: 2017-11-08 10:15:07.039227
"""
from alembic import op
revision = '0136_notification_template_hist'
down_revision = '0135_stats_template_usage'
def upgrade():
op.drop_constraint('notifications_template_id_fkey', 'notifications', type_='foreignkey')
op.execute("""
ALTER TABLE notifications ADD CONSTRAINT "notifications_templates_history_fkey"
FOREIGN KEY ("template_id", "template_version") REFERENCES "templates_history" ("id", "version")
NOT VALID
""")
op.drop_constraint('notification_history_template_id_fkey', 'notification_history', type_='foreignkey')
op.execute("""
ALTER TABLE notification_history ADD CONSTRAINT "notification_history_templates_history_fkey"
FOREIGN KEY ("template_id", "template_version") REFERENCES "templates_history" ("id", "version")
NOT VALID
""")
def downgrade():
op.drop_constraint('notifications_templates_history_fkey', 'notifications', type_='foreignkey')
op.create_foreign_key('notifications_template_id_fkey', 'notifications', 'templates', ['template_id'], ['id'])
op.drop_constraint('notification_history_templates_history_fkey', 'notification_history', type_='foreignkey')
op.create_foreign_key('notification_history_template_id_fkey', 'notification_history', 'templates',
['template_id'], ['id'])
|
Add a migration to replace notifications_template foreign key
|
Add a migration to replace notifications_template foreign key
Removes notifications.template_id foreign key and replaces it with
a composite foreign key constraint to TemplateHistory using the
existing notification columns for template ID and template version.
Foreign key constraint is created as NOT VALID to avoid locking the
notifications table while postgres verifies that existing records
don't break the constraint. From postgres docs:
> If the constraint is marked NOT VALID, the potentially-lengthy initial
> check to verify that all rows in the table satisfy the constraint is
> skipped. The constraint will still be enforced against subsequent
> inserts or updates (that is, they'll fail unless there is a matching
> row in the referenced table, in the case of foreign keys; and they'll
> fail unless the new row matches the specified check constraints). But
> the database will not assume that the constraint holds for all rows
> in the table, until it is validated by using the VALIDATE CONSTRAINT
> option.
VALIDATE CONSTRAINT will be issued as a separate migration in a
follow-up PR.
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
Add a migration to replace notifications_template foreign key
Removes notifications.template_id foreign key and replaces it with
a composite foreign key constraint to TemplateHistory using the
existing notification columns for template ID and template version.
Foreign key constraint is created as NOT VALID to avoid locking the
notifications table while postgres verifies that existing records
don't break the constraint. From postgres docs:
> If the constraint is marked NOT VALID, the potentially-lengthy initial
> check to verify that all rows in the table satisfy the constraint is
> skipped. The constraint will still be enforced against subsequent
> inserts or updates (that is, they'll fail unless there is a matching
> row in the referenced table, in the case of foreign keys; and they'll
> fail unless the new row matches the specified check constraints). But
> the database will not assume that the constraint holds for all rows
> in the table, until it is validated by using the VALIDATE CONSTRAINT
> option.
VALIDATE CONSTRAINT will be issued as a separate migration in a
follow-up PR.
|
"""
Revision ID: 0136_notification_template_hist
Revises: 0135_stats_template_usage
Create Date: 2017-11-08 10:15:07.039227
"""
from alembic import op
revision = '0136_notification_template_hist'
down_revision = '0135_stats_template_usage'
def upgrade():
op.drop_constraint('notifications_template_id_fkey', 'notifications', type_='foreignkey')
op.execute("""
ALTER TABLE notifications ADD CONSTRAINT "notifications_templates_history_fkey"
FOREIGN KEY ("template_id", "template_version") REFERENCES "templates_history" ("id", "version")
NOT VALID
""")
op.drop_constraint('notification_history_template_id_fkey', 'notification_history', type_='foreignkey')
op.execute("""
ALTER TABLE notification_history ADD CONSTRAINT "notification_history_templates_history_fkey"
FOREIGN KEY ("template_id", "template_version") REFERENCES "templates_history" ("id", "version")
NOT VALID
""")
def downgrade():
op.drop_constraint('notifications_templates_history_fkey', 'notifications', type_='foreignkey')
op.create_foreign_key('notifications_template_id_fkey', 'notifications', 'templates', ['template_id'], ['id'])
op.drop_constraint('notification_history_templates_history_fkey', 'notification_history', type_='foreignkey')
op.create_foreign_key('notification_history_template_id_fkey', 'notification_history', 'templates',
['template_id'], ['id'])
|
<commit_before><commit_msg>Add a migration to replace notifications_template foreign key
Removes notifications.template_id foreign key and replaces it with
a composite foreign key constraint to TemplateHistory using the
existing notification columns for template ID and template version.
Foreign key constraint is created as NOT VALID to avoid locking the
notifications table while postgres verifies that existing records
don't break the constraint. From postgres docs:
> If the constraint is marked NOT VALID, the potentially-lengthy initial
> check to verify that all rows in the table satisfy the constraint is
> skipped. The constraint will still be enforced against subsequent
> inserts or updates (that is, they'll fail unless there is a matching
> row in the referenced table, in the case of foreign keys; and they'll
> fail unless the new row matches the specified check constraints). But
> the database will not assume that the constraint holds for all rows
> in the table, until it is validated by using the VALIDATE CONSTRAINT
> option.
VALIDATE CONSTRAINT will be issued as a separate migration in a
follow-up PR.<commit_after>
|
"""
Revision ID: 0136_notification_template_hist
Revises: 0135_stats_template_usage
Create Date: 2017-11-08 10:15:07.039227
"""
from alembic import op
revision = '0136_notification_template_hist'
down_revision = '0135_stats_template_usage'
def upgrade():
op.drop_constraint('notifications_template_id_fkey', 'notifications', type_='foreignkey')
op.execute("""
ALTER TABLE notifications ADD CONSTRAINT "notifications_templates_history_fkey"
FOREIGN KEY ("template_id", "template_version") REFERENCES "templates_history" ("id", "version")
NOT VALID
""")
op.drop_constraint('notification_history_template_id_fkey', 'notification_history', type_='foreignkey')
op.execute("""
ALTER TABLE notification_history ADD CONSTRAINT "notification_history_templates_history_fkey"
FOREIGN KEY ("template_id", "template_version") REFERENCES "templates_history" ("id", "version")
NOT VALID
""")
def downgrade():
op.drop_constraint('notifications_templates_history_fkey', 'notifications', type_='foreignkey')
op.create_foreign_key('notifications_template_id_fkey', 'notifications', 'templates', ['template_id'], ['id'])
op.drop_constraint('notification_history_templates_history_fkey', 'notification_history', type_='foreignkey')
op.create_foreign_key('notification_history_template_id_fkey', 'notification_history', 'templates',
['template_id'], ['id'])
|
Add a migration to replace notifications_template foreign key
Removes notifications.template_id foreign key and replaces it with
a composite foreign key constraint to TemplateHistory using the
existing notification columns for template ID and template version.
Foreign key constraint is created as NOT VALID to avoid locking the
notifications table while postgres verifies that existing records
don't break the constraint. From postgres docs:
> If the constraint is marked NOT VALID, the potentially-lengthy initial
> check to verify that all rows in the table satisfy the constraint is
> skipped. The constraint will still be enforced against subsequent
> inserts or updates (that is, they'll fail unless there is a matching
> row in the referenced table, in the case of foreign keys; and they'll
> fail unless the new row matches the specified check constraints). But
> the database will not assume that the constraint holds for all rows
> in the table, until it is validated by using the VALIDATE CONSTRAINT
> option.
VALIDATE CONSTRAINT will be issued as a separate migration in a
follow-up PR.
"""
Revision ID: 0136_notification_template_hist
Revises: 0135_stats_template_usage
Create Date: 2017-11-08 10:15:07.039227
"""
from alembic import op
revision = '0136_notification_template_hist'
down_revision = '0135_stats_template_usage'
def upgrade():
op.drop_constraint('notifications_template_id_fkey', 'notifications', type_='foreignkey')
op.execute("""
ALTER TABLE notifications ADD CONSTRAINT "notifications_templates_history_fkey"
FOREIGN KEY ("template_id", "template_version") REFERENCES "templates_history" ("id", "version")
NOT VALID
""")
op.drop_constraint('notification_history_template_id_fkey', 'notification_history', type_='foreignkey')
op.execute("""
ALTER TABLE notification_history ADD CONSTRAINT "notification_history_templates_history_fkey"
FOREIGN KEY ("template_id", "template_version") REFERENCES "templates_history" ("id", "version")
NOT VALID
""")
def downgrade():
op.drop_constraint('notifications_templates_history_fkey', 'notifications', type_='foreignkey')
op.create_foreign_key('notifications_template_id_fkey', 'notifications', 'templates', ['template_id'], ['id'])
op.drop_constraint('notification_history_templates_history_fkey', 'notification_history', type_='foreignkey')
op.create_foreign_key('notification_history_template_id_fkey', 'notification_history', 'templates',
['template_id'], ['id'])
|
<commit_before><commit_msg>Add a migration to replace notifications_template foreign key
Removes notifications.template_id foreign key and replaces it with
a composite foreign key constraint to TemplateHistory using the
existing notification columns for template ID and template version.
Foreign key constraint is created as NOT VALID to avoid locking the
notifications table while postgres verifies that existing records
don't break the constraint. From postgres docs:
> If the constraint is marked NOT VALID, the potentially-lengthy initial
> check to verify that all rows in the table satisfy the constraint is
> skipped. The constraint will still be enforced against subsequent
> inserts or updates (that is, they'll fail unless there is a matching
> row in the referenced table, in the case of foreign keys; and they'll
> fail unless the new row matches the specified check constraints). But
> the database will not assume that the constraint holds for all rows
> in the table, until it is validated by using the VALIDATE CONSTRAINT
> option.
VALIDATE CONSTRAINT will be issued as a separate migration in a
follow-up PR.<commit_after>"""
Revision ID: 0136_notification_template_hist
Revises: 0135_stats_template_usage
Create Date: 2017-11-08 10:15:07.039227
"""
from alembic import op
revision = '0136_notification_template_hist'
down_revision = '0135_stats_template_usage'
def upgrade():
op.drop_constraint('notifications_template_id_fkey', 'notifications', type_='foreignkey')
op.execute("""
ALTER TABLE notifications ADD CONSTRAINT "notifications_templates_history_fkey"
FOREIGN KEY ("template_id", "template_version") REFERENCES "templates_history" ("id", "version")
NOT VALID
""")
op.drop_constraint('notification_history_template_id_fkey', 'notification_history', type_='foreignkey')
op.execute("""
ALTER TABLE notification_history ADD CONSTRAINT "notification_history_templates_history_fkey"
FOREIGN KEY ("template_id", "template_version") REFERENCES "templates_history" ("id", "version")
NOT VALID
""")
def downgrade():
op.drop_constraint('notifications_templates_history_fkey', 'notifications', type_='foreignkey')
op.create_foreign_key('notifications_template_id_fkey', 'notifications', 'templates', ['template_id'], ['id'])
op.drop_constraint('notification_history_templates_history_fkey', 'notification_history', type_='foreignkey')
op.create_foreign_key('notification_history_template_id_fkey', 'notification_history', 'templates',
['template_id'], ['id'])
|
|
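As an aside on the record above: the message defers VALIDATE CONSTRAINT to a follow-up migration. A minimal sketch of what that follow-up could look like (the revision identifier 0137_validate_template_fkeys is hypothetical; the constraint names are the ones created above):

from alembic import op

revision = '0137_validate_template_fkeys'  # hypothetical follow-up revision id
down_revision = '0136_notification_template_hist'


def upgrade():
    # VALIDATE CONSTRAINT checks existing rows without the heavyweight lock
    # that adding a pre-validated foreign key would have required.
    op.execute('ALTER TABLE notifications VALIDATE CONSTRAINT "notifications_templates_history_fkey"')
    op.execute('ALTER TABLE notification_history VALIDATE CONSTRAINT "notification_history_templates_history_fkey"')


def downgrade():
    # Validation itself cannot be selectively undone; nothing to do here.
    pass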
173df69a56a0088fcee12cd58c94d631eb952c0d
|
normandy/studies/migrations/0002_auto_20180510_2256.py
|
normandy/studies/migrations/0002_auto_20180510_2256.py
|
# Generated by Django 2.0.5 on 2018-05-10 22:56
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('studies', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='extension',
options={'ordering': ('-id',)},
),
]
|
Add migration for meta change on Extensions
|
Add migration for meta change on Extensions
|
Python
|
mpl-2.0
|
mozilla/normandy,mozilla/normandy,mozilla/normandy,mozilla/normandy
|
Add migration for meta change on Extensions
|
# Generated by Django 2.0.5 on 2018-05-10 22:56
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('studies', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='extension',
options={'ordering': ('-id',)},
),
]
|
<commit_before><commit_msg>Add migration for meta change on Extensions<commit_after>
|
# Generated by Django 2.0.5 on 2018-05-10 22:56
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('studies', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='extension',
options={'ordering': ('-id',)},
),
]
|
Add migration for meta change on Extensions# Generated by Django 2.0.5 on 2018-05-10 22:56
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('studies', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='extension',
options={'ordering': ('-id',)},
),
]
|
<commit_before><commit_msg>Add migration for meta change on Extensions<commit_after># Generated by Django 2.0.5 on 2018-05-10 22:56
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('studies', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='extension',
options={'ordering': ('-id',)},
),
]
|
|
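For context on the record above: AlterModelOptions only captures a change to the model's Meta options, so the corresponding model-side edit is a one-liner. A sketch under the assumption that the model is studies.Extension with its fields unchanged:

from django.db import models


class Extension(models.Model):
    # ... existing fields unchanged ...

    class Meta:
        ordering = ('-id',)  # newest first; this is the option AlterModelOptions records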
c76a24e25609fba8bdff5babc0319be6f8ea145c
|
connect_config/migrations/0003_auto_20141207_1231.py
|
connect_config/migrations/0003_auto_20141207_1231.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('connect_config', '0002_auto_20141104_1434'),
]
operations = [
migrations.AlterModelOptions(
name='siteconfig',
options={'verbose_name': 'site configuration', 'verbose_name_plural': 'site configurations'},
),
migrations.AlterField(
model_name='siteconfig',
name='email',
field=models.EmailField(verbose_name='email', max_length=75, help_text='Email for receiving site-wide enquiries'),
),
migrations.AlterField(
model_name='siteconfig',
name='email_header',
field=models.ImageField(verbose_name='email header', upload_to='', help_text='Header image on site generated emails. Must be 600px wide. Keep the file size as small as possible!'),
),
migrations.AlterField(
model_name='siteconfig',
name='logo',
field=models.ImageField(verbose_name='logo', upload_to='', help_text='Must be no larger than 80px by 160px'),
),
migrations.AlterField(
model_name='siteconfig',
name='site',
field=models.OneToOneField(to='sites.Site', related_name='config', verbose_name='site'),
),
migrations.AlterField(
model_name='siteconfig',
name='tagline',
field=models.CharField(verbose_name='site tagline', max_length=200),
),
]
|
Add migration for translation cleanup
|
Add migration for translation cleanup
|
Python
|
bsd-3-clause
|
f3r3nc/connect,nlhkabu/connect,f3r3nc/connect,f3r3nc/connect,nlhkabu/connect,nlhkabu/connect,nlhkabu/connect,f3r3nc/connect
|
Add migration for translation cleanup
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('connect_config', '0002_auto_20141104_1434'),
]
operations = [
migrations.AlterModelOptions(
name='siteconfig',
options={'verbose_name': 'site configuration', 'verbose_name_plural': 'site configurations'},
),
migrations.AlterField(
model_name='siteconfig',
name='email',
field=models.EmailField(verbose_name='email', max_length=75, help_text='Email for receiving site-wide enquiries'),
),
migrations.AlterField(
model_name='siteconfig',
name='email_header',
field=models.ImageField(verbose_name='email header', upload_to='', help_text='Header image on site generated emails. Must be 600px wide. Keep the file size as small as possible!'),
),
migrations.AlterField(
model_name='siteconfig',
name='logo',
field=models.ImageField(verbose_name='logo', upload_to='', help_text='Must be no larger than 80px by 160px'),
),
migrations.AlterField(
model_name='siteconfig',
name='site',
field=models.OneToOneField(to='sites.Site', related_name='config', verbose_name='site'),
),
migrations.AlterField(
model_name='siteconfig',
name='tagline',
field=models.CharField(verbose_name='site tagline', max_length=200),
),
]
|
<commit_before><commit_msg>Add migration for translation cleanup<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('connect_config', '0002_auto_20141104_1434'),
]
operations = [
migrations.AlterModelOptions(
name='siteconfig',
options={'verbose_name': 'site configuration', 'verbose_name_plural': 'site configurations'},
),
migrations.AlterField(
model_name='siteconfig',
name='email',
field=models.EmailField(verbose_name='email', max_length=75, help_text='Email for receiving site-wide enquiries'),
),
migrations.AlterField(
model_name='siteconfig',
name='email_header',
field=models.ImageField(verbose_name='email header', upload_to='', help_text='Header image on site generated emails. Must be 600px wide. Keep the file size as small as possible!'),
),
migrations.AlterField(
model_name='siteconfig',
name='logo',
field=models.ImageField(verbose_name='logo', upload_to='', help_text='Must be no larger than 80px by 160px'),
),
migrations.AlterField(
model_name='siteconfig',
name='site',
field=models.OneToOneField(to='sites.Site', related_name='config', verbose_name='site'),
),
migrations.AlterField(
model_name='siteconfig',
name='tagline',
field=models.CharField(verbose_name='site tagline', max_length=200),
),
]
|
Add migration for translation cleanup# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('connect_config', '0002_auto_20141104_1434'),
]
operations = [
migrations.AlterModelOptions(
name='siteconfig',
options={'verbose_name': 'site configuration', 'verbose_name_plural': 'site configurations'},
),
migrations.AlterField(
model_name='siteconfig',
name='email',
field=models.EmailField(verbose_name='email', max_length=75, help_text='Email for receiving site-wide enquiries'),
),
migrations.AlterField(
model_name='siteconfig',
name='email_header',
field=models.ImageField(verbose_name='email header', upload_to='', help_text='Header image on site generated emails. Must be 600px wide. Keep the file size as small as possible!'),
),
migrations.AlterField(
model_name='siteconfig',
name='logo',
field=models.ImageField(verbose_name='logo', upload_to='', help_text='Must be no larger than 80px by 160px'),
),
migrations.AlterField(
model_name='siteconfig',
name='site',
field=models.OneToOneField(to='sites.Site', related_name='config', verbose_name='site'),
),
migrations.AlterField(
model_name='siteconfig',
name='tagline',
field=models.CharField(verbose_name='site tagline', max_length=200),
),
]
|
<commit_before><commit_msg>Add migration for translation cleanup<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('connect_config', '0002_auto_20141104_1434'),
]
operations = [
migrations.AlterModelOptions(
name='siteconfig',
options={'verbose_name': 'site configuration', 'verbose_name_plural': 'site configurations'},
),
migrations.AlterField(
model_name='siteconfig',
name='email',
field=models.EmailField(verbose_name='email', max_length=75, help_text='Email for receiving site-wide enquiries'),
),
migrations.AlterField(
model_name='siteconfig',
name='email_header',
field=models.ImageField(verbose_name='email header', upload_to='', help_text='Header image on site generated emails. Must be 600px wide. Keep the file size as small as possible!'),
),
migrations.AlterField(
model_name='siteconfig',
name='logo',
field=models.ImageField(verbose_name='logo', upload_to='', help_text='Must be no larger than 80px by 160px'),
),
migrations.AlterField(
model_name='siteconfig',
name='site',
field=models.OneToOneField(to='sites.Site', related_name='config', verbose_name='site'),
),
migrations.AlterField(
model_name='siteconfig',
name='tagline',
field=models.CharField(verbose_name='site tagline', max_length=200),
),
]
|
|
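The "translation cleanup" in the record above surfaces in the migration as verbose_name and help_text changes. A plausible model-side counterpart, assumed rather than taken from the repo, is marking those strings for translation with Django's lazy helper:

from django.db import models
from django.utils.translation import ugettext_lazy as _


class SiteConfig(models.Model):
    email = models.EmailField(
        _('email'),
        max_length=75,
        help_text=_('Email for receiving site-wide enquiries'),
    )
    tagline = models.CharField(_('site tagline'), max_length=200)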
4d0295eb55e92a8885b4e48749f6db019e2fb5a3
|
django/users/migrations/0002_auto_20140922_0843.py
|
django/users/migrations/0002_auto_20140922_0843.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_echonest_forward(apps, schema_editor):
"""Create echonest user."""
User = apps.get_model("users", "User")
User.objects.update_or_create(email='echonest')
def add_echonest_backward(apps, schema_editor):
"""Delete echonest user."""
User = apps.get_model("users", "User")
User.objects.filter(email='echonest').delete()
class Migration(migrations.Migration):
dependencies = [
('users', '0001_initial'),
]
operations = [
migrations.RunPython(add_echonest_forward, add_echonest_backward)
]
|
Add migration to create echonest user
|
Add migration to create echonest user
|
Python
|
bsd-3-clause
|
FreeMusicNinja/freemusic.ninja,FreeMusicNinja/freemusic.ninja
|
Add migration to create echonest user
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_echonest_forward(apps, schema_editor):
"""Create echonest user."""
User = apps.get_model("users", "User")
User.objects.update_or_create(email='echonest')
def add_echonest_backward(apps, schema_editor):
"""Delete echonest user."""
User = apps.get_model("users", "User")
User.objects.filter(email='echonest').delete()
class Migration(migrations.Migration):
dependencies = [
('users', '0001_initial'),
]
operations = [
migrations.RunPython(add_echonest_forward, add_echonest_backward)
]
|
<commit_before><commit_msg>Add migration to create echonest user<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_echonest_forward(apps, schema_editor):
"""Create echonest user."""
User = apps.get_model("users", "User")
User.objects.update_or_create(email='echonest')
def add_echonest_backward(apps, schema_editor):
"""Delete echonest user."""
User = apps.get_model("users", "User")
User.objects.filter(email='echonest').delete()
class Migration(migrations.Migration):
dependencies = [
('users', '0001_initial'),
]
operations = [
migrations.RunPython(add_echonest_forward, add_echonest_backward)
]
|
Add migration to create echonest user# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_echonest_forward(apps, schema_editor):
"""Create echonest user."""
User = apps.get_model("users", "User")
User.objects.update_or_create(email='echonest')
def add_echonest_backward(apps, schema_editor):
"""Delete echonest user."""
User = apps.get_model("users", "User")
User.objects.filter(email='echonest').delete()
class Migration(migrations.Migration):
dependencies = [
('users', '0001_initial'),
]
operations = [
migrations.RunPython(add_echonest_forward, add_echonest_backward)
]
|
<commit_before><commit_msg>Add migration to create echonest user<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_echonest_forward(apps, schema_editor):
"""Create echonest user."""
User = apps.get_model("users", "User")
User.objects.update_or_create(email='echonest')
def add_echonest_backward(apps, schema_editor):
"""Delete echonest user."""
User = apps.get_model("users", "User")
User.objects.filter(email='echonest').delete()
class Migration(migrations.Migration):
dependencies = [
('users', '0001_initial'),
]
operations = [
migrations.RunPython(add_echonest_forward, add_echonest_backward)
]
|
|
22171f517cfa72ed69fa1c321c6c647108de3a6d
|
app/grandchallenge/retina_core/signals.py
|
app/grandchallenge/retina_core/signals.py
|
from django.dispatch import receiver
from django.db.models.signals import post_save
from guardian.shortcuts import assign_perm
from django.conf import settings
from grandchallenge.annotations.models import (
MeasurementAnnotation,
BooleanClassificationAnnotation,
IntegerClassificationAnnotation,
PolygonAnnotationSet,
LandmarkAnnotationSet,
ETDRSGridAnnotation,
CoordinateListAnnotation,
SinglePolygonAnnotation,
SingleLandmarkAnnotation,
)
@receiver(post_save, sender=MeasurementAnnotation)
@receiver(post_save, sender=BooleanClassificationAnnotation)
@receiver(post_save, sender=IntegerClassificationAnnotation)
@receiver(post_save, sender=PolygonAnnotationSet)
@receiver(post_save, sender=LandmarkAnnotationSet)
@receiver(post_save, sender=ETDRSGridAnnotation)
@receiver(post_save, sender=CoordinateListAnnotation)
@receiver(post_save, sender=SinglePolygonAnnotation)
@receiver(post_save, sender=SingleLandmarkAnnotation)
def annotation_post_save(sender, instance, created):
"""
Set object level permissions for grader that belongs to retina_graders group after saving
of new annotation
"""
if not created:
return
model_name = sender.__name__.lower()
if model_name.startswith("single"):
owner = instance.annotation_set.grader
else:
owner = instance.grader
if not owner.groups.filter(name=settings.RETINA_GRADERS_GROUP_NAME).exists():
return
for permission_type in ("view", "add", "change", "delete"):
permission_name = f"annotation.{permission_type}_{model_name}"
assign_perm(permission_name, owner, instance)
|
Add post_save signal for annotations to add correct object level permissions
|
Add post_save signal for annotations to add correct object level permissions
|
Python
|
apache-2.0
|
comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django
|
Add post_save signal for annotations to add correct object level permissions
|
from django.dispatch import receiver
from django.db.models.signals import post_save
from guardian.shortcuts import assign_perm
from django.conf import settings
from grandchallenge.annotations.models import (
MeasurementAnnotation,
BooleanClassificationAnnotation,
IntegerClassificationAnnotation,
PolygonAnnotationSet,
LandmarkAnnotationSet,
ETDRSGridAnnotation,
CoordinateListAnnotation,
SinglePolygonAnnotation,
SingleLandmarkAnnotation,
)
@receiver(post_save, sender=MeasurementAnnotation)
@receiver(post_save, sender=BooleanClassificationAnnotation)
@receiver(post_save, sender=IntegerClassificationAnnotation)
@receiver(post_save, sender=PolygonAnnotationSet)
@receiver(post_save, sender=LandmarkAnnotationSet)
@receiver(post_save, sender=ETDRSGridAnnotation)
@receiver(post_save, sender=CoordinateListAnnotation)
@receiver(post_save, sender=SinglePolygonAnnotation)
@receiver(post_save, sender=SingleLandmarkAnnotation)
def annotation_post_save(sender, instance, created):
"""
Set object level permissions for grader that belongs to retina_graders group after saving
of new annotation
"""
if not created:
return
model_name = sender.__name__.lower()
if model_name.startswith("single"):
owner = instance.annotation_set.grader
else:
owner = instance.grader
if not owner.groups.filter(name=settings.RETINA_GRADERS_GROUP_NAME).exists():
return
for permission_type in ("view", "add", "change", "delete"):
permission_name = f"annotation.{permission_type}_{model_name}"
assign_perm(permission_name, owner, instance)
|
<commit_before><commit_msg>Add post_save signal for annotations to add correct object level permissions<commit_after>
|
from django.dispatch import receiver
from django.db.models.signals import post_save
from guardian.shortcuts import assign_perm
from django.conf import settings
from grandchallenge.annotations.models import (
MeasurementAnnotation,
BooleanClassificationAnnotation,
IntegerClassificationAnnotation,
PolygonAnnotationSet,
LandmarkAnnotationSet,
ETDRSGridAnnotation,
CoordinateListAnnotation,
SinglePolygonAnnotation,
SingleLandmarkAnnotation,
)
@receiver(post_save, sender=MeasurementAnnotation)
@receiver(post_save, sender=BooleanClassificationAnnotation)
@receiver(post_save, sender=IntegerClassificationAnnotation)
@receiver(post_save, sender=PolygonAnnotationSet)
@receiver(post_save, sender=LandmarkAnnotationSet)
@receiver(post_save, sender=ETDRSGridAnnotation)
@receiver(post_save, sender=CoordinateListAnnotation)
@receiver(post_save, sender=SinglePolygonAnnotation)
@receiver(post_save, sender=SingleLandmarkAnnotation)
def annotation_post_save(sender, instance, created):
"""
Set object level permissions for grader that belongs to retina_graders group after saving
of new annotation
"""
if not created:
return
model_name = sender.__name__.lower()
if model_name.startswith("single"):
owner = instance.annotation_set.grader
else:
owner = instance.grader
if not owner.groups.filter(name=settings.RETINA_GRADERS_GROUP_NAME).exists():
return
for permission_type in ("view", "add", "change", "delete"):
permission_name = f"annotation.{permission_type}_{model_name}"
assign_perm(permission_name, owner, instance)
|
Add post_save signal for annotations to add correct object level permissionsfrom django.dispatch import receiver
from django.db.models.signals import post_save
from guardian.shortcuts import assign_perm
from django.conf import settings
from grandchallenge.annotations.models import (
MeasurementAnnotation,
BooleanClassificationAnnotation,
IntegerClassificationAnnotation,
PolygonAnnotationSet,
LandmarkAnnotationSet,
ETDRSGridAnnotation,
CoordinateListAnnotation,
SinglePolygonAnnotation,
SingleLandmarkAnnotation,
)
@receiver(post_save, sender=MeasurementAnnotation)
@receiver(post_save, sender=BooleanClassificationAnnotation)
@receiver(post_save, sender=IntegerClassificationAnnotation)
@receiver(post_save, sender=PolygonAnnotationSet)
@receiver(post_save, sender=LandmarkAnnotationSet)
@receiver(post_save, sender=ETDRSGridAnnotation)
@receiver(post_save, sender=CoordinateListAnnotation)
@receiver(post_save, sender=SinglePolygonAnnotation)
@receiver(post_save, sender=SingleLandmarkAnnotation)
def annotation_post_save(sender, instance, created):
"""
Set object level permissions for grader that belongs to retina_graders group after saving
of new annotation
"""
if not created:
return
model_name = sender.__name__.lower()
if model_name.startswith("single"):
owner = instance.annotation_set.grader
else:
owner = instance.grader
if not owner.groups.filter(name=settings.RETINA_GRADERS_GROUP_NAME).exists():
return
for permission_type in ("view", "add", "change", "delete"):
permission_name = f"annotation.{permission_type}_{model_name}"
assign_perm(permission_name, owner, instance)
|
<commit_before><commit_msg>Add post_save signal for annotations to add correct object level permissions<commit_after>from django.dispatch import receiver
from django.db.models.signals import post_save
from guardian.shortcuts import assign_perm
from django.conf import settings
from grandchallenge.annotations.models import (
MeasurementAnnotation,
BooleanClassificationAnnotation,
IntegerClassificationAnnotation,
PolygonAnnotationSet,
LandmarkAnnotationSet,
ETDRSGridAnnotation,
CoordinateListAnnotation,
SinglePolygonAnnotation,
SingleLandmarkAnnotation,
)
@receiver(post_save, sender=MeasurementAnnotation)
@receiver(post_save, sender=BooleanClassificationAnnotation)
@receiver(post_save, sender=IntegerClassificationAnnotation)
@receiver(post_save, sender=PolygonAnnotationSet)
@receiver(post_save, sender=LandmarkAnnotationSet)
@receiver(post_save, sender=ETDRSGridAnnotation)
@receiver(post_save, sender=CoordinateListAnnotation)
@receiver(post_save, sender=SinglePolygonAnnotation)
@receiver(post_save, sender=SingleLandmarkAnnotation)
def annotation_post_save(sender, instance, created):
"""
Set object level permissions for grader that belongs to retina_graders group after saving
of new annotation
"""
if not created:
return
model_name = sender.__name__.lower()
if model_name.startswith("single"):
owner = instance.annotation_set.grader
else:
owner = instance.grader
if not owner.groups.filter(name=settings.RETINA_GRADERS_GROUP_NAME).exists():
return
for permission_type in ("view", "add", "change", "delete"):
permission_name = f"annotation.{permission_type}_{model_name}"
assign_perm(permission_name, owner, instance)
|
|
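Two notes on the signal receiver in the record above: Django's post_save dispatch also passes keyword arguments such as signal, raw, using and update_fields, so receivers conventionally accept **kwargs, and a signals module only takes effect once something imports it, usually the app config's ready() hook. A hedged sketch of that wiring (the RetinaCoreConfig name is an assumption):

from django.apps import AppConfig


class RetinaCoreConfig(AppConfig):
    name = 'grandchallenge.retina_core'

    def ready(self):
        # Importing the module registers the @receiver-decorated handlers.
        from . import signals  # noqa: F401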
3c1f879469202a2001db94ba47055a3a9fab43f0
|
ecommerce/extensions/catalogue/migrations/0045_add_edx_employee_coupon_category.py
|
ecommerce/extensions/catalogue/migrations/0045_add_edx_employee_coupon_category.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-01-22 09:46
from __future__ import unicode_literals
from django.db import migrations
from oscar.apps.catalogue.categories import create_from_breadcrumbs
from oscar.core.loading import get_model
Category = get_model('catalogue', 'Category')
COUPON_CATEGORY_NAME = 'Coupons'
EDX_EMPLOYEE_COUPON_CATEGORY = 'edX Employee Request'
def create_edx_employee_category(apps, schema_editor):
"""Create edX employee coupon category."""
Category.skip_history_when_saving = True
create_from_breadcrumbs(
'{} > {}'.format(
COUPON_CATEGORY_NAME, EDX_EMPLOYEE_COUPON_CATEGORY
)
)
def remove_edx_employee_category(apps, schema_editor):
"""Remove edX employee coupon category."""
Category.skip_history_when_saving = True
Category.objects.get(
name=COUPON_CATEGORY_NAME
).get_children().filter(
name=EDX_EMPLOYEE_COUPON_CATEGORY
).delete()
class Migration(migrations.Migration):
dependencies = [
('catalogue', '0044_add_enterprisecontractmetadata_product_attribute'),
]
operations = [
migrations.RunPython(create_edx_employee_category, remove_edx_employee_category)
]
|
Add new coupon category for edX.
|
Add new coupon category for edX.
Added a data migration to create a new coupon category for edX
employees.
PROD-1177
|
Python
|
agpl-3.0
|
eduNEXT/edunext-ecommerce,edx/ecommerce,eduNEXT/edunext-ecommerce,eduNEXT/edunext-ecommerce,edx/ecommerce,edx/ecommerce,eduNEXT/edunext-ecommerce,edx/ecommerce
|
Add new coupon category for edX.
Added a data migration to create a new coupon category for edX
employees.
PROD-1177
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-01-22 09:46
from __future__ import unicode_literals
from django.db import migrations
from oscar.apps.catalogue.categories import create_from_breadcrumbs
from oscar.core.loading import get_model
Category = get_model('catalogue', 'Category')
COUPON_CATEGORY_NAME = 'Coupons'
EDX_EMPLOYEE_COUPON_CATEGORY = 'edX Employee Request'
def create_edx_employee_category(apps, schema_editor):
"""Create edX employee coupon category."""
Category.skip_history_when_saving = True
create_from_breadcrumbs(
'{} > {}'.format(
COUPON_CATEGORY_NAME, EDX_EMPLOYEE_COUPON_CATEGORY
)
)
def remove_edx_employee_category(apps, schema_editor):
"""Remove edX employee coupon category."""
Category.skip_history_when_saving = True
Category.objects.get(
name=COUPON_CATEGORY_NAME
).get_children().filter(
name=EDX_EMPLOYEE_COUPON_CATEGORY
).delete()
class Migration(migrations.Migration):
dependencies = [
('catalogue', '0044_add_enterprisecontractmetadata_product_attribute'),
]
operations = [
migrations.RunPython(create_edx_employee_category, remove_edx_employee_category)
]
|
<commit_before><commit_msg>Add new coupon category for edX.
Added a data migration to create a new coupon category for edX
employees.
PROD-1177<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-01-22 09:46
from __future__ import unicode_literals
from django.db import migrations
from oscar.apps.catalogue.categories import create_from_breadcrumbs
from oscar.core.loading import get_model
Category = get_model('catalogue', 'Category')
COUPON_CATEGORY_NAME = 'Coupons'
EDX_EMPLOYEE_COUPON_CATEGORY = 'edX Employee Request'
def create_edx_employee_category(apps, schema_editor):
"""Create edX employee coupon category."""
Category.skip_history_when_saving = True
create_from_breadcrumbs(
'{} > {}'.format(
COUPON_CATEGORY_NAME, EDX_EMPLOYEE_COUPON_CATEGORY
)
)
def remove_edx_employee_category(apps, schema_editor):
"""Remove edX employee coupon category."""
Category.skip_history_when_saving = True
Category.objects.get(
name=COUPON_CATEGORY_NAME
).get_children().filter(
name=EDX_EMPLOYEE_COUPON_CATEGORY
).delete()
class Migration(migrations.Migration):
dependencies = [
('catalogue', '0044_add_enterprisecontractmetadata_product_attribute'),
]
operations = [
migrations.RunPython(create_edx_employee_category, remove_edx_employee_category)
]
|
Add new coupon category for edX.
Added a data migration to create a new coupon category for edX
employees.
PROD-1177# -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-01-22 09:46
from __future__ import unicode_literals
from django.db import migrations
from oscar.apps.catalogue.categories import create_from_breadcrumbs
from oscar.core.loading import get_model
Category = get_model('catalogue', 'Category')
COUPON_CATEGORY_NAME = 'Coupons'
EDX_EMPLOYEE_COUPON_CATEGORY = 'edX Employee Request'
def create_edx_employee_category(apps, schema_editor):
"""Create edX employee coupon category."""
Category.skip_history_when_saving = True
create_from_breadcrumbs(
'{} > {}'.format(
COUPON_CATEGORY_NAME, EDX_EMPLOYEE_COUPON_CATEGORY
)
)
def remove_edx_employee_category(apps, schema_editor):
"""Remove edX employee coupon category."""
Category.skip_history_when_saving = True
Category.objects.get(
name=COUPON_CATEGORY_NAME
).get_children().filter(
name=EDX_EMPLOYEE_COUPON_CATEGORY
).delete()
class Migration(migrations.Migration):
dependencies = [
('catalogue', '0044_add_enterprisecontractmetadata_product_attribute'),
]
operations = [
migrations.RunPython(create_edx_employee_category, remove_edx_employee_category)
]
|
<commit_before><commit_msg>Add new coupon category for edX.
Added a data migration to create a new coupon category for edX
employees.
PROD-1177<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-01-22 09:46
from __future__ import unicode_literals
from django.db import migrations
from oscar.apps.catalogue.categories import create_from_breadcrumbs
from oscar.core.loading import get_model
Category = get_model('catalogue', 'Category')
COUPON_CATEGORY_NAME = 'Coupons'
EDX_EMPLOYEE_COUPON_CATEGORY = 'edX Employee Request'
def create_edx_employee_category(apps, schema_editor):
"""Create edX employee coupon category."""
Category.skip_history_when_saving = True
create_from_breadcrumbs(
'{} > {}'.format(
COUPON_CATEGORY_NAME, EDX_EMPLOYEE_COUPON_CATEGORY
)
)
def remove_edx_employee_category(apps, schema_editor):
"""Remove edX employee coupon category."""
Category.skip_history_when_saving = True
Category.objects.get(
name=COUPON_CATEGORY_NAME
).get_children().filter(
name=EDX_EMPLOYEE_COUPON_CATEGORY
).delete()
class Migration(migrations.Migration):
dependencies = [
('catalogue', '0044_add_enterprisecontractmetadata_product_attribute'),
]
operations = [
migrations.RunPython(create_edx_employee_category, remove_edx_employee_category)
]
|
|
7fdce241ba818755f6822e44f66bf503cbea3a77
|
exercises/chapter_04/exercise_04_04/exercise_04_04.py
|
exercises/chapter_04/exercise_04_04/exercise_04_04.py
|
# 4-5. Summing a Million
numbers = list(range(1, 1000001))
print("min = ", min(numbers))
print("max = ", max(numbers))
print("sum = ", sum(numbers))
|
Add solution to exercise 4.4.
|
Add solution to exercise 4.4.
|
Python
|
mit
|
HenrikSamuelsson/python-crash-course
|
Add solution to exercise 4.4.
|
# 4-5. Summing a Million
numbers = list(range(1, 1000001))
print("min = ", min(numbers))
print("max = ", max(numbers))
print("sum = ", sum(numbers))
|
<commit_before><commit_msg>Add solution to exercise 4.4.<commit_after>
|
# 4-5. Summing a Million
numbers = list(range(1, 1000001))
print("min = ", min(numbers))
print("max = ", max(numbers))
print("sum = ", sum(numbers))
|
Add solution to exercise 4.4.# 4-5. Summing a Million
numbers = list(range(1, 1000001))
print("min = ", min(numbers))
print("max = ", max(numbers))
print("sum = ", sum(numbers))
|
<commit_before><commit_msg>Add solution to exercise 4.4.<commit_after># 4-5. Summing a Million
numbers = list(range(1, 1000001))
print("min = ", min(numbers))
print("max = ", max(numbers))
print("sum = ", sum(numbers))
|
|
10a0fc5f62dcd85b022cc781855d9675f2beb365
|
alembic/versions/36fba9f9069d_delete_unused_project_columns.py
|
alembic/versions/36fba9f9069d_delete_unused_project_columns.py
|
"""delete unused project columns
Revision ID: 36fba9f9069d
Revises: 151b2f642877
Create Date: 2015-08-07 09:45:22.044720
"""
# revision identifiers, used by Alembic.
revision = '36fba9f9069d'
down_revision = '151b2f642877'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.drop_column('project', 'time_estimate')
op.drop_column('project', 'time_limit')
op.drop_column('project', 'calibration_frac')
op.drop_column('project', 'bolt_course_id')
op.drop_column('project', 'long_tasks')
def downgrade():
op.add_column('project', sa.Column('time_estimate', sa.Integer, default=0))
op.add_column('project', sa.Column('time_limit', sa.Integer, default=0))
op.add_column('project', sa.Column('calibration_frac', sa.Float, default=0))
op.add_column('project', sa.Column('bolt_course_id', sa.Integer, default=0))
op.add_column('project', sa.Column('long_tasks', sa.Integer, default=0))
|
Add migration for removing unused columns in project
|
Add migration for removing unused columns in project
|
Python
|
agpl-3.0
|
PyBossa/pybossa,Scifabric/pybossa,PyBossa/pybossa,Scifabric/pybossa,geotagx/pybossa,geotagx/pybossa
|
Add migration for removing unused columns in project
|
"""delete unused project columns
Revision ID: 36fba9f9069d
Revises: 151b2f642877
Create Date: 2015-08-07 09:45:22.044720
"""
# revision identifiers, used by Alembic.
revision = '36fba9f9069d'
down_revision = '151b2f642877'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.drop_column('project', 'time_estimate')
op.drop_column('project', 'time_limit')
op.drop_column('project', 'calibration_frac')
op.drop_column('project', 'bolt_course_id')
op.drop_column('project', 'long_tasks')
def downgrade():
op.add_column('project', sa.Column('time_estimate', sa.Integer, default=0))
op.add_column('project', sa.Column('time_limit', sa.Integer, default=0))
op.add_column('project', sa.Column('calibration_frac', sa.Float, default=0))
op.add_column('project', sa.Column('bolt_course_id', sa.Integer, default=0))
op.add_column('project', sa.Column('long_tasks', sa.Integer, default=0))
|
<commit_before><commit_msg>Add migration for removing unused columns in project<commit_after>
|
"""delete unused project columns
Revision ID: 36fba9f9069d
Revises: 151b2f642877
Create Date: 2015-08-07 09:45:22.044720
"""
# revision identifiers, used by Alembic.
revision = '36fba9f9069d'
down_revision = '151b2f642877'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.drop_column('project', 'time_estimate')
op.drop_column('project', 'time_limit')
op.drop_column('project', 'calibration_frac')
op.drop_column('project', 'bolt_course_id')
op.drop_column('project', 'long_tasks')
def downgrade():
op.add_column('project', sa.Column('time_estimate', sa.Integer, default=0))
op.add_column('project', sa.Column('time_limit', sa.Integer, default=0))
op.add_column('project', sa.Column('calibration_frac', sa.Float, default=0))
op.add_column('project', sa.Column('bolt_course_id', sa.Integer, default=0))
op.add_column('project', sa.Column('long_tasks', sa.Integer, default=0))
|
Add migration for removing unused columns in project"""delete unused project columns
Revision ID: 36fba9f9069d
Revises: 151b2f642877
Create Date: 2015-08-07 09:45:22.044720
"""
# revision identifiers, used by Alembic.
revision = '36fba9f9069d'
down_revision = '151b2f642877'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.drop_column('project', 'time_estimate')
op.drop_column('project', 'time_limit')
op.drop_column('project', 'calibration_frac')
op.drop_column('project', 'bolt_course_id')
op.drop_column('project', 'long_tasks')
def downgrade():
op.add_column('project', sa.Column('time_estimate', sa.Integer, default=0))
op.add_column('project', sa.Column('time_limit', sa.Integer, default=0))
op.add_column('project', sa.Column('calibration_frac', sa.Float, default=0))
op.add_column('project', sa.Column('bolt_course_id', sa.Integer, default=0))
op.add_column('project', sa.Column('long_tasks', sa.Integer, default=0))
|
<commit_before><commit_msg>Add migration for removing unused columns in project<commit_after>"""delete unused project columns
Revision ID: 36fba9f9069d
Revises: 151b2f642877
Create Date: 2015-08-07 09:45:22.044720
"""
# revision identifiers, used by Alembic.
revision = '36fba9f9069d'
down_revision = '151b2f642877'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.drop_column('project', 'time_estimate')
op.drop_column('project', 'time_limit')
op.drop_column('project', 'calibration_frac')
op.drop_column('project', 'bolt_course_id')
op.drop_column('project', 'long_tasks')
def downgrade():
op.add_column('project', sa.Column('time_estimate', sa.Integer, default=0))
op.add_column('project', sa.Column('time_limit', sa.Integer, default=0))
op.add_column('project', sa.Column('calibration_frac', sa.Float, default=0))
op.add_column('project', sa.Column('bolt_course_id', sa.Integer, default=0))
op.add_column('project', sa.Column('long_tasks', sa.Integer, default=0))
|
|
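One caveat the record above leaves implicit: the downgrade recreates the dropped columns but cannot restore the values they held, so reverting this revision is lossy. Applying or reverting it programmatically uses Alembic's command API, for example (the alembic.ini path is an assumption):

from alembic import command
from alembic.config import Config

cfg = Config('alembic.ini')
command.upgrade(cfg, '36fba9f9069d')    # apply the column drops
command.downgrade(cfg, '151b2f642877')  # columns come back, their data does not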
5e75bf7b71c57ebc617dc0f073c2ed14585944be
|
tests/test_base.py
|
tests/test_base.py
|
import unittest
from requests import HTTPError
from kbcstorage.base import Endpoint
class TestEndpoint(unittest.TestCase):
"""
Test Endpoint functionality.
"""
def setUp(self):
self.root = 'https://httpbin.org'
self.token = ''
def test_get(self):
"""
Simple get works.
"""
endpoint = Endpoint(self.root, 'get', self.token)
requested_url = endpoint.get(endpoint.base_url)['url']
assert requested_url == 'https://httpbin.org/get'
def test_get_404(self):
"""
Get inexistent resource raises HTTPError.
"""
endpoint = Endpoint(self.root, 'get', self.token)
with self.assertRaises(HTTPError):
endpoint.get('{}/not-a-url'.format(endpoint.base_url))
def test_post(self):
"""
Simple post works.
"""
endpoint = Endpoint(self.root, 'post', self.token)
requested_url = endpoint.post(endpoint.base_url)['url']
assert requested_url == 'https://httpbin.org/post'
def test_post_404(self):
"""
Post to inexistent resource raises HTTPError.
"""
endpoint = Endpoint(self.root, 'post', self.token)
with self.assertRaises(HTTPError):
endpoint.post('{}/not-a-url'.format(endpoint.base_url))
def test_delete(self):
"""
Simple delete works.
"""
endpoint = Endpoint(self.root, 'delete', self.token)
resp = endpoint.delete(endpoint.base_url)
assert resp is None
def test_delete_404(self):
"""
Delete inexistent resource raises HTTPError.
"""
endpoint = Endpoint(self.root, 'delete', self.token)
with self.assertRaises(HTTPError):
endpoint.delete('{}/not-a-url'.format(endpoint.base_url))
|
Add tests for the base endpoint HTTP methods
|
Add tests for the base endpoint HTTP methods
This tests the `try: ... except: raise finally: ...` gotcha caught
by @pocin.
|
Python
|
mit
|
Ogaday/sapi-python-client,Ogaday/sapi-python-client
|
Add tests for the base endpoint HTTP methods
This tests the `try: ... except: raise finally: ...` gotcha caught
by @pocin.
|
import unittest
from requests import HTTPError
from kbcstorage.base import Endpoint
class TestEndpoint(unittest.TestCase):
"""
Test Endpoint functionality.
"""
def setUp(self):
self.root = 'https://httpbin.org'
self.token = ''
def test_get(self):
"""
Simple get works.
"""
endpoint = Endpoint(self.root, 'get', self.token)
requested_url = endpoint.get(endpoint.base_url)['url']
assert requested_url == 'https://httpbin.org/get'
def test_get_404(self):
"""
Get inexistent resource raises HTTPError.
"""
endpoint = Endpoint(self.root, 'get', self.token)
with self.assertRaises(HTTPError):
endpoint.get('{}/not-a-url'.format(endpoint.base_url))
def test_post(self):
"""
Simple post works.
"""
endpoint = Endpoint(self.root, 'post', self.token)
requested_url = endpoint.post(endpoint.base_url)['url']
assert requested_url == 'https://httpbin.org/post'
def test_post_404(self):
"""
Post to inexistent resource raises HTTPError.
"""
endpoint = Endpoint(self.root, 'post', self.token)
with self.assertRaises(HTTPError):
endpoint.post('{}/not-a-url'.format(endpoint.base_url))
def test_delete(self):
"""
Simple delete works.
"""
endpoint = Endpoint(self.root, 'delete', self.token)
resp = endpoint.delete(endpoint.base_url)
assert resp is None
def test_delete_404(self):
"""
Delete inexistent resource raises HTTPError.
"""
endpoint = Endpoint(self.root, 'delete', self.token)
with self.assertRaises(HTTPError):
endpoint.delete('{}/not-a-url'.format(endpoint.base_url))
|
<commit_before><commit_msg>Add tests for the base endpoint HTTP methods
This tests the `try: ... except: raise finally: ...` gotcha caught
by @pocin.<commit_after>
|
import unittest
from requests import HTTPError
from kbcstorage.base import Endpoint
class TestEndpoint(unittest.TestCase):
"""
Test Endpoint functionality.
"""
def setUp(self):
self.root = 'https://httpbin.org'
self.token = ''
def test_get(self):
"""
Simple get works.
"""
endpoint = Endpoint(self.root, 'get', self.token)
requested_url = endpoint.get(endpoint.base_url)['url']
assert requested_url == 'https://httpbin.org/get'
def test_get_404(self):
"""
Get inexistent resource raises HTTPError.
"""
endpoint = Endpoint(self.root, 'get', self.token)
with self.assertRaises(HTTPError):
endpoint.get('{}/not-a-url'.format(endpoint.base_url))
def test_post(self):
"""
Simple post works.
"""
endpoint = Endpoint(self.root, 'post', self.token)
requested_url = endpoint.post(endpoint.base_url)['url']
assert requested_url == 'https://httpbin.org/post'
def test_post_404(self):
"""
Post to inexistent resource raises HTTPError.
"""
endpoint = Endpoint(self.root, 'post', self.token)
with self.assertRaises(HTTPError):
endpoint.post('{}/not-a-url'.format(endpoint.base_url))
def test_delete(self):
"""
Simple delete works.
"""
endpoint = Endpoint(self.root, 'delete', self.token)
resp = endpoint.delete(endpoint.base_url)
assert resp is None
def test_delete_404(self):
"""
Delete inexistent resource raises HTTPError.
"""
endpoint = Endpoint(self.root, 'delete', self.token)
with self.assertRaises(HTTPError):
endpoint.delete('{}/not-a-url'.format(endpoint.base_url))
|
Add tests for the base endpoint HTTP methods
This tests the `try: ... except: raise finally: ...` gotcha caught
by @pocin.import unittest
from requests import HTTPError
from kbcstorage.base import Endpoint
class TestEndpoint(unittest.TestCase):
"""
Test Endpoint functionality.
"""
def setUp(self):
self.root = 'https://httpbin.org'
self.token = ''
def test_get(self):
"""
Simple get works.
"""
endpoint = Endpoint(self.root, 'get', self.token)
requested_url = endpoint.get(endpoint.base_url)['url']
assert requested_url == 'https://httpbin.org/get'
def test_get_404(self):
"""
Get inexistent resource raises HTTPError.
"""
endpoint = Endpoint(self.root, 'get', self.token)
with self.assertRaises(HTTPError):
endpoint.get('{}/not-a-url'.format(endpoint.base_url))
def test_post(self):
"""
Simple post works.
"""
endpoint = Endpoint(self.root, 'post', self.token)
requested_url = endpoint.post(endpoint.base_url)['url']
assert requested_url == 'https://httpbin.org/post'
def test_post_404(self):
"""
Post to inexistent resource raises HTTPError.
"""
endpoint = Endpoint(self.root, 'post', self.token)
with self.assertRaises(HTTPError):
endpoint.post('{}/not-a-url'.format(endpoint.base_url))
def test_delete(self):
"""
Simple delete works.
"""
endpoint = Endpoint(self.root, 'delete', self.token)
resp = endpoint.delete(endpoint.base_url)
assert resp is None
def test_delete_404(self):
"""
Delete inexistent resource raises HTTPError.
"""
endpoint = Endpoint(self.root, 'delete', self.token)
with self.assertRaises(HTTPError):
endpoint.delete('{}/not-a-url'.format(endpoint.base_url))
|
<commit_before><commit_msg>Add tests for the base endpoint HTTP methods
This tests the `try: ... except: raise finally: ...` gotcha caught
by @pocin.<commit_after>import unittest
from requests import HTTPError
from kbcstorage.base import Endpoint
class TestEndpoint(unittest.TestCase):
"""
Test Endpoint functionality.
"""
def setUp(self):
self.root = 'https://httpbin.org'
self.token = ''
def test_get(self):
"""
Simple get works.
"""
endpoint = Endpoint(self.root, 'get', self.token)
requested_url = endpoint.get(endpoint.base_url)['url']
assert requested_url == 'https://httpbin.org/get'
def test_get_404(self):
"""
Get inexistent resource raises HTTPError.
"""
endpoint = Endpoint(self.root, 'get', self.token)
with self.assertRaises(HTTPError):
endpoint.get('{}/not-a-url'.format(endpoint.base_url))
def test_post(self):
"""
Simple post works.
"""
endpoint = Endpoint(self.root, 'post', self.token)
requested_url = endpoint.post(endpoint.base_url)['url']
assert requested_url == 'https://httpbin.org/post'
def test_post_404(self):
"""
Post to inexistent resource raises HTTPError.
"""
endpoint = Endpoint(self.root, 'post', self.token)
with self.assertRaises(HTTPError):
endpoint.post('{}/not-a-url'.format(endpoint.base_url))
def test_delete(self):
"""
Simple delete works.
"""
endpoint = Endpoint(self.root, 'delete', self.token)
resp = endpoint.delete(endpoint.base_url)
assert resp is None
def test_delete_404(self):
"""
Delete inexistent resource raises HTTPError.
"""
endpoint = Endpoint(self.root, 'delete', self.token)
with self.assertRaises(HTTPError):
endpoint.delete('{}/not-a-url'.format(endpoint.base_url))
|
|
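The gotcha referenced in the commit message above is worth spelling out: a return inside a finally block discards any exception that is propagating, including one re-raised in except. A minimal sketch of the broken shape next to a safe one (illustrative only, not the kbcstorage implementation):

import requests


def delete_broken(url):
    try:
        response = requests.delete(url)
        response.raise_for_status()
    except requests.HTTPError:
        raise
    finally:
        # BUG: returning here swallows the in-flight HTTPError,
        # so callers never see the failure.
        return None


def delete_safe(url):
    response = requests.delete(url)
    response.raise_for_status()  # let HTTPError propagate to the caller
    return None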
1890fbeacb34d1067a6e12909953cf070a8321c1
|
tests/FindEpsilonRules/SimpleChainingSecondTest.py
|
tests/FindEpsilonRules/SimpleChainingSecondTest.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 20.08.2017 16:07
:Licence GNUv3
Part of grammpy-transforms
"""
from unittest import TestCase, main
from grammpy import *
from grammpy_transforms import ContextFree
class S(Nonterminal): pass
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class Rules(Rule):
rules = [
([S], [0, A]),
([S], [1, C]),
([S], [C, C]),
([A], [B]),
([B], [S]),
([B], [EPS]),
([C], [A]),
([C], [S])]
class SimpleChainingTest(TestCase):
def test_simpleChainingTest(self):
g = Grammar(terminals=[0, 1],
nonterminals=[S, A, B, C],
rules=[Rules])
n = ContextFree.find_terminals_rewritable_to_epsilon(g)
self.assertEqual(len(n), 4)
for i in [S, A, B, C]:
self.assertIn(i, n)
if __name__ == '__main__':
main()
|
Add next test of chaining when looking for nonterminals rewritable to epsilon
|
Add next test of chaining when looking for nonterminals rewritable to epsilon
|
Python
|
mit
|
PatrikValkovic/grammpy
|
Add next test of chaining when looking for nonterminals rewritable to epsilon
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 20.08.2017 16:07
:Licence GNUv3
Part of grammpy-transforms
"""
from unittest import TestCase, main
from grammpy import *
from grammpy_transforms import ContextFree
class S(Nonterminal): pass
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class Rules(Rule):
rules = [
([S], [0, A]),
([S], [1, C]),
([S], [C, C]),
([A], [B]),
([B], [S]),
([B], [EPS]),
([C], [A]),
([C], [S])]
class SimpleChainingTest(TestCase):
def test_simpleChainingTest(self):
g = Grammar(terminals=[0, 1],
nonterminals=[S, A, B, C],
rules=[Rules])
n = ContextFree.find_terminals_rewritable_to_epsilon(g)
self.assertEqual(len(n), 4)
for i in [S, A, B, C]:
self.assertIn(i, n)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add next test of chaining when looking for nonterminals rewritable to epsilon<commit_after>
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 20.08.2017 16:07
:Licence GNUv3
Part of grammpy-transforms
"""
from unittest import TestCase, main
from grammpy import *
from grammpy_transforms import ContextFree
class S(Nonterminal): pass
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class Rules(Rule):
rules = [
([S], [0, A]),
([S], [1, C]),
([S], [C, C]),
([A], [B]),
([B], [S]),
([B], [EPS]),
([C], [A]),
([C], [S])]
class SimpleChainingTest(TestCase):
def test_simpleChainingTest(self):
g = Grammar(terminals=[0, 1],
nonterminals=[S, A, B, C],
rules=[Rules])
n = ContextFree.find_terminals_rewritable_to_epsilon(g)
self.assertEqual(len(n), 4)
for i in [S, A, B, C]:
self.assertIn(i, n)
if __name__ == '__main__':
main()
|
Add next test of chaining when looking for nonterminals rewritable to epsilon#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 20.08.2017 16:07
:Licence GNUv3
Part of grammpy-transforms
"""
from unittest import TestCase, main
from grammpy import *
from grammpy_transforms import ContextFree
class S(Nonterminal): pass
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class Rules(Rule):
rules = [
([S], [0, A]),
([S], [1, C]),
([S], [C, C]),
([A], [B]),
([B], [S]),
([B], [EPS]),
([C], [A]),
([C], [S])]
class SimpleChainingTest(TestCase):
def test_simpleChainingTest(self):
g = Grammar(terminals=[0, 1],
nonterminals=[S, A, B, C],
rules=[Rules])
n = ContextFree.find_terminals_rewritable_to_epsilon(g)
self.assertEqual(len(n), 4)
for i in [S, A, B, C]:
self.assertIn(i, n)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add next test of chaining when looking for nonterminals rewritable to epsilon<commit_after>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 20.08.2017 16:07
:Licence GNUv3
Part of grammpy-transforms
"""
from unittest import TestCase, main
from grammpy import *
from grammpy_transforms import ContextFree
class S(Nonterminal): pass
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class Rules(Rule):
rules = [
([S], [0, A]),
([S], [1, C]),
([S], [C, C]),
([A], [B]),
([B], [S]),
([B], [EPS]),
([C], [A]),
([C], [S])]
class SimpleChainingTest(TestCase):
def test_simpleChainingTest(self):
g = Grammar(terminals=[0, 1],
nonterminals=[S, A, B, C],
rules=[Rules])
n = ContextFree.find_terminals_rewritable_to_epsilon(g)
self.assertEqual(len(n), 4)
for i in [S, A, B, C]:
self.assertIn(i, n)
if __name__ == '__main__':
main()
|
|
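The expected result in the test above follows from chaining: B is erasable via B -> EPS, A inherits it through A -> B, C through C -> A, and S through S -> C C. Computing that set is a standard fixed-point pass over the rules; a generic sketch (not grammpy's implementation) is:

def erasable_nonterminals(rules):
    """rules: iterable of (lhs, rhs) pairs, with 'EPS' marking the empty string."""
    erasable = set()
    changed = True
    while changed:
        changed = False
        for lhs, rhs in rules:
            if lhs not in erasable and all(s == 'EPS' or s in erasable for s in rhs):
                erasable.add(lhs)
                changed = True
    return erasable


# For the grammar in the record above this yields {'S', 'A', 'B', 'C'}.
print(erasable_nonterminals([
    ('S', ['0', 'A']), ('S', ['1', 'C']), ('S', ['C', 'C']),
    ('A', ['B']), ('B', ['S']), ('B', ['EPS']),
    ('C', ['A']), ('C', ['S']),
]))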
87babdfba0236de5d0742f9e336f8e3dbf603c65
|
temba/flows/migrations/0046_flowrun_responded_unnull.py
|
temba/flows/migrations/0046_flowrun_responded_unnull.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('flows', '0045_populate_responded'),
]
operations = [
migrations.AlterField(
model_name='flowrun',
name='responded',
field=models.BooleanField(default=False, help_text='Whether contact has responded in this run'),
),
]
|
Set FlowRun.responded to be non-nullable
|
Set FlowRun.responded to be non-nullable
|
Python
|
agpl-3.0
|
pulilab/rapidpro,reyrodrigues/EU-SMS,tsotetsi/textily-web,tsotetsi/textily-web,pulilab/rapidpro,reyrodrigues/EU-SMS,tsotetsi/textily-web,tsotetsi/textily-web,pulilab/rapidpro,ewheeler/rapidpro,ewheeler/rapidpro,tsotetsi/textily-web,pulilab/rapidpro,pulilab/rapidpro,ewheeler/rapidpro,reyrodrigues/EU-SMS,ewheeler/rapidpro
|
Set FlowRun.responded to be non-nullable
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('flows', '0045_populate_responded'),
]
operations = [
migrations.AlterField(
model_name='flowrun',
name='responded',
field=models.BooleanField(default=False, help_text='Whether contact has responded in this run'),
),
]
|
<commit_before><commit_msg>Set FlowRun.responded to be non-nullable<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('flows', '0045_populate_responded'),
]
operations = [
migrations.AlterField(
model_name='flowrun',
name='responded',
field=models.BooleanField(default=False, help_text='Whether contact has responded in this run'),
),
]
|
Set FlowRun.responded to be non-nullable# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('flows', '0045_populate_responded'),
]
operations = [
migrations.AlterField(
model_name='flowrun',
name='responded',
field=models.BooleanField(default=False, help_text='Whether contact has responded in this run'),
),
]
|
<commit_before><commit_msg>Set FlowRun.responded to be non-nullable<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('flows', '0045_populate_responded'),
]
operations = [
migrations.AlterField(
model_name='flowrun',
name='responded',
field=models.BooleanField(default=False, help_text='Whether contact has responded in this run'),
),
]
|
|
af4c6d9747197b23014ba71803da792f9e612a12
|
django_mailbox/migrations/0004_bytestring_to_unicode.py
|
django_mailbox/migrations/0004_bytestring_to_unicode.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('django_mailbox', '0003_auto_20150409_0316'),
]
operations = [
migrations.AlterField(
model_name='message',
name='eml',
field=models.FileField(verbose_name='Raw message contents', upload_to='messages', null=True, help_text='Original full content of message'),
),
migrations.AlterField(
model_name='messageattachment',
name='document',
field=models.FileField(verbose_name='Document', upload_to='mailbox_attachments/%Y/%m/%d/'),
),
]
|
Add migration to resolve inconsistency between python2 and python3 strings
|
Add migration to resolve inconsistency between python2 and python3 strings
|
Python
|
mit
|
Shekharrajak/django-mailbox,coddingtonbear/django-mailbox,ad-m/django-mailbox,leifurhauks/django-mailbox
|
Add migration to resolve inconsistency between python2 and python3 strings
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('django_mailbox', '0003_auto_20150409_0316'),
]
operations = [
migrations.AlterField(
model_name='message',
name='eml',
field=models.FileField(verbose_name='Raw message contents', upload_to='messages', null=True, help_text='Original full content of message'),
),
migrations.AlterField(
model_name='messageattachment',
name='document',
field=models.FileField(verbose_name='Document', upload_to='mailbox_attachments/%Y/%m/%d/'),
),
]
|
<commit_before><commit_msg>Add migration to resolve inconsistency between python2 and python3 strings<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('django_mailbox', '0003_auto_20150409_0316'),
]
operations = [
migrations.AlterField(
model_name='message',
name='eml',
field=models.FileField(verbose_name='Raw message contents', upload_to='messages', null=True, help_text='Original full content of message'),
),
migrations.AlterField(
model_name='messageattachment',
name='document',
field=models.FileField(verbose_name='Document', upload_to='mailbox_attachments/%Y/%m/%d/'),
),
]
|
Add migration to resolve inconsistency between python2 and python3 strings# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('django_mailbox', '0003_auto_20150409_0316'),
]
operations = [
migrations.AlterField(
model_name='message',
name='eml',
field=models.FileField(verbose_name='Raw message contents', upload_to='messages', null=True, help_text='Original full content of message'),
),
migrations.AlterField(
model_name='messageattachment',
name='document',
field=models.FileField(verbose_name='Document', upload_to='mailbox_attachments/%Y/%m/%d/'),
),
]
|
<commit_before><commit_msg>Add migration to resolve inconsistency between python2 and python3 strings<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('django_mailbox', '0003_auto_20150409_0316'),
]
operations = [
migrations.AlterField(
model_name='message',
name='eml',
field=models.FileField(verbose_name='Raw message contents', upload_to='messages', null=True, help_text='Original full content of message'),
),
migrations.AlterField(
model_name='messageattachment',
name='document',
field=models.FileField(verbose_name='Document', upload_to='mailbox_attachments/%Y/%m/%d/'),
),
]
|
|
3cedd52bb022dac3083c7de1245fd49882f39f41
|
examples/sacred/mnist/model_from_mongo.py
|
examples/sacred/mnist/model_from_mongo.py
|
"""
A simple Python API for loading Keras models from MongoDB run artifact stored
by the Sacred library.
Usage:
run_id = '5843062c4e60f9a60c9db41f'
model = get_model(get_run(run_id))
"""
from bson.objectid import ObjectId
import keras
import pymongo
from tempfile import TemporaryDirectory
mongo_client = pymongo.MongoClient()
db = mongo_client['sacred']
def list_runs(exp_name=None):
return db['default.runs'].find({}, {'_id': True})
def list_experiment_runs(exp_name):
return db['default.runs'].find(
{'experiment.name': exp_name},
{'_id': True})
def list_experiments():
return db['default.runs'].distinct('experiment.name')
def get_run(run_id):
runs = db['default.runs']
if isinstance(run_id, str):
run_id = ObjectId(run_id)
return runs.find_one(run_id)
def get_file_chunks(file_id):
chunks = db['default.chunks']
for chunk in chunks.find({'files_id': file_id}) \
.sort([('n', pymongo.ASCENDING)]):
yield chunk['data']
def find_model_artifact(run, suffix='model.h5'):
files = db['default.files']
for artifact_id in run['artifacts']:
artifact = files.find_one(artifact_id)
if artifact['filename'].endswith(suffix):
return artifact
# We stored the model as a run artifact. The artifact file is stored in
# multiple chunks which we need to put together.
def model_from_chunks(model_chunks):
# Since h5py doesn't allow reading from in-memory file-like objects,
# let's store it to a temporary file.
# http://stackoverflow.com/questions/16654251/can-h5py-load-a-file-from-a-byte-array-in-memory
# We use TemporaryDirectory instead of NamedTemporaryFile since after
# closing such file it gets deleted before Keras can load it.
with TemporaryDirectory() as temp_dir:
model_file = temp_dir + '/model.h5'
with open(model_file, 'wb') as f:
for chunk_data in model_chunks:
f.write(chunk_data)
return keras.models.load_model(model_file)
def get_model(run):
model_artifact = find_model_artifact(run)
print('Loading model:', model_artifact['filename'])
model_chunks = get_file_chunks(model_artifact['_id'])
model = model_from_chunks(model_chunks)
return model
|
Add a script to load a Keras model from MongoDB where it was stored by Sacred.
|
Add a script to load a Keras model from MongoDB where it was stored by Sacred.
|
Python
|
mit
|
bzamecnik/sanctuary
|
Add a script to load a Keras model from MongoDB where it was stored by Sacred.
|
"""
A simple Python API for loading Keras models from MongoDB run artifact stored
by the Sacred library.
Usage:
run_id = '5843062c4e60f9a60c9db41f'
model = get_model(get_run(run_id))
"""
from bson.objectid import ObjectId
import keras
import pymongo
from tempfile import TemporaryDirectory
mongo_client = pymongo.MongoClient()
db = mongo_client['sacred']
def list_runs(exp_name=None):
return db['default.runs'].find({}, {'_id': True})
def list_experiment_runs(exp_name):
return db['default.runs'].find(
{'experiment.name': exp_name},
{'_id': True})
def list_experiments():
return db['default.runs'].distinct('experiment.name')
def get_run(run_id):
runs = db['default.runs']
if isinstance(run_id, str):
run_id = ObjectId(run_id)
return runs.find_one(run_id)
def get_file_chunks(file_id):
chunks = db['default.chunks']
for chunk in chunks.find({'files_id': file_id}) \
.sort([('n', pymongo.ASCENDING)]):
yield chunk['data']
def find_model_artifact(run, suffix='model.h5'):
files = db['default.files']
for artifact_id in run['artifacts']:
artifact = files.find_one(artifact_id)
if artifact['filename'].endswith(suffix):
return artifact
# We stored the model as a run artifact. The artifact file is stored in
# multiple chunks which we need to put together.
def model_from_chunks(model_chunks):
# Since h5py doesn't allow reading from in-memory file-like objects,
# let's store it to a temporary file.
# http://stackoverflow.com/questions/16654251/can-h5py-load-a-file-from-a-byte-array-in-memory
# We use TemporaryDirectory instead of NamedTemporaryFile since after
# closing such file it gets deleted before Keras can load it.
with TemporaryDirectory() as temp_dir:
model_file = temp_dir + '/model.h5'
with open(model_file, 'wb') as f:
for chunk_data in model_chunks:
f.write(chunk_data)
return keras.models.load_model(model_file)
def get_model(run):
model_artifact = find_model_artifact(run)
print('Loading model:', model_artifact['filename'])
model_chunks = get_file_chunks(model_artifact['_id'])
model = model_from_chunks(model_chunks)
return model
|
<commit_before><commit_msg>Add a script to load a Keras model from MongoDB where it was stored by Sacred.<commit_after>
|
"""
A simple Python API for loading Keras models from MongoDB run artifact stored
by the Sacred library.
Usage:
run_id = '5843062c4e60f9a60c9db41f'
model = get_model(get_run(run_id))
"""
from bson.objectid import ObjectId
import keras
import pymongo
from tempfile import TemporaryDirectory
mongo_client = pymongo.MongoClient()
db = mongo_client['sacred']
def list_runs(exp_name=None):
return db['default.runs'].find({}, {'_id': True})
def list_experiment_runs(exp_name):
return db['default.runs'].find(
{'experiment.name': exp_name},
{'_id': True})
def list_experiments():
return db['default.runs'].distinct('experiment.name')
def get_run(run_id):
runs = db['default.runs']
if isinstance(run_id, str):
run_id = ObjectId(run_id)
return runs.find_one(run_id)
def get_file_chunks(file_id):
chunks = db['default.chunks']
for chunk in chunks.find({'files_id': file_id}) \
.sort([('n', pymongo.ASCENDING)]):
yield chunk['data']
def find_model_artifact(run, suffix='model.h5'):
files = db['default.files']
for artifact_id in run['artifacts']:
artifact = files.find_one(artifact_id)
if artifact['filename'].endswith(suffix):
return artifact
# We stored the model as a run artifact. The artifact file is stored in
# multiple chunks which we need to put together.
def model_from_chunks(model_chunks):
# Since h5py doesn't allow reading from in-memory file-like objects,
# let's store it to a temporary file.
# http://stackoverflow.com/questions/16654251/can-h5py-load-a-file-from-a-byte-array-in-memory
# We use TemporaryDirectory instead of NamedTemporaryFile since after
# closing such file it gets deleted before Keras can load it.
with TemporaryDirectory() as temp_dir:
model_file = temp_dir + '/model.h5'
with open(model_file, 'wb') as f:
for chunk_data in model_chunks:
f.write(chunk_data)
return keras.models.load_model(model_file)
def get_model(run):
model_artifact = find_model_artifact(run)
print('Loading model:', model_artifact['filename'])
model_chunks = get_file_chunks(model_artifact['_id'])
model = model_from_chunks(model_chunks)
return model
|
Add a script to load a Keras model from MongoDB where it was stored by Sacred."""
A simple Python API for loading Keras models from MongoDB run artifact stored
by the Sacred library.
Usage:
run_id = '5843062c4e60f9a60c9db41f'
model = get_model(get_run(run_id))
"""
from bson.objectid import ObjectId
import keras
import pymongo
from tempfile import TemporaryDirectory
mongo_client = pymongo.MongoClient()
db = mongo_client['sacred']
def list_runs(exp_name=None):
return db['default.runs'].find({}, {'_id': True})
def list_experiment_runs(exp_name):
return db['default.runs'].find(
{'experiment.name': exp_name},
{'_id': True})
def list_experiments():
return db['default.runs'].distinct('experiment.name')
def get_run(run_id):
runs = db['default.runs']
if isinstance(run_id, str):
run_id = ObjectId(run_id)
return runs.find_one(run_id)
def get_file_chunks(file_id):
chunks = db['default.chunks']
for chunk in chunks.find({'files_id': file_id}) \
.sort([('n', pymongo.ASCENDING)]):
yield chunk['data']
def find_model_artifact(run, suffix='model.h5'):
files = db['default.files']
for artifact_id in run['artifacts']:
artifact = files.find_one(artifact_id)
if artifact['filename'].endswith(suffix):
return artifact
# We stored the model as a run artifact. The artifact file is stored in
# multiple chunks which we need to put together.
def model_from_chunks(model_chunks):
# Since h5py doesn't allow reading from in-memory file-like objects,
# let's store it to a temporary file.
# http://stackoverflow.com/questions/16654251/can-h5py-load-a-file-from-a-byte-array-in-memory
# We use TemporaryDirectory instead of NamedTemporaryFile since after
# closing such file it gets deleted before Keras can load it.
with TemporaryDirectory() as temp_dir:
model_file = temp_dir + '/model.h5'
with open(model_file, 'wb') as f:
for chunk_data in model_chunks:
f.write(chunk_data)
return keras.models.load_model(model_file)
def get_model(run):
model_artifact = find_model_artifact(run)
print('Loading model:', model_artifact['filename'])
model_chunks = get_file_chunks(model_artifact['_id'])
model = model_from_chunks(model_chunks)
return model
|
<commit_before><commit_msg>Add a script to load a Keras model from MongoDB where it was stored by Sacred.<commit_after>"""
A simple Python API for loading Keras models from MongoDB run artifact stored
by the Sacred library.
Usage:
run_id = '5843062c4e60f9a60c9db41f'
model = get_model(get_run(run_id))
"""
from bson.objectid import ObjectId
import keras
import pymongo
from tempfile import TemporaryDirectory
mongo_client = pymongo.MongoClient()
db = mongo_client['sacred']
def list_runs(exp_name=None):
return db['default.runs'].find({}, {'_id': True})
def list_experiment_runs(exp_name):
return db['default.runs'].find(
{'experiment.name': exp_name},
{'_id': True})
def list_experiments():
return db['default.runs'].distinct('experiment.name')
def get_run(run_id):
runs = db['default.runs']
if isinstance(run_id, str):
run_id = ObjectId(run_id)
return runs.find_one(run_id)
def get_file_chunks(file_id):
chunks = db['default.chunks']
for chunk in chunks.find({'files_id': file_id}) \
.sort([('n', pymongo.ASCENDING)]):
yield chunk['data']
def find_model_artifact(run, suffix='model.h5'):
files = db['default.files']
for artifact_id in run['artifacts']:
artifact = files.find_one(artifact_id)
if artifact['filename'].endswith(suffix):
return artifact
# We stored the model as a run artifact. The artifact file is stored in
# multiple chunks which we need to put together.
def model_from_chunks(model_chunks):
# Since h5py doesn't allow reading from in-memory file-like objects,
# let's store it to a temporary file.
# http://stackoverflow.com/questions/16654251/can-h5py-load-a-file-from-a-byte-array-in-memory
# We use TemporaryDirectory instead of NamedTemporaryFile since after
# closing such file it gets deleted before Keras can load it.
with TemporaryDirectory() as temp_dir:
model_file = temp_dir + '/model.h5'
with open(model_file, 'wb') as f:
for chunk_data in model_chunks:
f.write(chunk_data)
return keras.models.load_model(model_file)
def get_model(run):
model_artifact = find_model_artifact(run)
print('Loading model:', model_artifact['filename'])
model_chunks = get_file_chunks(model_artifact['_id'])
model = model_from_chunks(model_chunks)
return model
|
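A rough sketch of how the helpers above might be driven end to end, assuming a local MongoDB with the Sacred 'default' collections and at least one finished run; the run id below is a placeholder, not a real document:
for name in list_experiments():
    print('experiment:', name)
run = get_run('5843062c4e60f9a60c9db41f')  # placeholder ObjectId string
model = get_model(run)
model.summary()  # standard Keras model inspection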
|
48dd9e5de783297a99278a347f20914a4e4053a8
|
efs_cache_cleaner/cache_cleaner.py
|
efs_cache_cleaner/cache_cleaner.py
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import os
import time
import sys
def delete(path):
print(path)
try:
os.unlink(path)
except PermissionError as err:
print(f"Failed to delete {path}: {err}", file=sys.stderr)
def delete_directory_if_empty(path):
try:
os.rmdir(path)
print(path)
except OSError:
# TODO check that it fails because the directory is not empty
pass
def main():
now = time.time()
max_age = 1 * 24 * 60 * 60
for root, _, filenames in os.walk("/tmp"):
for f in filenames:
path = os.path.join(root, f)
last_access_time = os.stat(path).st_atime
if now - last_access_time > max_age:
delete(path)
for root, dirnames, _ in os.walk("/tmp"):
for directory in dirnames:
path = os.path.join(root, directory)
delete_directory_if_empty(path)
if __name__ == "__main__":
main()
|
Remove old files and empty directories
|
Remove old files and empty directories
|
Python
|
mit
|
wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api
|
Remove old files and empty directories
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import os
import time
import sys
def delete(path):
print(path)
try:
os.unlink(path)
except PermissionError as err:
print(f"Failed to delete {path}: {err}", file=sys.stderr)
def delete_directory_if_empty(path):
try:
os.rmdir(path)
print(path)
except OSError:
# TODO check that it fails because the directory is not empty
pass
def main():
now = time.time()
max_age = 1 * 24 * 60 * 60
for root, _, filenames in os.walk("/tmp"):
for f in filenames:
path = os.path.join(root, f)
last_access_time = os.stat(path).st_atime
if now - last_access_time > max_age:
delete(path)
for root, dirnames, _ in os.walk("/tmp"):
for directory in dirnames:
path = os.path.join(root, directory)
delete_directory_if_empty(path)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Remove old files and empty directories<commit_after>
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import os
import time
import sys
def delete(path):
print(path)
try:
os.unlink(path)
except PermissionError as err:
print(f"Failed to delete {path}: {err}", file=sys.stderr)
def delete_directory_if_empty(path):
try:
os.rmdir(path)
print(path)
except OSError:
# TODO check that it fails because the directory is not empty
pass
def main():
now = time.time()
max_age = 1 * 24 * 60 * 60
for root, _, filenames in os.walk("/tmp"):
for f in filenames:
path = os.path.join(root, f)
last_access_time = os.stat(path).st_atime
if now - last_access_time > max_age:
delete(path)
for root, dirnames, _ in os.walk("/tmp"):
for directory in dirnames:
path = os.path.join(root, directory)
delete_directory_if_empty(path)
if __name__ == "__main__":
main()
|
Remove old files and empty directories#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import os
import time
import sys
def delete(path):
print(path)
try:
os.unlink(path)
except PermissionError as err:
print(f"Failed to delete {path}: {err}", file=sys.stderr)
def delete_directory_if_empty(path):
try:
os.rmdir(path)
print(path)
except OSError:
# TODO check that it fails because the directory is not empty
pass
def main():
now = time.time()
max_age = 1 * 24 * 60 * 60
for root, _, filenames in os.walk("/tmp"):
for f in filenames:
path = os.path.join(root, f)
last_access_time = os.stat(path).st_atime
if now - last_access_time > max_age:
delete(path)
for root, dirnames, _ in os.walk("/tmp"):
for directory in dirnames:
path = os.path.join(root, directory)
delete_directory_if_empty(path)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Remove old files and empty directories<commit_after>#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import os
import time
import sys
def delete(path):
print(path)
try:
os.unlink(path)
except PermissionError as err:
print(f"Failed to delete {path}: {err}", file=sys.stderr)
def delete_directory_if_empty(path):
try:
os.rmdir(path)
print(path)
except OSError:
# TODO check that it fails because the directory is not empty
pass
def main():
now = time.time()
max_age = 1 * 24 * 60 * 60
for root, _, filenames in os.walk("/tmp"):
for f in filenames:
path = os.path.join(root, f)
last_access_time = os.stat(path).st_atime
if now - last_access_time > max_age:
delete(path)
for root, dirnames, _ in os.walk("/tmp"):
for directory in dirnames:
path = os.path.join(root, directory)
delete_directory_if_empty(path)
if __name__ == "__main__":
main()
|
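If the same sweep ever needed to run against a different mount point or age threshold, a lightly parameterised variant is straightforward. The sketch below adds root, max_age_days and dry_run arguments, none of which the script above actually accepts:
import os
import time
def clean(root="/tmp", max_age_days=1, dry_run=True):
    cutoff = time.time() - max_age_days * 24 * 60 * 60
    for dirpath, _, filenames in os.walk(root):
        for name in filenames:
            path = os.path.join(dirpath, name)
            if os.stat(path).st_atime < cutoff:
                print(path)
                if not dry_run:
                    os.unlink(path)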
|
4c3a7cee4993ce9a76a084e89bcdf9f0931375a5
|
modules/nagios/files/usr/lib/nagios/plugins/check_file_size.py
|
modules/nagios/files/usr/lib/nagios/plugins/check_file_size.py
|
#!/usr/bin/env python
import sys
import os.path
import datetime
import optparse
DESC="Nagios check: WARN if a file is smaller than a minimum number of bytes."
def main(argv):
parser = optparse.OptionParser(description=DESC)
parser.add_option('-p', '--path', type='string', help="Path to check")
parser.add_option('-s', '--size', type='string', help="Minimum file size. Supports B, K, M and G suffixes (default B)")
opts, args = parser.parse_args()
if not opts.path or not opts.size:
parser.print_help()
return 1
if not os.path.exists(opts.path):
print "WARNING: Path <%r> does not exist" % opts.path
return 1
import re
def raise_(e):
"""lambdas cannot contain statements, so this is an expression which can be used to raise an exception."""
raise e
rules = (
(
'^\d+B$',
lambda size: int(re.sub('B', '', size))
),
(
'^\d+K$',
lambda size: int(re.sub('K', '', size)) * 1024
),
(
'^\d+M$',
lambda size: int(re.sub('M', '', size)) * 1024 * 1024
),
(
'^\d+G$',
lambda size: int(re.sub('G', '', size)) * 1024 * 1024 * 1024
),
(
'^\d+$',
lambda size: int(size)
),
(
'.*',
lambda size: raise_(Exception("Unknown size value <%s>" % size))
),
)
def parseSize(size):
for reRule, multiplierRule in rules:
if re.search(reRule, size):
return multiplierRule(size)
minimum_size_in_bytes = parseSize(opts.size)
file_size = os.stat(opts.path).st_size
if file_size < minimum_size_in_bytes:
print "WARNING: Path <%r> (%dB) is less than %dB" \
% (opts.path, file_size, minimum_size_in_bytes)
return 1
else:
print "OK: Path <%r> (%dB) is larger than or equal to %dB" \
% (opts.path, file_size, minimum_size_in_bytes)
return 0
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
|
Add a check to assert a file is a minimum size
|
Add a check to assert a file is a minimum size
EFG generates an extract of the IL2 data by extracting and converting it
each night to an IL0 representation.
This check is intended to be used to verify that the file is generated
and looks reasonable.
|
Python
|
mit
|
alphagov/govuk-puppet,alphagov/govuk-puppet,alphagov/govuk-puppet,alphagov/govuk-puppet,alphagov/govuk-puppet,alphagov/govuk-puppet
|
Add a check to assert a file is a minimum size
EFG generates an extract of the IL2 data by extracting and converting it
each night to an IL0 representation.
This check is intended to be used to verify that the file is generated
and looks reasonable.
|
#!/usr/bin/env python
import sys
import os.path
import datetime
import optparse
DESC="Nagios check: WARN if a file is smaller than a minimum number of bytes."
def main(argv):
parser = optparse.OptionParser(description=DESC)
parser.add_option('-p', '--path', type='string', help="Path to check")
parser.add_option('-s', '--size', type='string', help="Minimum file size. Supports B, K, M and G suffixes (default B)")
opts, args = parser.parse_args()
if not opts.path or not opts.size:
parser.print_help()
return 1
if not os.path.exists(opts.path):
print "WARNING: Path <%r> does not exist" % opts.path
return 1
import re
def raise_(e):
"""lambdas cannot contain statements, so this is an expression which can be used to raise an exception."""
raise e
rules = (
(
'^\d+B$',
lambda size: int(re.sub('B', '', size))
),
(
'^\d+K$',
lambda size: int(re.sub('K', '', size)) * 1024
),
(
'^\d+M$',
lambda size: int(re.sub('M', '', size)) * 1024 * 1024
),
(
'^\d+G$',
lambda size: int(re.sub('G', '', size)) * 1024 * 1024 * 1024
),
(
'^\d+$',
lambda size: int(size)
),
(
'.*',
lambda size: raise_(Exception("Unknown size value <%s>" % size))
),
)
def parseSize(size):
for reRule, multiplierRule in rules:
if re.search(reRule, size):
return multiplierRule(size)
minimum_size_in_bytes = parseSize(opts.size)
file_size = os.stat(opts.path).st_size
if file_size < minimum_size_in_bytes:
print "WARNING: Path <%r> (%dB) is less than %dB" \
% (opts.path, file_size, minimum_size_in_bytes)
return 1
else:
print "OK: Path <%r> (%dB) is larger than or equal to %dB" \
% (opts.path, file_size, minimum_size_in_bytes)
return 0
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
|
<commit_before><commit_msg>Add a check to assert a file is a minimum size
EFG generates an extract of the IL2 data by extracting and converting it
each night to an IL0 representation.
This check is intended to be used to verify that the file is generated
and looks reasonable.<commit_after>
|
#!/usr/bin/env python
import sys
import os.path
import datetime
import optparse
DESC="Nagios check: WARN if a file is smaller than a minimum number of bytes."
def main(argv):
parser = optparse.OptionParser(description=DESC)
parser.add_option('-p', '--path', type='string', help="Path to check")
parser.add_option('-s', '--size', type='string', help="Minimum file size. Supports B, K, M and G suffixes (default B)")
opts, args = parser.parse_args()
if not opts.path or not opts.size:
parser.print_help()
return 1
if not os.path.exists(opts.path):
print "WARNING: Path <%r> does not exist" % opts.path
return 1
import re
def raise_(e):
"""lambdas cannot contain statements, so this is an expression which can be used to raise an exception."""
raise e
rules = (
(
'^\d+B$',
lambda size: int(re.sub('B', '', size))
),
(
'^\d+K$',
lambda size: int(re.sub('K', '', size)) * 1024
),
(
'^\d+M$',
lambda size: int(re.sub('M', '', size)) * 1024 * 1024
),
(
'^\d+G$',
lambda size: int(re.sub('G', '', size)) * 1024 * 1024 * 1024
),
(
'^\d+$',
lambda size: int(size)
),
(
'.*',
lambda size: raise_(Exception("Unknown size value <%s>" % size))
),
)
def parseSize(size):
for reRule, multiplierRule in rules:
if re.search(reRule, size):
return multiplierRule(size)
minimum_size_in_bytes = parseSize(opts.size)
file_size = os.stat(opts.path).st_size
if file_size < minimum_size_in_bytes:
print "WARNING: Path <%r> (%dB) is less than %dB" \
% (opts.path, file_size, minimum_size_in_bytes)
return 1
else:
print "OK: Path <%r> (%dB) is larger than or equal to %dB" \
% (opts.path, file_size, minimum_size_in_bytes)
return 0
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
|
Add a check to assert a file is a minimum size
EFG generates an extract of the IL2 data by extracting and converting it
each night to an IL0 representation.
This check is intended to be used to verify that the file is generated
and looks reasonable.#!/usr/bin/env python
import sys
import os.path
import datetime
import optparse
DESC="Nagios check: WARN if a file is smaller than a minimum number of bytes."
def main(argv):
parser = optparse.OptionParser(description=DESC)
parser.add_option('-p', '--path', type='string', help="Path to check")
parser.add_option('-s', '--size', type='string', help="Minimum file size. Supports B, K, M and G suffixes (default B)")
opts, args = parser.parse_args()
if not opts.path or not opts.size:
parser.print_help()
return 1
if not os.path.exists(opts.path):
print "WARNING: Path <%r> does not exist" % opts.path
return 1
import re
def raise_(e):
"""lambdas cannot contain statements, so this is an expression which can be used to raise an exception."""
raise e
rules = (
(
'^\d+B$',
lambda size: int(re.sub('B', '', size))
),
(
'^\d+K$',
lambda size: int(re.sub('K', '', size)) * 1024
),
(
'^\d+M$',
lambda size: int(re.sub('M', '', size)) * 1024 * 1024
),
(
'^\d+G$',
lambda size: int(re.sub('G', '', size)) * 1024 * 1024 * 1024
),
(
'^\d+$',
lambda size: int(size)
),
(
'.*',
lambda size: raise_(Exception("Unknown size value <%s>" % size))
),
)
def parseSize(size):
for reRule, multiplierRule in rules:
if re.search(reRule, size):
return multiplierRule(size)
minimum_size_in_bytes = parseSize(opts.size)
file_size = os.stat(opts.path).st_size
if file_size < minimum_size_in_bytes:
print "WARNING: Path <%r> (%dB) is less than %dB" \
% (opts.path, file_size, minimum_size_in_bytes)
return 1
else:
print "OK: Path <%r> (%dB) is larger than or equal to %dB" \
% (opts.path, file_size, minimum_size_in_bytes)
return 0
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
|
<commit_before><commit_msg>Add a check to assert a file is a minimum size
EFG generates an extract of the IL2 data by extracting and converting it
each night to an IL0 representation.
This check is intended to be used to verify that the file is generated
and looks reasonable.<commit_after>#!/usr/bin/env python
import sys
import os.path
import datetime
import optparse
DESC="Nagios check: WARN if a file is smaller than a minimum number of bytes."
def main(argv):
parser = optparse.OptionParser(description=DESC)
parser.add_option('-p', '--path', type='string', help="Path to check")
parser.add_option('-s', '--size', type='string', help="Minimum file size. Supports B, K, M and G suffixes (default B)")
opts, args = parser.parse_args()
if not opts.path or not opts.size:
parser.print_help()
return 1
if not os.path.exists(opts.path):
print "WARNING: Path <%r> does not exist" % opts.path
return 1
import re
def raise_(e):
"""lambdas cannot contain statements, so this is an expression which can be used to raise an exception."""
raise e
rules = (
(
'^\d+B$',
lambda size: int(re.sub('B', '', size))
),
(
'^\d+K$',
lambda size: int(re.sub('K', '', size)) * 1024
),
(
'^\d+M$',
lambda size: int(re.sub('M', '', size)) * 1024 * 1024
),
(
'^\d+G$',
lambda size: int(re.sub('G', '', size)) * 1024 * 1024 * 1024
),
(
'^\d+$',
lambda size: int(size)
),
(
'.*',
lambda size: raise_(Exception("Unknown size value <%s>" % size))
),
)
def parseSize(size):
for reRule, multiplierRule in rules:
if re.search(reRule, size):
return multiplierRule(size)
minimum_size_in_bytes = parseSize(opts.size)
file_size = os.stat(opts.path).st_size
if file_size < minimum_size_in_bytes:
print "WARNING: Path <%r> (%dB) is less than %dB" \
% (opts.path, file_size, minimum_size_in_bytes)
return 1
else:
print "OK: Path <%r> (%dB) is larger than or equal to %dB" \
% (opts.path, file_size, minimum_size_in_bytes)
return 0
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
|
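For reference, Nagios would typically invoke the plugin with something like check_file_size.py -p /srv/efg/extract.csv -s 10M, where both the path and the threshold are made-up examples. The suffix handling can also be written more compactly; a rough equivalent of parseSize, shown only as a sketch:
import re
def parse_size(size):
    match = re.match(r'^(\d+)([BKMG]?)$', size)
    if not match:
        raise ValueError("Unknown size value <%s>" % size)
    number, suffix = match.groups()
    factors = {'': 1, 'B': 1, 'K': 1024, 'M': 1024 ** 2, 'G': 1024 ** 3}
    return int(number) * factors[suffix]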
|
4816a7e7c24474e37d9dfe4ecde8079c92eb66e1
|
doc/examples/segmentation/plot_compact_watershed.py
|
doc/examples/segmentation/plot_compact_watershed.py
|
"""
=============================================
Find Regular Segments Using Compact Watershed
=============================================
The watershed transform is commonly used as a starting point for many
segmentation algorithms. However, without a judicious choice of seeds, it
can produce very uneven fragment sizes, which can be difficult to deal with
in downstream analyses.
The *compact* watershed transform remedies this by favoring seeds that are
close to the pixel being considered.
Both are implemented in the :py:func:`skimage.morphology.watershed` function.
To use the compact form, simply pass a ``compactness`` value greater than 0.
"""
import numpy as np
from skimage import data, util, filters, color
from skimage.morphology import watershed
import matplotlib.pyplot as plt
coins = data.coins()
edges = filters.sobel(coins)
grid = util.regular_grid(coins.shape, n_points=468)
seeds = np.zeros(coins.shape, dtype=int)
seeds[grid] = np.arange(seeds[grid].size).reshape(seeds[grid].shape) + 1
w0 = watershed(edges, seeds)
w1 = watershed(edges, seeds, compactness=0.01)
fig, (ax0, ax1) = plt.subplots(1, 2)
ax0.imshow(color.label2rgb(w0, coins))
ax0.set_title('Classical watershed')
ax1.imshow(color.label2rgb(w1, coins))
ax1.set_title('Compact watershed')
plt.show()
|
Add compact watershed gallery example
|
Add compact watershed gallery example
|
Python
|
bsd-3-clause
|
rjeli/scikit-image,paalge/scikit-image,vighneshbirodkar/scikit-image,vighneshbirodkar/scikit-image,vighneshbirodkar/scikit-image,paalge/scikit-image,rjeli/scikit-image,rjeli/scikit-image,paalge/scikit-image
|
Add compact watershed gallery example
|
"""
=============================================
Find Regular Segments Using Compact Watershed
=============================================
The watershed transform is commonly used as a starting point for many
segmentation algorithms. However, without a judicious choice of seeds, it
can produce very uneven fragment sizes, which can be difficult to deal with
in downstream analyses.
The *compact* watershed transform remedies this by favoring seeds that are
close to the pixel being considered.
Both are implemented in the :py:func:`skimage.morphology.watershed` function.
To use the compact form, simply pass a ``compactness`` value greater than 0.
"""
import numpy as np
from skimage import data, util, filters, color
from skimage.morphology import watershed
import matplotlib.pyplot as plt
coins = data.coins()
edges = filters.sobel(coins)
grid = util.regular_grid(coins.shape, n_points=468)
seeds = np.zeros(coins.shape, dtype=int)
seeds[grid] = np.arange(seeds[grid].size).reshape(seeds[grid].shape) + 1
w0 = watershed(edges, seeds)
w1 = watershed(edges, seeds, compactness=0.01)
fig, (ax0, ax1) = plt.subplots(1, 2)
ax0.imshow(color.label2rgb(w0, coins))
ax0.set_title('Classical watershed')
ax1.imshow(color.label2rgb(w1, coins))
ax1.set_title('Compact watershed')
plt.show()
|
<commit_before><commit_msg>Add compact watershed gallery example<commit_after>
|
"""
=============================================
Find Regular Segments Using Compact Watershed
=============================================
The watershed transform is commonly used as a starting point for many
segmentation algorithms. However, without a judicious choice of seeds, it
can produce very uneven fragment sizes, which can be difficult to deal with
in downstream analyses.
The *compact* watershed transform remedies this by favoring seeds that are
close to the pixel being considered.
Both are implemented in the :py:func:`skimage.morphology.watershed` function.
To use the compact form, simply pass a ``compactness`` value greater than 0.
"""
import numpy as np
from skimage import data, util, filters, color
from skimage.morphology import watershed
import matplotlib.pyplot as plt
coins = data.coins()
edges = filters.sobel(coins)
grid = util.regular_grid(coins.shape, n_points=468)
seeds = np.zeros(coins.shape, dtype=int)
seeds[grid] = np.arange(seeds[grid].size).reshape(seeds[grid].shape) + 1
w0 = watershed(edges, seeds)
w1 = watershed(edges, seeds, compactness=0.01)
fig, (ax0, ax1) = plt.subplots(1, 2)
ax0.imshow(color.label2rgb(w0, coins))
ax0.set_title('Classical watershed')
ax1.imshow(color.label2rgb(w1, coins))
ax1.set_title('Compact watershed')
plt.show()
|
Add compact watershed gallery example"""
=============================================
Find Regular Segments Using Compact Watershed
=============================================
The watershed transform is commonly used as a starting point for many
segmentation algorithms. However, without a judicious choice of seeds, it
can produce very uneven fragment sizes, which can be difficult to deal with
in downstream analyses.
The *compact* watershed transform remedies this by favoring seeds that are
close to the pixel being considered.
Both are implemented in the :py:func:`skimage.morphology.watershed` function.
To use the compact form, simply pass a ``compactness`` value greater than 0.
"""
import numpy as np
from skimage import data, util, filters, color
from skimage.morphology import watershed
import matplotlib.pyplot as plt
coins = data.coins()
edges = filters.sobel(coins)
grid = util.regular_grid(coins.shape, n_points=468)
seeds = np.zeros(coins.shape, dtype=int)
seeds[grid] = np.arange(seeds[grid].size).reshape(seeds[grid].shape) + 1
w0 = watershed(edges, seeds)
w1 = watershed(edges, seeds, compactness=0.01)
fig, (ax0, ax1) = plt.subplots(1, 2)
ax0.imshow(color.label2rgb(w0, coins))
ax0.set_title('Classical watershed')
ax1.imshow(color.label2rgb(w1, coins))
ax1.set_title('Compact watershed')
plt.show()
|
<commit_before><commit_msg>Add compact watershed gallery example<commit_after>"""
=============================================
Find Regular Segments Using Compact Watershed
=============================================
The watershed transform is commonly used as a starting point for many
segmentation algorithms. However, without a judicious choice of seeds, it
can produce very uneven fragment sizes, which can be difficult to deal with
in downstream analyses.
The *compact* watershed transform remedies this by favoring seeds that are
close to the pixel being considered.
Both are implemented in the :py:func:`skimage.morphology.watershed` function.
To use the compact form, simply pass a ``compactness`` value greater than 0.
"""
import numpy as np
from skimage import data, util, filters, color
from skimage.morphology import watershed
import matplotlib.pyplot as plt
coins = data.coins()
edges = filters.sobel(coins)
grid = util.regular_grid(coins.shape, n_points=468)
seeds = np.zeros(coins.shape, dtype=int)
seeds[grid] = np.arange(seeds[grid].size).reshape(seeds[grid].shape) + 1
w0 = watershed(edges, seeds)
w1 = watershed(edges, seeds, compactness=0.01)
fig, (ax0, ax1) = plt.subplots(1, 2)
ax0.imshow(color.label2rgb(w0, coins))
ax0.set_title('Classical watershed')
ax1.imshow(color.label2rgb(w1, coins))
ax1.set_title('Compact watershed')
plt.show()
|
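To get a feel for the compactness parameter it can help to sweep a few values. The sketch below reuses the same data and seeding as the gallery script; the particular values 0, 0.001 and 0.1 are arbitrary choices for illustration:
import numpy as np
import matplotlib.pyplot as plt
from skimage import data, util, filters, color
from skimage.morphology import watershed
coins = data.coins()
edges = filters.sobel(coins)
grid = util.regular_grid(coins.shape, n_points=468)
seeds = np.zeros(coins.shape, dtype=int)
seeds[grid] = np.arange(seeds[grid].size).reshape(seeds[grid].shape) + 1
fig, axes = plt.subplots(1, 3)
for ax, compactness in zip(axes, (0, 0.001, 0.1)):
    labels = watershed(edges, seeds, compactness=compactness)
    ax.imshow(color.label2rgb(labels, coins))
    ax.set_title('compactness=%s' % compactness)
plt.show()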
|
0b0c0269b6fbd14aac97d68fae35cefb0b2e4578
|
alembic/versions/283afd3c9a32_add_jingle_interval.py
|
alembic/versions/283afd3c9a32_add_jingle_interval.py
|
"""Add jingle interval
Revision ID: 283afd3c9a32
Revises: 53954e77cafc
Create Date: 2019-06-26 12:48:16.180224
"""
# revision identifiers, used by Alembic.
revision = '283afd3c9a32'
down_revision = '53954e77cafc'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('radio_station', sa.Column('jingle_interval', sa.Integer(), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('radio_station', 'jingle_interval')
### end Alembic commands ###
|
Add configurable jingle interval migration
|
Add configurable jingle interval migration
|
Python
|
agpl-3.0
|
rootio/rootio_web,rootio/rootio_web,rootio/rootio_web,rootio/rootio_web
|
Add configurable jingle interval migration
|
"""Add jingle interval
Revision ID: 283afd3c9a32
Revises: 53954e77cafc
Create Date: 2019-06-26 12:48:16.180224
"""
# revision identifiers, used by Alembic.
revision = '283afd3c9a32'
down_revision = '53954e77cafc'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('radio_station', sa.Column('jingle_interval', sa.Integer(), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('radio_station', 'jingle_interval')
### end Alembic commands ###
|
<commit_before><commit_msg>Add configurable jingle interval migration<commit_after>
|
"""Add jingle interval
Revision ID: 283afd3c9a32
Revises: 53954e77cafc
Create Date: 2019-06-26 12:48:16.180224
"""
# revision identifiers, used by Alembic.
revision = '283afd3c9a32'
down_revision = '53954e77cafc'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('radio_station', sa.Column('jingle_interval', sa.Integer(), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('radio_station', 'jingle_interval')
### end Alembic commands ###
|
Add configurable jingle interval migration"""Add jingle interval
Revision ID: 283afd3c9a32
Revises: 53954e77cafc
Create Date: 2019-06-26 12:48:16.180224
"""
# revision identifiers, used by Alembic.
revision = '283afd3c9a32'
down_revision = '53954e77cafc'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('radio_station', sa.Column('jingle_interval', sa.Integer(), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('radio_station', 'jingle_interval')
### end Alembic commands ###
|
<commit_before><commit_msg>Add configurable jingle interval migration<commit_after>"""Add jingle interval
Revision ID: 283afd3c9a32
Revises: 53954e77cafc
Create Date: 2019-06-26 12:48:16.180224
"""
# revision identifiers, used by Alembic.
revision = '283afd3c9a32'
down_revision = '53954e77cafc'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('radio_station', sa.Column('jingle_interval', sa.Integer(), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('radio_station', 'jingle_interval')
### end Alembic commands ###
|
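On the model side, the column this migration adds would be declared along the lines of the sketch below; the class name, the declarative style and the "minutes between jingles" semantics are assumptions about the rootio codebase, not facts taken from it:
from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class RadioStation(Base):
    __tablename__ = 'radio_station'  # matches the table touched by the migration
    id = Column(Integer, primary_key=True)
    jingle_interval = Column(Integer, nullable=True)  # assumed: minutes between jingles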
|
eaa5bad75434ac050f438dff730936164106bfa9
|
designate/storage/impl_sqlalchemy/migrate_repo/versions/066_add_update_status_index.py
|
designate/storage/impl_sqlalchemy/migrate_repo/versions/066_add_update_status_index.py
|
# Copyright (c) 2015 Rackspace Inc.
#
# Author: Tim Simmons <tim.simmons@rackspace.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Index, MetaData, Table
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
meta = MetaData()
def index_exists(index):
table = index[1]._get_table()
cols = sorted([str(x).split('.')[1] for x in index[1:]])
for idx in table.indexes:
if sorted(idx.columns.keys()) == cols:
return True
return False
def upgrade(migrate_engine):
meta.bind = migrate_engine
records_table = Table('records', meta, autoload=True)
indices = [
['update_status_index', records_table.c.status,
records_table.c.domain_id, records_table.c.tenant_id,
records_table.c.created_at, records_table.c.serial]
]
for ind in indices:
if not index_exists(ind):
index = Index(*ind)
index.create(migrate_engine)
|
Add an index to speed up update_status
|
Add an index to speed up update_status
This adds an index that speeds up the update_status method in
designate-central
Change-Id: I62600ebbf066dc746a696263f0e72ca373076353
Closes-Bug: 1445115
|
Python
|
apache-2.0
|
cneill/designate-testing,ramsateesh/designate,grahamhayes/designate,cneill/designate,grahamhayes/designate,tonyli71/designate,cneill/designate,kiall/designate-py3,ionrock/designate,ionrock/designate,ionrock/designate,muraliselva10/designate,ramsateesh/designate,grahamhayes/designate,kiall/designate-py3,openstack/designate,kiall/designate-py3,tonyli71/designate,openstack/designate,cneill/designate,cneill/designate-testing,tonyli71/designate,muraliselva10/designate,kiall/designate-py3,kiall/designate-py3,cneill/designate,ramsateesh/designate,cneill/designate,muraliselva10/designate,cneill/designate-testing,openstack/designate
|
Add an index to speed up update_status
This adds an index that speeds up the update_status method in
designate-central
Change-Id: I62600ebbf066dc746a696263f0e72ca373076353
Closes-Bug: 1445115
|
# Copyright (c) 2015 Rackspace Inc.
#
# Author: Tim Simmons <tim.simmons@rackspace.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Index, MetaData, Table
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
meta = MetaData()
def index_exists(index):
table = index[1]._get_table()
cols = sorted([str(x).split('.')[1] for x in index[1:]])
for idx in table.indexes:
if sorted(idx.columns.keys()) == cols:
return True
return False
def upgrade(migrate_engine):
meta.bind = migrate_engine
records_table = Table('records', meta, autoload=True)
indices = [
['update_status_index', records_table.c.status,
records_table.c.domain_id, records_table.c.tenant_id,
records_table.c.created_at, records_table.c.serial]
]
for ind in indices:
if not index_exists(ind):
index = Index(*ind)
index.create(migrate_engine)
|
<commit_before><commit_msg>Add an index to speed up update_status
This adds an index that speeds up the update_status method in
designate-central
Change-Id: I62600ebbf066dc746a696263f0e72ca373076353
Closes-Bug: 1445115<commit_after>
|
# Copyright (c) 2015 Rackspace Inc.
#
# Author: Tim Simmons <tim.simmons@rackspace.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Index, MetaData, Table
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
meta = MetaData()
def index_exists(index):
table = index[1]._get_table()
cols = sorted([str(x).split('.')[1] for x in index[1:]])
for idx in table.indexes:
if sorted(idx.columns.keys()) == cols:
return True
return False
def upgrade(migrate_engine):
meta.bind = migrate_engine
records_table = Table('records', meta, autoload=True)
indices = [
['update_status_index', records_table.c.status,
records_table.c.domain_id, records_table.c.tenant_id,
records_table.c.created_at, records_table.c.serial]
]
for ind in indices:
if not index_exists(ind):
index = Index(*ind)
index.create(migrate_engine)
|
Add an index to speed up update_status
This adds an index that speeds up the update_status method in
designate-central
Change-Id: I62600ebbf066dc746a696263f0e72ca373076353
Closes-Bug: 1445115# Copyright (c) 2015 Rackspace Inc.
#
# Author: Tim Simmons <tim.simmons@rackspace.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Index, MetaData, Table
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
meta = MetaData()
def index_exists(index):
table = index[1]._get_table()
cols = sorted([str(x).split('.')[1] for x in index[1:]])
for idx in table.indexes:
if sorted(idx.columns.keys()) == cols:
return True
return False
def upgrade(migrate_engine):
meta.bind = migrate_engine
records_table = Table('records', meta, autoload=True)
indices = [
['update_status_index', records_table.c.status,
records_table.c.domain_id, records_table.c.tenant_id,
records_table.c.created_at, records_table.c.serial]
]
for ind in indices:
if not index_exists(ind):
index = Index(*ind)
index.create(migrate_engine)
|
<commit_before><commit_msg>Add an index to speed up update_status
This adds an index that speeds up the update_status method in
designate-central
Change-Id: I62600ebbf066dc746a696263f0e72ca373076353
Closes-Bug: 1445115<commit_after># Copyright (c) 2015 Rackspace Inc.
#
# Author: Tim Simmons <tim.simmons@rackspace.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Index, MetaData, Table
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
meta = MetaData()
def index_exists(index):
table = index[1]._get_table()
cols = sorted([str(x).split('.')[1] for x in index[1:]])
for idx in table.indexes:
if sorted(idx.columns.keys()) == cols:
return True
return False
def upgrade(migrate_engine):
meta.bind = migrate_engine
records_table = Table('records', meta, autoload=True)
indices = [
['update_status_index', records_table.c.status,
records_table.c.domain_id, records_table.c.tenant_id,
records_table.c.created_at, records_table.c.serial]
]
for ind in indices:
if not index_exists(ind):
index = Index(*ind)
index.create(migrate_engine)
|
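After running the upgrade, a quick sanity check is to ask SQLAlchemy which indexes now exist on the records table; the connection URL below is a placeholder and should be replaced with the real database URL:
from sqlalchemy import create_engine, inspect
engine = create_engine('sqlite:///designate.sqlite')  # placeholder; substitute the real URL
for idx in inspect(engine).get_indexes('records'):
    print(idx['name'], idx['column_names'])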
|
b4085ddea32ce5487d2d649a130984d8902e8399
|
tests/alservice/service/test_wsgi.py
|
tests/alservice/service/test_wsgi.py
|
import json
import pytest
from future.backports.urllib.parse import urlencode
from jwkest.jwk import RSAKey, rsa_load
from jwkest.jws import JWS
class TestWSGIApp:
@pytest.fixture(autouse=True)
def create_test_client(self, app, cert_and_key):
self.app = app.test_client()
self.signing_key = RSAKey(key=rsa_load(cert_and_key[1]), alg="RS256")
def test_full_flow(self):
# make account linking request
request_args = {"id": "id", "idp": "idp", "redirect_endpoint": "https://client.example.com/redirect_endpoint"}
jws = JWS(json.dumps(request_args)).sign_compact([self.signing_key])
path_get_id = "/get_id?{}".format(urlencode({"jwt": jws}))
resp = self.app.get(path_get_id)
assert resp.status_code == 404
ticket = resp.data.decode("utf-8")
# user gets redirected to the account linking page
resp = self.app.get("/approve/{}".format(ticket))
assert resp.status_code == 200
# redirect user to create an account page
resp = self.app.post("/create_account")
assert resp.status_code == 200
# send token by email (faked by writing it to a file)
resp = self.app.post("/send_token", data={"email": "test@example.com"})
assert resp.status_code == 200
# get token from file
with open("token") as f:
token = f.read()
# verify token
resp = self.app.post("/verify_token", data={"token": token})
assert resp.status_code == 200
# save account with a pin code
resp = self.app.post("/save_account", data={"pin": "!AbC123#"})
assert resp.status_code == 302
assert resp.headers["Location"] == request_args["redirect_endpoint"]
# get the id
resp = self.app.get(path_get_id)
assert resp.status_code == 200
assert resp.data.decode("utf-8")
|
Add test of full flow in WSGI app.
|
Add test of full flow in WSGI app.
|
Python
|
apache-2.0
|
its-dirg/ALservice
|
Add test of full flow in WSGI app.
|
import json
import pytest
from future.backports.urllib.parse import urlencode
from jwkest.jwk import RSAKey, rsa_load
from jwkest.jws import JWS
class TestWSGIApp:
@pytest.fixture(autouse=True)
def create_test_client(self, app, cert_and_key):
self.app = app.test_client()
self.signing_key = RSAKey(key=rsa_load(cert_and_key[1]), alg="RS256")
def test_full_flow(self):
# make account linking request
request_args = {"id": "id", "idp": "idp", "redirect_endpoint": "https://client.example.com/redirect_endpoint"}
jws = JWS(json.dumps(request_args)).sign_compact([self.signing_key])
path_get_id = "/get_id?{}".format(urlencode({"jwt": jws}))
resp = self.app.get(path_get_id)
assert resp.status_code == 404
ticket = resp.data.decode("utf-8")
# user gets redirected to the account linking page
resp = self.app.get("/approve/{}".format(ticket))
assert resp.status_code == 200
# redirect user to create an account page
resp = self.app.post("/create_account")
assert resp.status_code == 200
# send token by email (faked by writing it to a file)
resp = self.app.post("/send_token", data={"email": "test@example.com"})
assert resp.status_code == 200
# get token from file
with open("token") as f:
token = f.read()
# verify token
resp = self.app.post("/verify_token", data={"token": token})
assert resp.status_code == 200
# save account with a pin code
resp = self.app.post("/save_account", data={"pin": "!AbC123#"})
assert resp.status_code == 302
assert resp.headers["Location"] == request_args["redirect_endpoint"]
# get the id
resp = self.app.get(path_get_id)
assert resp.status_code == 200
assert resp.data.decode("utf-8")
|
<commit_before><commit_msg>Add test of full flow in WSGI app.<commit_after>
|
import json
import pytest
from future.backports.urllib.parse import urlencode
from jwkest.jwk import RSAKey, rsa_load
from jwkest.jws import JWS
class TestWSGIApp:
@pytest.fixture(autouse=True)
def create_test_client(self, app, cert_and_key):
self.app = app.test_client()
self.signing_key = RSAKey(key=rsa_load(cert_and_key[1]), alg="RS256")
def test_full_flow(self):
# make account linking request
request_args = {"id": "id", "idp": "idp", "redirect_endpoint": "https://client.example.com/redirect_endpoint"}
jws = JWS(json.dumps(request_args)).sign_compact([self.signing_key])
path_get_id = "/get_id?{}".format(urlencode({"jwt": jws}))
resp = self.app.get(path_get_id)
assert resp.status_code == 404
ticket = resp.data.decode("utf-8")
# user gets redirected to the account linking page
resp = self.app.get("/approve/{}".format(ticket))
assert resp.status_code == 200
# redirect user to create an account page
resp = self.app.post("/create_account")
assert resp.status_code == 200
# send token by email (faked by writing it to a file)
resp = self.app.post("/send_token", data={"email": "test@example.com"})
assert resp.status_code == 200
# get token from file
with open("token") as f:
token = f.read()
# verify token
resp = self.app.post("/verify_token", data={"token": token})
assert resp.status_code == 200
# save account with a pin code
resp = self.app.post("/save_account", data={"pin": "!AbC123#"})
assert resp.status_code == 302
assert resp.headers["Location"] == request_args["redirect_endpoint"]
# get the id
resp = self.app.get(path_get_id)
assert resp.status_code == 200
assert resp.data.decode("utf-8")
|
Add test of full flow in WSGI app.import json
import pytest
from future.backports.urllib.parse import urlencode
from jwkest.jwk import RSAKey, rsa_load
from jwkest.jws import JWS
class TestWSGIApp:
@pytest.fixture(autouse=True)
def create_test_client(self, app, cert_and_key):
self.app = app.test_client()
self.signing_key = RSAKey(key=rsa_load(cert_and_key[1]), alg="RS256")
def test_full_flow(self):
# make account linking request
request_args = {"id": "id", "idp": "idp", "redirect_endpoint": "https://client.example.com/redirect_endpoint"}
jws = JWS(json.dumps(request_args)).sign_compact([self.signing_key])
path_get_id = "/get_id?{}".format(urlencode({"jwt": jws}))
resp = self.app.get(path_get_id)
assert resp.status_code == 404
ticket = resp.data.decode("utf-8")
# user gets redirected to the account linking page
resp = self.app.get("/approve/{}".format(ticket))
assert resp.status_code == 200
# redirect user to create an account page
resp = self.app.post("/create_account")
assert resp.status_code == 200
# send token by email (faked by writing it to a file)
resp = self.app.post("/send_token", data={"email": "test@example.com"})
assert resp.status_code == 200
# get token from file
with open("token") as f:
token = f.read()
# verify token
resp = self.app.post("/verify_token", data={"token": token})
assert resp.status_code == 200
# save account with a pin code
resp = self.app.post("/save_account", data={"pin": "!AbC123#"})
assert resp.status_code == 302
assert resp.headers["Location"] == request_args["redirect_endpoint"]
# get the id
resp = self.app.get(path_get_id)
assert resp.status_code == 200
assert resp.data.decode("utf-8")
|
<commit_before><commit_msg>Add test of full flow in WSGI app.<commit_after>import json
import pytest
from future.backports.urllib.parse import urlencode
from jwkest.jwk import RSAKey, rsa_load
from jwkest.jws import JWS
class TestWSGIApp:
@pytest.fixture(autouse=True)
def create_test_client(self, app, cert_and_key):
self.app = app.test_client()
self.signing_key = RSAKey(key=rsa_load(cert_and_key[1]), alg="RS256")
def test_full_flow(self):
# make account linking request
request_args = {"id": "id", "idp": "idp", "redirect_endpoint": "https://client.example.com/redirect_endpoint"}
jws = JWS(json.dumps(request_args)).sign_compact([self.signing_key])
path_get_id = "/get_id?{}".format(urlencode({"jwt": jws}))
resp = self.app.get(path_get_id)
assert resp.status_code == 404
ticket = resp.data.decode("utf-8")
# user gets redirected to the account linking page
resp = self.app.get("/approve/{}".format(ticket))
assert resp.status_code == 200
# redirect user to create an account page
resp = self.app.post("/create_account")
assert resp.status_code == 200
# send token by email (faked by writing it to a file)
resp = self.app.post("/send_token", data={"email": "test@example.com"})
assert resp.status_code == 200
# get token from file
with open("token") as f:
token = f.read()
# verify token
resp = self.app.post("/verify_token", data={"token": token})
assert resp.status_code == 200
# save account with a pin code
resp = self.app.post("/save_account", data={"pin": "!AbC123#"})
assert resp.status_code == 302
assert resp.headers["Location"] == request_args["redirect_endpoint"]
# get the id
resp = self.app.get(path_get_id)
assert resp.status_code == 200
assert resp.data.decode("utf-8")
|
|
abfa62411ae77b5e541ac6b5a23883b6d1b6f31f
|
neuroimaging/utils/tests/data/__init__.py
|
neuroimaging/utils/tests/data/__init__.py
|
"""Information used for locating nipy test data.
Nipy uses a set of test data that is installed separately. The test
data should be located in the directory ``~/.nipy/tests/data``.
Install the data in your home directory from the data repository::
$ mkdir -p .nipy/tests/data
$ svn co http://neuroimaging.scipy.org/svn/ni/data/trunk/fmri .nipy/tests/data
"""
from os.path import expanduser, exists, join
from neuroimaging.data_io.datasource import Repository
# data directory should be: $HOME/.nipy/tests/data
datapath = expanduser(join('~', '.nipy', 'tests', 'data'))
if not exists(datapath):
raise IOError, 'Nipy data directory is not found!'
repository = Repository(datapath)
|
Add data repository package. Link to externally installed nipy data.
|
Add data repository package. Link to externally installed nipy data.
|
Python
|
bsd-3-clause
|
yarikoptic/NiPy-OLD,yarikoptic/NiPy-OLD
|
Add data repository package. Link to externally installed nipy data.
|
"""Information used for locating nipy test data.
Nipy uses a set of test data that is installed separately. The test
data should be located in the directory ``~/.nipy/tests/data``.
Install the data in your home directory from the data repository::
$ mkdir -p .nipy/tests/data
$ svn co http://neuroimaging.scipy.org/svn/ni/data/trunk/fmri .nipy/tests/data
"""
from os.path import expanduser, exists, join
from neuroimaging.data_io.datasource import Repository
# data directory should be: $HOME/.nipy/tests/data
datapath = expanduser(join('~', '.nipy', 'tests', 'data'))
if not exists(datapath):
raise IOError, 'Nipy data directory is not found!'
repository = Repository(datapath)
|
<commit_before><commit_msg>Add data repository package. Link to externally installed nipy data.<commit_after>
|
"""Information used for locating nipy test data.
Nipy uses a set of test data that is installed separately. The test
data should be located in the directory ``~/.nipy/tests/data``.
Install the data in your home directory from the data repository::
$ mkdir -p .nipy/tests/data
$ svn co http://neuroimaging.scipy.org/svn/ni/data/trunk/fmri .nipy/tests/data
"""
from os.path import expanduser, exists, join
from neuroimaging.data_io.datasource import Repository
# data directory should be: $HOME/.nipy/tests/data
datapath = expanduser(join('~', '.nipy', 'tests', 'data'))
if not exists(datapath):
raise IOError, 'Nipy data directory is not found!'
repository = Repository(datapath)
|
Add data repository package. Link to externally installed nipy data."""Information used for locating nipy test data.
Nipy uses a set of test data that is installed separately. The test
data should be located in the directory ``~/.nipy/tests/data``.
Install the data in your home directory from the data repository::
$ mkdir -p .nipy/tests/data
$ svn co http://neuroimaging.scipy.org/svn/ni/data/trunk/fmri .nipy/tests/data
"""
from os.path import expanduser, exists, join
from neuroimaging.data_io.datasource import Repository
# data directory should be: $HOME/.nipy/tests/data
datapath = expanduser(join('~', '.nipy', 'tests', 'data'))
if not exists(datapath):
raise IOError, 'Nipy data directory is not found!'
repository = Repository(datapath)
|
<commit_before><commit_msg>Add data repository package. Link to externally installed nipy data.<commit_after>"""Information used for locating nipy test data.
Nipy uses a set of test data that is installed separately. The test
data should be located in the directory ``~/.nipy/tests/data``.
Install the data in your home directory from the data repository::
$ mkdir -p .nipy/tests/data
$ svn co http://neuroimaging.scipy.org/svn/ni/data/trunk/fmri .nipy/tests/data
"""
from os.path import expanduser, exists, join
from neuroimaging.data_io.datasource import Repository
# data directory should be: $HOME/.nipy/tests/data
datapath = expanduser(join('~', '.nipy', 'tests', 'data'))
if not exists(datapath):
raise IOError, 'Nipy data directory is not found!'
repository = Repository(datapath)
|
|
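Test modules can then build paths into the installed data directory through this package; for example, with a hypothetical filename:
from os.path import join
from neuroimaging.utils.tests.data import datapath
funcfile = join(datapath, 'some_image.nii')  # hypothetical file under ~/.nipy/tests/data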
6aceb95b6372654c9cb2fa2d86fd07fb81f33c50
|
contrib/automation_tests/orbit_thread_state.py
|
contrib/automation_tests/orbit_thread_state.py
|
"""
Copyright (c) 2020 The Orbit Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
"""
from absl import app
from core.orbit_e2e import E2ETestSuite
from test_cases.connection_window import FilterAndSelectFirstProcess, ConnectToStadiaInstance
from test_cases.capture_window import Capture
"""Smoke test for thread state collection.
This automated test takes a capture on "hello_ggp_standalone" with thread state
collection enabled and verifies the presence of at least one track in the
capture window.
"""
def main(argv):
test_cases = [
ConnectToStadiaInstance(),
FilterAndSelectFirstProcess(process_filter='hello_'),
Capture(collect_thread_states=True)
]
suite = E2ETestSuite(test_name="Collect Thread States", test_cases=test_cases)
suite.execute()
if __name__ == '__main__':
app.run(main)
|
Add smoke test for thread state collection
|
Add smoke test for thread state collection
Only a smoke test until more advanced facilities to inspect the capture view
are introduced.
Bug: http://b/176960292
Test: Run the test locally.
|
Python
|
bsd-2-clause
|
google/orbit,google/orbit,google/orbit,google/orbit
|
Add smoke test for thread state collection
Only a smoke test until more advanced facilities to inspect the capture view
are introduced.
Bug: http://b/176960292
Test: Run the test locally.
|
"""
Copyright (c) 2020 The Orbit Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
"""
from absl import app
from core.orbit_e2e import E2ETestSuite
from test_cases.connection_window import FilterAndSelectFirstProcess, ConnectToStadiaInstance
from test_cases.capture_window import Capture
"""Smoke test for thread state collection.
This automated test takes a capture on "hello_ggp_standalone" with thread state
collection enabled and verifies the presence of at least one track in the
capture window.
"""
def main(argv):
test_cases = [
ConnectToStadiaInstance(),
FilterAndSelectFirstProcess(process_filter='hello_'),
Capture(collect_thread_states=True)
]
suite = E2ETestSuite(test_name="Collect Thread States", test_cases=test_cases)
suite.execute()
if __name__ == '__main__':
app.run(main)
|
<commit_before><commit_msg>Add smoke test for thread state collection
Only a smoke test until more advanced facilities to inspect the capture view
are introduced.
Bug: http://b/176960292
Test: Run the test locally.<commit_after>
|
"""
Copyright (c) 2020 The Orbit Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
"""
from absl import app
from core.orbit_e2e import E2ETestSuite
from test_cases.connection_window import FilterAndSelectFirstProcess, ConnectToStadiaInstance
from test_cases.capture_window import Capture
"""Smoke test for thread state collection.
This automated test takes a capture on "hello_ggp_standalone" with thread state
collection enabled and verifies the presence of at least one track in the
capture window.
"""
def main(argv):
test_cases = [
ConnectToStadiaInstance(),
FilterAndSelectFirstProcess(process_filter='hello_'),
Capture(collect_thread_states=True)
]
suite = E2ETestSuite(test_name="Collect Thread States", test_cases=test_cases)
suite.execute()
if __name__ == '__main__':
app.run(main)
|
Add smoke test for thread state collection
Only a smoke test until more advanced facilities to inspect the capture view
are introduced.
Bug: http://b/176960292
Test: Run the test locally."""
Copyright (c) 2020 The Orbit Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
"""
from absl import app
from core.orbit_e2e import E2ETestSuite
from test_cases.connection_window import FilterAndSelectFirstProcess, ConnectToStadiaInstance
from test_cases.capture_window import Capture
"""Smoke test for thread state collection.
This automated test takes a capture on "hello_ggp_standalone" with thread state
collection enabled and verifies the presence of at least one track in the
capture window.
"""
def main(argv):
test_cases = [
ConnectToStadiaInstance(),
FilterAndSelectFirstProcess(process_filter='hello_'),
Capture(collect_thread_states=True)
]
suite = E2ETestSuite(test_name="Collect Thread States", test_cases=test_cases)
suite.execute()
if __name__ == '__main__':
app.run(main)
|
<commit_before><commit_msg>Add smoke test for thread state collection
Only a smoke test until more advanced facilities to inspect the capture view
are introduced.
Bug: http://b/176960292
Test: Run the test locally.<commit_after>"""
Copyright (c) 2020 The Orbit Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
"""
from absl import app
from core.orbit_e2e import E2ETestSuite
from test_cases.connection_window import FilterAndSelectFirstProcess, ConnectToStadiaInstance
from test_cases.capture_window import Capture
"""Smoke test for thread state collection.
This automated test takes a capture on "hello_ggp_standalone" with thread state
collection enabled and verifies the presence of at least one track in the
capture window.
"""
def main(argv):
test_cases = [
ConnectToStadiaInstance(),
FilterAndSelectFirstProcess(process_filter='hello_'),
Capture(collect_thread_states=True)
]
suite = E2ETestSuite(test_name="Collect Thread States", test_cases=test_cases)
suite.execute()
if __name__ == '__main__':
app.run(main)
|
|
798ddd081ff488194c9f0cb1bcab839099e1db70
|
bluebottle/funding/migrations/0047_auto_20191116_1540.py
|
bluebottle/funding/migrations/0047_auto_20191116_1540.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-11-16 14:40
from __future__ import unicode_literals
from django.db import migrations
from django.db.models import F
def fix_matching_currencies(apps, schema_editor):
Funding = apps.get_model('funding', 'Funding')
Funding.objects.update(amount_matching_currency=F('target_currency'))
class Migration(migrations.Migration):
dependencies = [
('funding', '0046_merge_20191112_1256'),
]
operations = [
migrations.RunPython(fix_matching_currencies, migrations.RunPython.noop)
]
|
Fix matching currency being different than target currency
|
Fix matching currency being different than target currency
BB-15798 #resolve
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
Fix matching currency being different than target currency
BB-15798 #resolve
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-11-16 14:40
from __future__ import unicode_literals
from django.db import migrations
from django.db.models import F
def fix_matching_currencies(apps, schema_editor):
Funding = apps.get_model('funding', 'Funding')
Funding.objects.update(amount_matching_currency=F('target_currency'))
class Migration(migrations.Migration):
dependencies = [
('funding', '0046_merge_20191112_1256'),
]
operations = [
migrations.RunPython(fix_matching_currencies, migrations.RunPython.noop)
]
|
<commit_before><commit_msg>Fix matching currency being different than target currency
BB-15798 #resolve<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-11-16 14:40
from __future__ import unicode_literals
from django.db import migrations
from django.db.models import F
def fix_matching_currencies(apps, schema_editor):
Funding = apps.get_model('funding', 'Funding')
Funding.objects.update(amount_matching_currency=F('target_currency'))
class Migration(migrations.Migration):
dependencies = [
('funding', '0046_merge_20191112_1256'),
]
operations = [
migrations.RunPython(fix_matching_currencies, migrations.RunPython.noop)
]
|
Fix matching currency being different than target currency
BB-15798 #resolve# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-11-16 14:40
from __future__ import unicode_literals
from django.db import migrations
from django.db.models import F
def fix_matching_currencies(apps, schema_editor):
Funding = apps.get_model('funding', 'Funding')
Funding.objects.update(amount_matching_currency=F('target_currency'))
class Migration(migrations.Migration):
dependencies = [
('funding', '0046_merge_20191112_1256'),
]
operations = [
migrations.RunPython(fix_matching_currencies, migrations.RunPython.noop)
]
|
<commit_before><commit_msg>Fix matching currency being different than target currency
BB-15798 #resolve<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-11-16 14:40
from __future__ import unicode_literals
from django.db import migrations
from django.db.models import F
def fix_matching_currencies(apps, schema_editor):
Funding = apps.get_model('funding', 'Funding')
Funding.objects.update(amount_matching_currency=F('target_currency'))
class Migration(migrations.Migration):
dependencies = [
('funding', '0046_merge_20191112_1256'),
]
operations = [
migrations.RunPython(fix_matching_currencies, migrations.RunPython.noop)
]
|
|
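Editorial note: the funding migration above shows the RunPython plus F() pattern for copying one column into another in bulk. The sketch below applies the same pattern to a made-up 'shop.Product' model and adds an explicit reverse step instead of RunPython.noop; every name in it is an assumption, not part of the commit.
from django.db import migrations
from django.db.models import F

def copy_unit_price(apps, schema_editor):
    # Bulk-copy unit_price into list_price without pulling rows into Python.
    Product = apps.get_model('shop', 'Product')
    Product.objects.update(list_price=F('unit_price'))

def clear_list_price(apps, schema_editor):
    # Reverse step: drop the copied values again.
    Product = apps.get_model('shop', 'Product')
    Product.objects.update(list_price=None)

class Migration(migrations.Migration):
    dependencies = [
        ('shop', '0001_initial'),
    ]
    operations = [
        migrations.RunPython(copy_unit_price, clear_list_price),
    ]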
18a2fde9b573e4db34e247cdd8dce6f506d8e34e
|
tests/Settings/TestSettingRelation.py
|
tests/Settings/TestSettingRelation.py
|
# Copyright (c) 2016 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
import pytest
import UM.Settings.SettingRelation
def test_create():
with pytest.raises(ValueError):
relation = UM.Settings.SettingRelation.SettingRelation(None, 2, UM.Settings.SettingRelation.RelationType.RequiresTarget, "max")
with pytest.raises(ValueError):
relation = UM.Settings.SettingRelation.SettingRelation(1, None, UM.Settings.SettingRelation.RelationType.RequiresTarget, "max")
relation = UM.Settings.SettingRelation.SettingRelation(1, 2, UM.Settings.SettingRelation.RelationType.RequiresTarget, "max")
assert relation.owner == 1
assert relation.target == 2
assert relation.type == UM.Settings.SettingRelation.RelationType.RequiresTarget
assert relation.role == "max"
relation = UM.Settings.SettingRelation.SettingRelation(1, 2, UM.Settings.SettingRelation.RelationType.RequiredByTarget, "min")
assert relation.owner == 1
assert relation.target == 2
assert relation.type == UM.Settings.SettingRelation.RelationType.RequiredByTarget
assert relation.role == "min"
|
Add test suite for SettingRelation
|
Add test suite for SettingRelation
Extremely simple test since SettingRelation is essentially a tuple.
Contributes to issue CURA-1278.
|
Python
|
agpl-3.0
|
onitake/Uranium,onitake/Uranium
|
Add test suite for SettingRelation
Extremely simple test since SettingRelation is essentially a tuple.
Contributes to issue CURA-1278.
|
# Copyright (c) 2016 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
import pytest
import UM.Settings.SettingRelation
def test_create():
with pytest.raises(ValueError):
relation = UM.Settings.SettingRelation.SettingRelation(None, 2, UM.Settings.SettingRelation.RelationType.RequiresTarget, "max")
with pytest.raises(ValueError):
relation = UM.Settings.SettingRelation.SettingRelation(1, None, UM.Settings.SettingRelation.RelationType.RequiresTarget, "max")
relation = UM.Settings.SettingRelation.SettingRelation(1, 2, UM.Settings.SettingRelation.RelationType.RequiresTarget, "max")
assert relation.owner == 1
assert relation.target == 2
assert relation.type == UM.Settings.SettingRelation.RelationType.RequiresTarget
assert relation.role == "max"
relation = UM.Settings.SettingRelation.SettingRelation(1, 2, UM.Settings.SettingRelation.RelationType.RequiredByTarget, "min")
assert relation.owner == 1
assert relation.target == 2
assert relation.type == UM.Settings.SettingRelation.RelationType.RequiredByTarget
assert relation.role == "min"
|
<commit_before><commit_msg>Add test suite for SettingRelation
Extremely simple test since SettingRelation is essentially a tuple.
Contributes to issue CURA-1278.<commit_after>
|
# Copyright (c) 2016 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
import pytest
import UM.Settings.SettingRelation
def test_create():
with pytest.raises(ValueError):
relation = UM.Settings.SettingRelation.SettingRelation(None, 2, UM.Settings.SettingRelation.RelationType.RequiresTarget, "max")
with pytest.raises(ValueError):
relation = UM.Settings.SettingRelation.SettingRelation(1, None, UM.Settings.SettingRelation.RelationType.RequiresTarget, "max")
relation = UM.Settings.SettingRelation.SettingRelation(1, 2, UM.Settings.SettingRelation.RelationType.RequiresTarget, "max")
assert relation.owner == 1
assert relation.target == 2
assert relation.type == UM.Settings.SettingRelation.RelationType.RequiresTarget
assert relation.role == "max"
relation = UM.Settings.SettingRelation.SettingRelation(1, 2, UM.Settings.SettingRelation.RelationType.RequiredByTarget, "min")
assert relation.owner == 1
assert relation.target == 2
assert relation.type == UM.Settings.SettingRelation.RelationType.RequiredByTarget
assert relation.role == "min"
|
Add test suite for SettingRelation
Extremely simple test since SettingRelation is essentially a tuple.
Contributes to issue CURA-1278.# Copyright (c) 2016 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
import pytest
import UM.Settings.SettingRelation
def test_create():
with pytest.raises(ValueError):
relation = UM.Settings.SettingRelation.SettingRelation(None, 2, UM.Settings.SettingRelation.RelationType.RequiresTarget, "max")
with pytest.raises(ValueError):
relation = UM.Settings.SettingRelation.SettingRelation(1, None, UM.Settings.SettingRelation.RelationType.RequiresTarget, "max")
relation = UM.Settings.SettingRelation.SettingRelation(1, 2, UM.Settings.SettingRelation.RelationType.RequiresTarget, "max")
assert relation.owner == 1
assert relation.target == 2
assert relation.type == UM.Settings.SettingRelation.RelationType.RequiresTarget
assert relation.role == "max"
relation = UM.Settings.SettingRelation.SettingRelation(1, 2, UM.Settings.SettingRelation.RelationType.RequiredByTarget, "min")
assert relation.owner == 1
assert relation.target == 2
assert relation.type == UM.Settings.SettingRelation.RelationType.RequiredByTarget
assert relation.role == "min"
|
<commit_before><commit_msg>Add test suite for SettingRelation
Extremely simple test since SettingRelation is essentially a tuple.
Contributes to issue CURA-1278.<commit_after># Copyright (c) 2016 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
import pytest
import UM.Settings.SettingRelation
def test_create():
with pytest.raises(ValueError):
relation = UM.Settings.SettingRelation.SettingRelation(None, 2, UM.Settings.SettingRelation.RelationType.RequiresTarget, "max")
with pytest.raises(ValueError):
relation = UM.Settings.SettingRelation.SettingRelation(1, None, UM.Settings.SettingRelation.RelationType.RequiresTarget, "max")
relation = UM.Settings.SettingRelation.SettingRelation(1, 2, UM.Settings.SettingRelation.RelationType.RequiresTarget, "max")
assert relation.owner == 1
assert relation.target == 2
assert relation.type == UM.Settings.SettingRelation.RelationType.RequiresTarget
assert relation.role == "max"
relation = UM.Settings.SettingRelation.SettingRelation(1, 2, UM.Settings.SettingRelation.RelationType.RequiredByTarget, "min")
assert relation.owner == 1
assert relation.target == 2
assert relation.type == UM.Settings.SettingRelation.RelationType.RequiredByTarget
assert relation.role == "min"
|
|
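Editorial note: the SettingRelation tests above pin down constructor validation and the owner/target/type/role attributes. The real UM.Settings.SettingRelation implementation is not included in this record, so the following is only a hedged stand-in that would satisfy those assertions.
class SettingRelationSketch(object):
    # Minimal stand-in: reject missing endpoints, expose owner/target/type/role.
    def __init__(self, owner, target, relation_type, role=None):
        if owner is None:
            raise ValueError('owner must not be None')
        if target is None:
            raise ValueError('target must not be None')
        self.owner = owner
        self.target = target
        self.type = relation_type
        self.role = role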
42c53d0f9afa61799e3c98327746e790f6fb0b1b
|
letters/migrations/0002_set_ordering_letter.py
|
letters/migrations/0002_set_ordering_letter.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-08-04 19:28
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('letters', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='letter',
options={'ordering': ['book', 'letter']},
),
]
|
Add ordering for letters to migration
|
Add ordering for letters to migration
|
Python
|
mit
|
bwhicks/PlinyProject,bwhicks/PlinyProject,bwhicks/PlinyProject,bwhicks/PlinyProject
|
Add ordering for letters to migration
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-08-04 19:28
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('letters', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='letter',
options={'ordering': ['book', 'letter']},
),
]
|
<commit_before><commit_msg>Add ordering for letters to migration<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-08-04 19:28
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('letters', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='letter',
options={'ordering': ['book', 'letter']},
),
]
|
Add ordering for letters to migration# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-08-04 19:28
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('letters', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='letter',
options={'ordering': ['book', 'letter']},
),
]
|
<commit_before><commit_msg>Add ordering for letters to migration<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-08-04 19:28
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('letters', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='letter',
options={'ordering': ['book', 'letter']},
),
]
|
|
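Editorial note: AlterModelOptions in the letters migration above only records a Meta change; the model definition normally carries the same ordering. A hedged sketch of that model side follows — the field names mirror the migration, the field types are assumptions.
from django.db import models

class Letter(models.Model):
    book = models.PositiveSmallIntegerField()
    letter = models.PositiveSmallIntegerField()

    class Meta:
        ordering = ['book', 'letter']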
ddcc6e3fe252f5666d9f33023361cb0e01ca351a
|
tests/test_inet.py
|
tests/test_inet.py
|
# -*- coding: utf-8 -*-
import pytest
import csv
from inet.inet import Inet
class TestInet():
"""Test the Inet class functions as expected"""
def test_no_data_file(self):
with pytest.raises(AttributeError):
Inet(data_file=None)
def test_wrong_file_type(self, tmpdir):
with pytest.raises(TypeError):
p = tmpdir.mkdir("sub").join("temp.txt")
p.write("content")
Inet(data_file=str(p))
def test_read_csv(self, tmpdir):
headers = ['header1', 'header2']
rows = [('AA', 'BB'), ('CC', 'DD')]
temp_file = tmpdir.mkdir("sub").join("temp.csv")
with open(str(temp_file), 'w') as f:
f_csv = csv.writer(f)
f_csv.writerow(headers)
f_csv.writerows(rows)
inet = Inet(data_file=str(temp_file))
rows = inet.rows
assert len(rows) == 2
assert rows[0].header1 == 'AA'
assert rows[0].header2 == 'BB'
assert rows[1].header1 == 'CC'
assert rows[1].header2 == 'DD'
with pytest.raises(AttributeError):
assert rows[0].header3 == 'AA'
if __name__ == '__main__':
pytest.main()
|
Add inet read file tests
|
Add inet read file tests
|
Python
|
mit
|
nestauk/inet
|
Add inet read file tests
|
# -*- coding: utf-8 -*-
import pytest
import csv
from inet.inet import Inet
class TestInet():
"""Test the Inet class functions as expected"""
def test_no_data_file(self):
with pytest.raises(AttributeError):
Inet(data_file=None)
def test_wrong_file_type(self, tmpdir):
with pytest.raises(TypeError):
p = tmpdir.mkdir("sub").join("temp.txt")
p.write("content")
Inet(data_file=str(p))
def test_read_csv(self, tmpdir):
headers = ['header1', 'header2']
rows = [('AA', 'BB'), ('CC', 'DD')]
temp_file = tmpdir.mkdir("sub").join("temp.csv")
with open(str(temp_file), 'w') as f:
f_csv = csv.writer(f)
f_csv.writerow(headers)
f_csv.writerows(rows)
inet = Inet(data_file=str(temp_file))
rows = inet.rows
assert len(rows) == 2
assert rows[0].header1 == 'AA'
assert rows[0].header2 == 'BB'
assert rows[1].header1 == 'CC'
assert rows[1].header2 == 'DD'
with pytest.raises(AttributeError):
assert rows[0].header3 == 'AA'
if __name__ == '__main__':
pytest.main()
|
<commit_before><commit_msg>Add inet read file tests<commit_after>
|
# -*- coding: utf-8 -*-
import pytest
import csv
from inet.inet import Inet
class TestInet():
"""Test the Inet class functions as expected"""
def test_no_data_file(self):
with pytest.raises(AttributeError):
Inet(data_file=None)
def test_wrong_file_type(self, tmpdir):
with pytest.raises(TypeError):
p = tmpdir.mkdir("sub").join("temp.txt")
p.write("content")
Inet(data_file=str(p))
def test_read_csv(self, tmpdir):
headers = ['header1', 'header2']
rows = [('AA', 'BB'), ('CC', 'DD')]
temp_file = tmpdir.mkdir("sub").join("temp.csv")
with open(str(temp_file), 'w') as f:
f_csv = csv.writer(f)
f_csv.writerow(headers)
f_csv.writerows(rows)
inet = Inet(data_file=str(temp_file))
rows = inet.rows
assert len(rows) == 2
assert rows[0].header1 == 'AA'
assert rows[0].header2 == 'BB'
assert rows[1].header1 == 'CC'
assert rows[1].header2 == 'DD'
with pytest.raises(AttributeError):
assert rows[0].header3 == 'AA'
if __name__ == '__main__':
pytest.main()
|
Add inet read file tests# -*- coding: utf-8 -*-
import pytest
import csv
from inet.inet import Inet
class TestInet():
"""Test the Inet class functions as expected"""
def test_no_data_file(self):
with pytest.raises(AttributeError):
Inet(data_file=None)
def test_wrong_file_type(self, tmpdir):
with pytest.raises(TypeError):
p = tmpdir.mkdir("sub").join("temp.txt")
p.write("content")
Inet(data_file=str(p))
def test_read_csv(self, tmpdir):
headers = ['header1', 'header2']
rows = [('AA', 'BB'), ('CC', 'DD')]
temp_file = tmpdir.mkdir("sub").join("temp.csv")
with open(str(temp_file), 'w') as f:
f_csv = csv.writer(f)
f_csv.writerow(headers)
f_csv.writerows(rows)
inet = Inet(data_file=str(temp_file))
rows = inet.rows
assert len(rows) == 2
assert rows[0].header1 == 'AA'
assert rows[0].header2 == 'BB'
assert rows[1].header1 == 'CC'
assert rows[1].header2 == 'DD'
with pytest.raises(AttributeError):
assert rows[0].header3 == 'AA'
if __name__ == '__main__':
pytest.main()
|
<commit_before><commit_msg>Add inet read file tests<commit_after># -*- coding: utf-8 -*-
import pytest
import csv
from inet.inet import Inet
class TestInet():
"""Test the Inet class functions as expected"""
def test_no_data_file(self):
with pytest.raises(AttributeError):
Inet(data_file=None)
def test_wrong_file_type(self, tmpdir):
with pytest.raises(TypeError):
p = tmpdir.mkdir("sub").join("temp.txt")
p.write("content")
Inet(data_file=str(p))
def test_read_csv(self, tmpdir):
headers = ['header1', 'header2']
rows = [('AA', 'BB'), ('CC', 'DD')]
temp_file = tmpdir.mkdir("sub").join("temp.csv")
with open(str(temp_file), 'w') as f:
f_csv = csv.writer(f)
f_csv.writerow(headers)
f_csv.writerows(rows)
inet = Inet(data_file=str(temp_file))
rows = inet.rows
assert len(rows) == 2
assert rows[0].header1 == 'AA'
assert rows[0].header2 == 'BB'
assert rows[1].header1 == 'CC'
assert rows[1].header2 == 'DD'
with pytest.raises(AttributeError):
assert rows[0].header3 == 'AA'
if __name__ == '__main__':
pytest.main()
|
|
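Editorial note: the inet tests above expect CSV rows to come back as objects addressable by header name. The Inet implementation itself is not part of this record, so the snippet below is merely a plausible sketch of that behaviour built on csv and collections.namedtuple; the helper name is invented.
import csv
from collections import namedtuple

def read_rows(data_file):
    # Read the header row, then map every remaining row onto a namedtuple.
    with open(data_file) as f:
        reader = csv.reader(f)
        headers = next(reader)
        Row = namedtuple('Row', headers)
        return [Row(*row) for row in reader]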
31a36698c42e4c5d0e5d5a44cb924dcff231a7e4
|
tests/test_main.py
|
tests/test_main.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from unittest import TestCase
from centerline import Centerline
from shapely.geometry import (GeometryCollection, LineString, MultiLineString,
MultiPoint, MultiPolygon, Point, Polygon)
class TestCenterlineSupportedGeometryTypes(TestCase):
"""Only Polygons should be supported.
For more information about creating the geometry objects (like the
ones used below) see The Shapely User Manual:
https://shapely.readthedocs.io/en/latest/manual.html
"""
def test__polygon__returns_multilinestring(self):
POLYGON = Polygon([[0, 0], [0, 4], [4, 4], [4, 0]])
centerline = Centerline(POLYGON)
self.assertIsInstance(centerline, MultiLineString)
def test__polygon_with_interior_ring__returns_multilinestring(self):
EXTERIOR = [(0, 0), (0, 2), (2, 2), (2, 0), (0, 0)]
INTERIOR = [(1, 0), (0.5, 0.5), (1, 1), (1.5, 0.5), (1, 0)][::-1]
POLYGON = Polygon(EXTERIOR, [INTERIOR])
centerline = Centerline(POLYGON)
self.assertIsInstance(centerline, MultiLineString)
def test__multipolygon__raises_valueerror(self):
POLYGONS = [Point(i, 0).buffer(0.1) for i in range(2)]
MULTIPOLYGON = MultiPolygon(POLYGONS)
with self.assertRaises(ValueError):
Centerline(MULTIPOLYGON)
def test__point__raises_valueerror(self):
POINT = Point(0, 0)
with self.assertRaises(ValueError):
Centerline(POINT)
def test__multipoint__raises_valueerror(self):
MULTIPOINT = MultiPoint([Point(0, 0), Point(1, 1)])
with self.assertRaises(ValueError):
Centerline(MULTIPOINT)
def test__linestring__raises_valueerror(self):
LINESTRING = LineString([(0, 0), (0.8, 0.8), (1.8, 0.95), (2.6, 0.5)])
with self.assertRaises(ValueError):
Centerline(LINESTRING)
def test__multilinestring__raises_valueerror(self):
MULTILINESTRING = MultiLineString(
[((0, 0), (1, 1)), ((-1, 0), (1, 0))]
)
with self.assertRaises(ValueError):
Centerline(MULTILINESTRING)
def test__geometry_collection__raises_valueerror(self):
GEOMETRY_COLLECTION = GeometryCollection(
(
Point(0, 0),
LineString([(0, 0), (0.8, 0.8), (1.8, 0.95), (2.6, 0.5)]),
Polygon([[0, 0], [0, 4], [4, 4], [4, 0]])
)
)
with self.assertRaises(ValueError):
Centerline(GEOMETRY_COLLECTION)
|
Cover the type support with tests
|
Cover the type support with tests
|
Python
|
mit
|
fitodic/centerline,fitodic/centerline,fitodic/polygon-centerline
|
Cover the type support with tests
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from unittest import TestCase
from centerline import Centerline
from shapely.geometry import (GeometryCollection, LineString, MultiLineString,
MultiPoint, MultiPolygon, Point, Polygon)
class TestCenterlineSupportedGeometryTypes(TestCase):
"""Only Polygons should be supported.
For more information about creating the geometry objects (like the
ones used below) see The Shapely User Manual:
https://shapely.readthedocs.io/en/latest/manual.html
"""
def test__polygon__returns_multilinestring(self):
POLYGON = Polygon([[0, 0], [0, 4], [4, 4], [4, 0]])
centerline = Centerline(POLYGON)
self.assertIsInstance(centerline, MultiLineString)
def test__polygon_with_interior_ring__returns_multilinestring(self):
EXTERIOR = [(0, 0), (0, 2), (2, 2), (2, 0), (0, 0)]
INTERIOR = [(1, 0), (0.5, 0.5), (1, 1), (1.5, 0.5), (1, 0)][::-1]
POLYGON = Polygon(EXTERIOR, [INTERIOR])
centerline = Centerline(POLYGON)
self.assertIsInstance(centerline, MultiLineString)
def test__multipolygon__raises_valueerror(self):
POLYGONS = [Point(i, 0).buffer(0.1) for i in range(2)]
MULTIPOLYGON = MultiPolygon(POLYGONS)
with self.assertRaises(ValueError):
Centerline(MULTIPOLYGON)
def test__point__raises_valueerror(self):
POINT = Point(0, 0)
with self.assertRaises(ValueError):
Centerline(POINT)
def test__multipoint__raises_valueerror(self):
MULTIPOINT = MultiPoint([Point(0, 0), Point(1, 1)])
with self.assertRaises(ValueError):
Centerline(MULTIPOINT)
def test__linestring__raises_valueerror(self):
LINESTRING = LineString([(0, 0), (0.8, 0.8), (1.8, 0.95), (2.6, 0.5)])
with self.assertRaises(ValueError):
Centerline(LINESTRING)
def test__multilinestring__raises_valueerror(self):
MULTILINESTRING = MultiLineString(
[((0, 0), (1, 1)), ((-1, 0), (1, 0))]
)
with self.assertRaises(ValueError):
Centerline(MULTILINESTRING)
def test__geometry_collection__raises_valueerror(self):
GEOMETRY_COLLECTION = GeometryCollection(
(
Point(0, 0),
LineString([(0, 0), (0.8, 0.8), (1.8, 0.95), (2.6, 0.5)]),
Polygon([[0, 0], [0, 4], [4, 4], [4, 0]])
)
)
with self.assertRaises(ValueError):
Centerline(GEOMETRY_COLLECTION)
|
<commit_before><commit_msg>Cover the type support with tests<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from unittest import TestCase
from centerline import Centerline
from shapely.geometry import (GeometryCollection, LineString, MultiLineString,
MultiPoint, MultiPolygon, Point, Polygon)
class TestCenterlineSupportedGeometryTypes(TestCase):
"""Only Polygons should be supported.
For more information about creating the geometry objects (like the
ones used below) see The Shapely User Manual:
https://shapely.readthedocs.io/en/latest/manual.html
"""
def test__polygon__returns_multilinestring(self):
POLYGON = Polygon([[0, 0], [0, 4], [4, 4], [4, 0]])
centerline = Centerline(POLYGON)
self.assertIsInstance(centerline, MultiLineString)
def test__polygon_with_interior_ring__returns_multilinestring(self):
EXTERIOR = [(0, 0), (0, 2), (2, 2), (2, 0), (0, 0)]
INTERIOR = [(1, 0), (0.5, 0.5), (1, 1), (1.5, 0.5), (1, 0)][::-1]
POLYGON = Polygon(EXTERIOR, [INTERIOR])
centerline = Centerline(POLYGON)
self.assertIsInstance(centerline, MultiLineString)
def test__multipolygon__raises_valueerror(self):
POLYGONS = [Point(i, 0).buffer(0.1) for i in range(2)]
MULTIPOLYGON = MultiPolygon(POLYGONS)
with self.assertRaises(ValueError):
Centerline(MULTIPOLYGON)
def test__point__raises_valueerror(self):
POINT = Point(0, 0)
with self.assertRaises(ValueError):
Centerline(POINT)
def test__multipoint__raises_valueerror(self):
MULTIPOINT = MultiPoint([Point(0, 0), Point(1, 1)])
with self.assertRaises(ValueError):
Centerline(MULTIPOINT)
def test__linestring__raises_valueerror(self):
LINESTRING = LineString([(0, 0), (0.8, 0.8), (1.8, 0.95), (2.6, 0.5)])
with self.assertRaises(ValueError):
Centerline(LINESTRING)
def test__multilinestring__raises_valueerror(self):
MULTILINESTRING = MultiLineString(
[((0, 0), (1, 1)), ((-1, 0), (1, 0))]
)
with self.assertRaises(ValueError):
Centerline(MULTILINESTRING)
def test__geometry_collection__raises_valueerror(self):
GEOMETRY_COLLECTION = GeometryCollection(
(
Point(0, 0),
LineString([(0, 0), (0.8, 0.8), (1.8, 0.95), (2.6, 0.5)]),
Polygon([[0, 0], [0, 4], [4, 4], [4, 0]])
)
)
with self.assertRaises(ValueError):
Centerline(GEOMETRY_COLLECTION)
|
Cover the type support with tests# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from unittest import TestCase
from centerline import Centerline
from shapely.geometry import (GeometryCollection, LineString, MultiLineString,
MultiPoint, MultiPolygon, Point, Polygon)
class TestCenterlineSupportedGeometryTypes(TestCase):
"""Only Polygons should be supported.
For more information about creating the geometry objects (like the
ones used below) see The Shapely User Manual:
https://shapely.readthedocs.io/en/latest/manual.html
"""
def test__polygon__returns_multilinestring(self):
POLYGON = Polygon([[0, 0], [0, 4], [4, 4], [4, 0]])
centerline = Centerline(POLYGON)
self.assertIsInstance(centerline, MultiLineString)
def test__polygon_with_interior_ring__returns_multilinestring(self):
EXTERIOR = [(0, 0), (0, 2), (2, 2), (2, 0), (0, 0)]
INTERIOR = [(1, 0), (0.5, 0.5), (1, 1), (1.5, 0.5), (1, 0)][::-1]
POLYGON = Polygon(EXTERIOR, [INTERIOR])
centerline = Centerline(POLYGON)
self.assertIsInstance(centerline, MultiLineString)
def test__multipolygon__raises_valueerror(self):
POLYGONS = [Point(i, 0).buffer(0.1) for i in range(2)]
MULTIPOLYGON = MultiPolygon(POLYGONS)
with self.assertRaises(ValueError):
Centerline(MULTIPOLYGON)
def test__point__raises_valueerror(self):
POINT = Point(0, 0)
with self.assertRaises(ValueError):
Centerline(POINT)
def test__multipoint__raises_valueerror(self):
MULTIPOINT = MultiPoint([Point(0, 0), Point(1, 1)])
with self.assertRaises(ValueError):
Centerline(MULTIPOINT)
def test__linestring__raises_valueerror(self):
LINESTRING = LineString([(0, 0), (0.8, 0.8), (1.8, 0.95), (2.6, 0.5)])
with self.assertRaises(ValueError):
Centerline(LINESTRING)
def test__multilinestring__raises_valueerror(self):
MULTILINESTRING = MultiLineString(
[((0, 0), (1, 1)), ((-1, 0), (1, 0))]
)
with self.assertRaises(ValueError):
Centerline(MULTILINESTRING)
def test__geometry_collection__raises_valueerror(self):
GEOMETRY_COLLECTION = GeometryCollection(
(
Point(0, 0),
LineString([(0, 0), (0.8, 0.8), (1.8, 0.95), (2.6, 0.5)]),
Polygon([[0, 0], [0, 4], [4, 4], [4, 0]])
)
)
with self.assertRaises(ValueError):
Centerline(GEOMETRY_COLLECTION)
|
<commit_before><commit_msg>Cover the type support with tests<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from unittest import TestCase
from centerline import Centerline
from shapely.geometry import (GeometryCollection, LineString, MultiLineString,
MultiPoint, MultiPolygon, Point, Polygon)
class TestCenterlineSupportedGeometryTypes(TestCase):
"""Only Polygons should be supported.
For more information about creating the geometry objects (like the
ones used below) see The Shapely User Manual:
https://shapely.readthedocs.io/en/latest/manual.html
"""
def test__polygon__returns_multilinestring(self):
POLYGON = Polygon([[0, 0], [0, 4], [4, 4], [4, 0]])
centerline = Centerline(POLYGON)
self.assertIsInstance(centerline, MultiLineString)
def test__polygon_with_interior_ring__returns_multilinestring(self):
EXTERIOR = [(0, 0), (0, 2), (2, 2), (2, 0), (0, 0)]
INTERIOR = [(1, 0), (0.5, 0.5), (1, 1), (1.5, 0.5), (1, 0)][::-1]
POLYGON = Polygon(EXTERIOR, [INTERIOR])
centerline = Centerline(POLYGON)
self.assertIsInstance(centerline, MultiLineString)
def test__multipolygon__raises_valueerror(self):
POLYGONS = [Point(i, 0).buffer(0.1) for i in range(2)]
MULTIPOLYGON = MultiPolygon(POLYGONS)
with self.assertRaises(ValueError):
Centerline(MULTIPOLYGON)
def test__point__raises_valueerror(self):
POINT = Point(0, 0)
with self.assertRaises(ValueError):
Centerline(POINT)
def test__multipoint__raises_valueerror(self):
MULTIPOINT = MultiPoint([Point(0, 0), Point(1, 1)])
with self.assertRaises(ValueError):
Centerline(MULTIPOINT)
def test__linestring__raises_valueerror(self):
LINESTRING = LineString([(0, 0), (0.8, 0.8), (1.8, 0.95), (2.6, 0.5)])
with self.assertRaises(ValueError):
Centerline(LINESTRING)
def test__multilinestring__raises_valueerror(self):
MULTILINESTRING = MultiLineString(
[((0, 0), (1, 1)), ((-1, 0), (1, 0))]
)
with self.assertRaises(ValueError):
Centerline(MULTILINESTRING)
def test__geometry_collection__raises_valueerror(self):
GEOMETRY_COLLECTION = GeometryCollection(
(
Point(0, 0),
LineString([(0, 0), (0.8, 0.8), (1.8, 0.95), (2.6, 0.5)]),
Polygon([[0, 0], [0, 4], [4, 4], [4, 0]])
)
)
with self.assertRaises(ValueError):
Centerline(GEOMETRY_COLLECTION)
|
|
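Editorial note: every negative test in the centerline record above funnels into the same input check. The actual Centerline constructor is not included here, so the guard below is a hedged sketch only, written to match what the tests demand.
from shapely.geometry import Polygon

def validate_centerline_input(input_geom):
    # Accept plain Polygons only; everything else raises, as the tests expect.
    if not isinstance(input_geom, Polygon):
        raise ValueError('Centerline only supports Polygon geometries')
    return input_geom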
6ba5fe781d32467a185e0c0d73fda401846e5370
|
scripts/update_centroid_reports.py
|
scripts/update_centroid_reports.py
|
#!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import os
import argparse
from mica import centroid_dashboard
# Cheat. Needs entrypoint scripts
centroid_dashboard.update_observed_metrics()
|
Add a silly script for the centroid reports
|
Add a silly script for the centroid reports
|
Python
|
bsd-3-clause
|
sot/mica,sot/mica
|
Add a silly script for the centroid reports
|
#!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import os
import argparse
from mica import centroid_dashboard
# Cheat. Needs entrypoint scripts
centroid_dashboard.update_observed_metrics()
|
<commit_before><commit_msg>Add a silly script for the centroid reports<commit_after>
|
#!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import os
import argparse
from mica import centroid_dashboard
# Cheat. Needs entrypoint scripts
centroid_dashboard.update_observed_metrics()
|
Add a silly script for the centroid reports#!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import os
import argparse
from mica import centroid_dashboard
# Cheat. Needs entrypoint scripts
centroid_dashboard.update_observed_metrics()
|
<commit_before><commit_msg>Add a silly script for the centroid reports<commit_after>#!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import os
import argparse
from mica import centroid_dashboard
# Cheat. Needs entrypoint scripts
centroid_dashboard.update_observed_metrics()
|
|
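Editorial note: the mica script above imports argparse but never builds a parser, which its own "Cheat" comment acknowledges. Below is a hedged sketch of the entry point that comment alludes to; the description string is an assumption.
import argparse
from mica import centroid_dashboard

def main():
    parser = argparse.ArgumentParser(description='Update centroid dashboard reports')
    parser.parse_args()
    centroid_dashboard.update_observed_metrics()

if __name__ == '__main__':
    main()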
3eb99e0cd378e9cc3e15bef85c55e02ef28a8af3
|
education/management/commands/fake_incoming_message.py
|
education/management/commands/fake_incoming_message.py
|
from django.core.management.base import BaseCommand
from optparse import make_option
from rapidsms_httprouter.router import get_router
from rapidsms.models import Connection
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option("-p", "--phone", dest="phone"),
make_option("-t", "--text", dest="text"),
)
def handle(self, **options):
if not options['phone']:
phone = raw_input('Phone number you wish the message to appear to come from: ')
else:
phone = options['phone']
if not options['text']:
text = raw_input('Text of the message: ')
else:
text = options['text']
connection = Connection.object.get(identity = phone)
router = get_router()
handled = router.handle_incoming(connection.backend.name, connection.identity, text)
self.stdout.write('Done!\n')
|
Add command to make testing incoming messages easier.
|
Add command to make testing incoming messages easier.
|
Python
|
bsd-3-clause
|
unicefuganda/edtrac,unicefuganda/edtrac,unicefuganda/edtrac
|
Add command to make testing incoming messages easier.
|
from django.core.management.base import BaseCommand
from optparse import make_option
from rapidsms_httprouter.router import get_router
from rapidsms.models import Connection
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option("-p", "--phone", dest="phone"),
make_option("-t", "--text", dest="text"),
)
def handle(self, **options):
if not options['phone']:
phone = raw_input('Phone number you wish the message to appear to come from: ')
else:
phone = options['phone']
if not options['text']:
text = raw_input('Text of the message: ')
else:
text = options['text']
connection = Connection.object.get(identity = phone)
router = get_router()
handled = router.handle_incoming(connection.backend.name, connection.identity, text)
self.stdout.write('Done!\n')
|
<commit_before><commit_msg>Add command to make testing incoming messages easier.<commit_after>
|
from django.core.management.base import BaseCommand
from optparse import make_option
from rapidsms_httprouter.router import get_router
from rapidsms.models import Connection
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option("-p", "--phone", dest="phone"),
make_option("-t", "--text", dest="text"),
)
def handle(self, **options):
if not options['phone']:
phone = raw_input('Phone number you wish the message to appear to come from: ')
else:
phone = options['phone']
if not options['text']:
text = raw_input('Text of the message: ')
else:
text = options['text']
connection = Connection.object.get(identity = phone)
router = get_router()
handled = router.handle_incoming(connection.backend.name, connection.identity, text)
self.stdout.write('Done!\n')
|
Add command to make testing incoming messages easier.from django.core.management.base import BaseCommand
from optparse import make_option
from rapidsms_httprouter.router import get_router
from rapidsms.models import Connection
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option("-p", "--phone", dest="phone"),
make_option("-t", "--text", dest="text"),
)
def handle(self, **options):
if not options['phone']:
phone = raw_input('Phone number you wish the message to appear to come from: ')
else:
phone = options['phone']
if not options['text']:
text = raw_input('Text of the message: ')
else:
text = options['text']
connection = Connection.object.get(identity = phone)
router = get_router()
handled = router.handle_incoming(connection.backend.name, connection.identity, text)
self.stdout.write('Done!\n')
|
<commit_before><commit_msg>Add command to make testing incoming messages easier.<commit_after>from django.core.management.base import BaseCommand
from optparse import make_option
from rapidsms_httprouter.router import get_router
from rapidsms.models import Connection
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option("-p", "--phone", dest="phone"),
make_option("-t", "--text", dest="text"),
)
def handle(self, **options):
if not options['phone']:
phone = raw_input('Phone number you wish the message to appear to come from: ')
else:
phone = options['phone']
if not options['text']:
text = raw_input('Text of the message: ')
else:
text = options['text']
connection = Connection.object.get(identity = phone)
router = get_router()
handled = router.handle_incoming(connection.backend.name, connection.identity, text)
self.stdout.write('Done!\n')
|
|
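Editorial note: the fake_incoming_message command above prompts for any option it is not given. As a hedged usage sketch, it can also be driven from Python through Django's call_command; the phone number and text below are placeholders.
from django.core.management import call_command

call_command('fake_incoming_message', phone='+256700000000', text='test message')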
5e53c6fd8f58935adfe8db3d7825dfd91780f961
|
test/data/observatory/repository/test_repo_client.py
|
test/data/observatory/repository/test_repo_client.py
|
import unittest
from cartoframes.data.clients import SQLClient
from cartoframes.data.observatory.repository.repo_client import RepoClient
from ..examples import db_dataset1, db_dataset2
try:
from unittest.mock import Mock, patch
except ImportError:
from mock import Mock, patch
class TestRepoClient(unittest.TestCase):
@patch.object(SQLClient, 'query')
def test_run_query_with_one_filter(self, mocked_client):
# Given
mocked_client.return_value = [db_dataset1, db_dataset2]
repo = RepoClient()
query = 'SELECT t.* FROM datasets t'
filters = {'category_id': 'demographics'}
expected_query = "SELECT t.* FROM datasets t WHERE t.category_id = 'demographics'"
# When
categories = repo._run_query(query, filters)
# Then
mocked_client.assert_called_once_with(expected_query)
assert categories == [db_dataset1, db_dataset2]
@patch.object(SQLClient, 'query')
def test_run_query_with_multiple_filter(self, mocked_client):
# Given
mocked_client.return_value = [db_dataset1, db_dataset2]
repo = RepoClient()
query = 'SELECT t.* FROM datasets t'
filters = {
'category_id': 'demographics',
'country_id': 'usa'}
expected_query = "SELECT t.* FROM datasets t WHERE t.category_id = 'demographics' AND t.country_id = 'usa'"
# When
datasets = repo._run_query(query, filters)
# Then
mocked_client.assert_called_once_with(expected_query)
assert datasets == [db_dataset1, db_dataset2]
@patch.object(SQLClient, 'query')
def test_run_query_with_id_list(self, mocked_client):
# Given
mocked_client.return_value = [db_dataset1, db_dataset2]
repo = RepoClient()
query = 'SELECT t.* FROM datasets t'
filters = {'id': ['carto-do.dataset.census', 'carto-do.dataset.municipalities']}
expected_query = "SELECT t.* FROM datasets t " \
"WHERE t.id IN ('carto-do.dataset.census','carto-do.dataset.municipalities')"
# When
datasets = repo._run_query(query, filters)
# Then
mocked_client.assert_called_once_with(expected_query)
assert datasets == [db_dataset1, db_dataset2]
|
Add unit tests for repo client
|
Add unit tests for repo client
|
Python
|
bsd-3-clause
|
CartoDB/cartoframes,CartoDB/cartoframes
|
Add unit tests for repo client
|
import unittest
from cartoframes.data.clients import SQLClient
from cartoframes.data.observatory.repository.repo_client import RepoClient
from ..examples import db_dataset1, db_dataset2
try:
from unittest.mock import Mock, patch
except ImportError:
from mock import Mock, patch
class TestRepoClient(unittest.TestCase):
@patch.object(SQLClient, 'query')
def test_run_query_with_one_filter(self, mocked_client):
# Given
mocked_client.return_value = [db_dataset1, db_dataset2]
repo = RepoClient()
query = 'SELECT t.* FROM datasets t'
filters = {'category_id': 'demographics'}
expected_query = "SELECT t.* FROM datasets t WHERE t.category_id = 'demographics'"
# When
categories = repo._run_query(query, filters)
# Then
mocked_client.assert_called_once_with(expected_query)
assert categories == [db_dataset1, db_dataset2]
@patch.object(SQLClient, 'query')
def test_run_query_with_multiple_filter(self, mocked_client):
# Given
mocked_client.return_value = [db_dataset1, db_dataset2]
repo = RepoClient()
query = 'SELECT t.* FROM datasets t'
filters = {
'category_id': 'demographics',
'country_id': 'usa'}
expected_query = "SELECT t.* FROM datasets t WHERE t.category_id = 'demographics' AND t.country_id = 'usa'"
# When
datasets = repo._run_query(query, filters)
# Then
mocked_client.assert_called_once_with(expected_query)
assert datasets == [db_dataset1, db_dataset2]
@patch.object(SQLClient, 'query')
def test_run_query_with_id_list(self, mocked_client):
# Given
mocked_client.return_value = [db_dataset1, db_dataset2]
repo = RepoClient()
query = 'SELECT t.* FROM datasets t'
filters = {'id': ['carto-do.dataset.census', 'carto-do.dataset.municipalities']}
expected_query = "SELECT t.* FROM datasets t " \
"WHERE t.id IN ('carto-do.dataset.census','carto-do.dataset.municipalities')"
# When
datasets = repo._run_query(query, filters)
# Then
mocked_client.assert_called_once_with(expected_query)
assert datasets == [db_dataset1, db_dataset2]
|
<commit_before><commit_msg>Add unit tests for repo client<commit_after>
|
import unittest
from cartoframes.data.clients import SQLClient
from cartoframes.data.observatory.repository.repo_client import RepoClient
from ..examples import db_dataset1, db_dataset2
try:
from unittest.mock import Mock, patch
except ImportError:
from mock import Mock, patch
class TestRepoClient(unittest.TestCase):
@patch.object(SQLClient, 'query')
def test_run_query_with_one_filter(self, mocked_client):
# Given
mocked_client.return_value = [db_dataset1, db_dataset2]
repo = RepoClient()
query = 'SELECT t.* FROM datasets t'
filters = {'category_id': 'demographics'}
expected_query = "SELECT t.* FROM datasets t WHERE t.category_id = 'demographics'"
# When
categories = repo._run_query(query, filters)
# Then
mocked_client.assert_called_once_with(expected_query)
assert categories == [db_dataset1, db_dataset2]
@patch.object(SQLClient, 'query')
def test_run_query_with_multiple_filter(self, mocked_client):
# Given
mocked_client.return_value = [db_dataset1, db_dataset2]
repo = RepoClient()
query = 'SELECT t.* FROM datasets t'
filters = {
'category_id': 'demographics',
'country_id': 'usa'}
expected_query = "SELECT t.* FROM datasets t WHERE t.category_id = 'demographics' AND t.country_id = 'usa'"
# When
datasets = repo._run_query(query, filters)
# Then
mocked_client.assert_called_once_with(expected_query)
assert datasets == [db_dataset1, db_dataset2]
@patch.object(SQLClient, 'query')
def test_run_query_with_id_list(self, mocked_client):
# Given
mocked_client.return_value = [db_dataset1, db_dataset2]
repo = RepoClient()
query = 'SELECT t.* FROM datasets t'
filters = {'id': ['carto-do.dataset.census', 'carto-do.dataset.municipalities']}
expected_query = "SELECT t.* FROM datasets t " \
"WHERE t.id IN ('carto-do.dataset.census','carto-do.dataset.municipalities')"
# When
datasets = repo._run_query(query, filters)
# Then
mocked_client.assert_called_once_with(expected_query)
assert datasets == [db_dataset1, db_dataset2]
|
Add unit tests for repo clientimport unittest
from cartoframes.data.clients import SQLClient
from cartoframes.data.observatory.repository.repo_client import RepoClient
from ..examples import db_dataset1, db_dataset2
try:
from unittest.mock import Mock, patch
except ImportError:
from mock import Mock, patch
class TestRepoClient(unittest.TestCase):
@patch.object(SQLClient, 'query')
def test_run_query_with_one_filter(self, mocked_client):
# Given
mocked_client.return_value = [db_dataset1, db_dataset2]
repo = RepoClient()
query = 'SELECT t.* FROM datasets t'
filters = {'category_id': 'demographics'}
expected_query = "SELECT t.* FROM datasets t WHERE t.category_id = 'demographics'"
# When
categories = repo._run_query(query, filters)
# Then
mocked_client.assert_called_once_with(expected_query)
assert categories == [db_dataset1, db_dataset2]
@patch.object(SQLClient, 'query')
def test_run_query_with_multiple_filter(self, mocked_client):
# Given
mocked_client.return_value = [db_dataset1, db_dataset2]
repo = RepoClient()
query = 'SELECT t.* FROM datasets t'
filters = {
'category_id': 'demographics',
'country_id': 'usa'}
expected_query = "SELECT t.* FROM datasets t WHERE t.category_id = 'demographics' AND t.country_id = 'usa'"
# When
datasets = repo._run_query(query, filters)
# Then
mocked_client.assert_called_once_with(expected_query)
assert datasets == [db_dataset1, db_dataset2]
@patch.object(SQLClient, 'query')
def test_run_query_with_id_list(self, mocked_client):
# Given
mocked_client.return_value = [db_dataset1, db_dataset2]
repo = RepoClient()
query = 'SELECT t.* FROM datasets t'
filters = {'id': ['carto-do.dataset.census', 'carto-do.dataset.municipalities']}
expected_query = "SELECT t.* FROM datasets t " \
"WHERE t.id IN ('carto-do.dataset.census','carto-do.dataset.municipalities')"
# When
datasets = repo._run_query(query, filters)
# Then
mocked_client.assert_called_once_with(expected_query)
assert datasets == [db_dataset1, db_dataset2]
|
<commit_before><commit_msg>Add unit tests for repo client<commit_after>import unittest
from cartoframes.data.clients import SQLClient
from cartoframes.data.observatory.repository.repo_client import RepoClient
from ..examples import db_dataset1, db_dataset2
try:
from unittest.mock import Mock, patch
except ImportError:
from mock import Mock, patch
class TestRepoClient(unittest.TestCase):
@patch.object(SQLClient, 'query')
def test_run_query_with_one_filter(self, mocked_client):
# Given
mocked_client.return_value = [db_dataset1, db_dataset2]
repo = RepoClient()
query = 'SELECT t.* FROM datasets t'
filters = {'category_id': 'demographics'}
expected_query = "SELECT t.* FROM datasets t WHERE t.category_id = 'demographics'"
# When
categories = repo._run_query(query, filters)
# Then
mocked_client.assert_called_once_with(expected_query)
assert categories == [db_dataset1, db_dataset2]
@patch.object(SQLClient, 'query')
def test_run_query_with_multiple_filter(self, mocked_client):
# Given
mocked_client.return_value = [db_dataset1, db_dataset2]
repo = RepoClient()
query = 'SELECT t.* FROM datasets t'
filters = {
'category_id': 'demographics',
'country_id': 'usa'}
expected_query = "SELECT t.* FROM datasets t WHERE t.category_id = 'demographics' AND t.country_id = 'usa'"
# When
datasets = repo._run_query(query, filters)
# Then
mocked_client.assert_called_once_with(expected_query)
assert datasets == [db_dataset1, db_dataset2]
@patch.object(SQLClient, 'query')
def test_run_query_with_id_list(self, mocked_client):
# Given
mocked_client.return_value = [db_dataset1, db_dataset2]
repo = RepoClient()
query = 'SELECT t.* FROM datasets t'
filters = {'id': ['carto-do.dataset.census', 'carto-do.dataset.municipalities']}
expected_query = "SELECT t.* FROM datasets t " \
"WHERE t.id IN ('carto-do.dataset.census','carto-do.dataset.municipalities')"
# When
datasets = repo._run_query(query, filters)
# Then
mocked_client.assert_called_once_with(expected_query)
assert datasets == [db_dataset1, db_dataset2]
|
|
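Editorial note: the mocked assertions in the RepoClient tests above encode how filters should be rendered into SQL. The real _run_query body is not shown in this record, so the function below is only a hedged sketch of filter-to-WHERE rendering consistent with those expected strings; its name is invented.
def render_filters(filters):
    # Turn {'category_id': 'demographics'} or {'id': [...]} into the WHERE body the tests expect.
    parts = []
    for field, value in filters.items():
        if isinstance(value, list):
            quoted = ','.join("'{0}'".format(v) for v in value)
            parts.append('t.{0} IN ({1})'.format(field, quoted))
        else:
            parts.append("t.{0} = '{1}'".format(field, value))
    return ' AND '.join(parts)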
d4949ced78d06ebc87a6f938d67f706eb9425f91
|
flatblocks/management/commands/unassignedflatblocks.py
|
flatblocks/management/commands/unassignedflatblocks.py
|
import os
from django.core.management.base import BaseCommand
from django.template.loader import get_template
from django.conf import settings
from flatblocks.templatetags.flatblock_tags import FlatBlockNode
from flatblocks.models import FlatBlock
class Command(BaseCommand):
help = "List unassigned flatblocks in the templates"
def handle(self, *args, **options):
save_nodes = (len(args) and args[0] == 'create')
templ_list = []
flatblock_nodes = []
print_nodes = []
#get list of templates
for templ_dir in settings.TEMPLATE_DIRS:
templ_list += [os.path.join(dp, f) for dp, dn, fn in os.walk(os.path.expanduser(templ_dir)) for f in fn]
#load templates and get FlatBlockNode slugs
for templ in templ_list:
try:
t = get_template(templ)
flatblock_nodes += [node.slug for node in t.nodelist.get_nodes_by_type(FlatBlockNode)]
except:
pass
#distinct slugs
flatblock_nodes = list(set(flatblock_nodes))
#check if flatblocks have entry in database
for node in flatblock_nodes:
if FlatBlock.objects.filter(slug=node).count() == 0:
#if create argument was supplied, save empty nodes
if save_nodes:
block = FlatBlock(header="[{0}]".format(node), content="Generated flatblock", slug=node)
block.save()
print_nodes.append(node)
if len(print_nodes):
if save_nodes:
print "Following nodes were created:"
print "\n".join(print_nodes)
else:
print "All FlatBlock items are in database"
|
Add command for checking & saving unassigned flatblocks in the database
|
Add command for checking & saving unassigned flatblocks in the database
|
Python
|
bsd-3-clause
|
funkybob/django-flatblocks,funkybob/django-flatblocks
|
Add command for checking & saving unassigned flatblocks in the database
|
import os
from django.core.management.base import BaseCommand
from django.template.loader import get_template
from django.conf import settings
from flatblocks.templatetags.flatblock_tags import FlatBlockNode
from flatblocks.models import FlatBlock
class Command(BaseCommand):
help = "List unassigned flatblocks in the templates"
def handle(self, *args, **options):
save_nodes = (len(args) and args[0] == 'create')
templ_list = []
flatblock_nodes = []
print_nodes = []
#get list of templates
for templ_dir in settings.TEMPLATE_DIRS:
templ_list += [os.path.join(dp, f) for dp, dn, fn in os.walk(os.path.expanduser(templ_dir)) for f in fn]
#load templates and get FlatBlockNode slugs
for templ in templ_list:
try:
t = get_template(templ)
flatblock_nodes += [node.slug for node in t.nodelist.get_nodes_by_type(FlatBlockNode)]
except:
pass
#distinct slugs
flatblock_nodes = list(set(flatblock_nodes))
#check if flatblocks have entry in database
for node in flatblock_nodes:
if FlatBlock.objects.filter(slug=node).count() == 0:
#if create argument was supplied, save empty nodes
if save_nodes:
block = FlatBlock(header="[{0}]".format(node), content="Generated flatblock", slug=node)
block.save()
print_nodes.append(node)
if len(print_nodes):
if save_nodes:
print "Following nodes were created:"
print "\n".join(print_nodes)
else:
print "All FlatBlock items are in database"
|
<commit_before><commit_msg>Add command for checking & saving unassigned flatblocks in the database<commit_after>
|
import os
from django.core.management.base import BaseCommand
from django.template.loader import get_template
from django.conf import settings
from flatblocks.templatetags.flatblock_tags import FlatBlockNode
from flatblocks.models import FlatBlock
class Command(BaseCommand):
help = "List unassigned flatblocks in the templates"
def handle(self, *args, **options):
save_nodes = (len(args) and args[0] == 'create')
templ_list = []
flatblock_nodes = []
print_nodes = []
#get list of templates
for templ_dir in settings.TEMPLATE_DIRS:
templ_list += [os.path.join(dp, f) for dp, dn, fn in os.walk(os.path.expanduser(templ_dir)) for f in fn]
#load templates and get FlatBlockNode slugs
for templ in templ_list:
try:
t = get_template(templ)
flatblock_nodes += [node.slug for node in t.nodelist.get_nodes_by_type(FlatBlockNode)]
except:
pass
#distinct slugs
flatblock_nodes = list(set(flatblock_nodes))
#check if flatblocks have entry in database
for node in flatblock_nodes:
if FlatBlock.objects.filter(slug=node).count() == 0:
#if create argument was supplied, save empty nodes
if save_nodes:
block = FlatBlock(header="[{0}]".format(node), content="Generated flatblock", slug=node)
block.save()
print_nodes.append(node)
if len(print_nodes):
if save_nodes:
print "Following nodes were created:"
print "\n".join(print_nodes)
else:
print "All FlatBlock items are in database"
|
Add command for checking & saving unassigned flatblocks in the databaseimport os
from django.core.management.base import BaseCommand
from django.template.loader import get_template
from django.conf import settings
from flatblocks.templatetags.flatblock_tags import FlatBlockNode
from flatblocks.models import FlatBlock
class Command(BaseCommand):
help = "List unassigned flatblocks in the templates"
def handle(self, *args, **options):
save_nodes = (len(args) and args[0] == 'create')
templ_list = []
flatblock_nodes = []
print_nodes = []
#get list of templates
for templ_dir in settings.TEMPLATE_DIRS:
templ_list += [os.path.join(dp, f) for dp, dn, fn in os.walk(os.path.expanduser(templ_dir)) for f in fn]
#load templates and get FlatBlockNode slugs
for templ in templ_list:
try:
t = get_template(templ)
flatblock_nodes += [node.slug for node in t.nodelist.get_nodes_by_type(FlatBlockNode)]
except:
pass
#distinct slugs
flatblock_nodes = list(set(flatblock_nodes))
#check if flatblocks have entry in database
for node in flatblock_nodes:
if FlatBlock.objects.filter(slug=node).count() == 0:
#if create argument was supplied, save empty nodes
if save_nodes:
block = FlatBlock(header="[{0}]".format(node), content="Generated flatblock", slug=node)
block.save()
print_nodes.append(node)
if len(print_nodes):
if save_nodes:
print "Following nodes were created:"
print "\n".join(print_nodes)
else:
print "All FlatBlock items are in database"
|
<commit_before><commit_msg>Add command for checking & saving unassigned flatblocks in the database<commit_after>import os
from django.core.management.base import BaseCommand
from django.template.loader import get_template
from django.conf import settings
from flatblocks.templatetags.flatblock_tags import FlatBlockNode
from flatblocks.models import FlatBlock
class Command(BaseCommand):
help = "List unassigned flatblocks in the templates"
def handle(self, *args, **options):
save_nodes = (len(args) and args[0] == 'create')
templ_list = []
flatblock_nodes = []
print_nodes = []
#get list of templates
for templ_dir in settings.TEMPLATE_DIRS:
templ_list += [os.path.join(dp, f) for dp, dn, fn in os.walk(os.path.expanduser(templ_dir)) for f in fn]
#load templates and get FlatBlockNode slugs
for templ in templ_list:
try:
t = get_template(templ)
flatblock_nodes += [node.slug for node in t.nodelist.get_nodes_by_type(FlatBlockNode)]
except:
pass
#distinct slugs
flatblock_nodes = list(set(flatblock_nodes))
#check if flatblocks have entry in database
for node in flatblock_nodes:
if FlatBlock.objects.filter(slug=node).count() == 0:
#if create argument was supplied, save empty nodes
if save_nodes:
block = FlatBlock(header="[{0}]".format(node), content="Generated flatblock", slug=node)
block.save()
print_nodes.append(node)
if len(print_nodes):
if save_nodes:
print "Following nodes were created:"
print "\n".join(print_nodes)
else:
print "All FlatBlock items are in database"
|
|
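A quick usage sketch for the management command in the record above. The command's module name is not shown in this record, so 'list_flatblocks' below is only a placeholder for whatever file sits under management/commands/ in the app:

# Hypothetical invocation of the command above via Django's call_command;
# 'list_flatblocks' is a placeholder command name, not taken from the repo.
from django.core.management import call_command

call_command('list_flatblocks')            # report template flatblocks missing from the database
call_command('list_flatblocks', 'create')  # additionally create empty FlatBlock rows for them

From a shell, the equivalent would be manage.py list_flatblocks [create].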
cad8853286ee87c3efca07e80250cc5b5e43f3e3
|
pidman/pid/migrations/0003_rm_invalidark_target_urlfield.py
|
pidman/pid/migrations/0003_rm_invalidark_target_urlfield.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pid', '0002_pid_sequence_initial_value'),
]
operations = [
migrations.DeleteModel(
name='InvalidArk',
),
migrations.AlterField(
model_name='target',
name='uri',
field=models.URLField(max_length=2048),
),
]
|
Add invalid ark and target uri field migration
|
Add invalid ark and target uri field migration
Not significant changes, but django complains about model changes
not matching otherwise.
|
Python
|
apache-2.0
|
emory-libraries/pidman,emory-libraries/pidman
|
Add invalid ark and target uri field migration
Not significant changes, but django complains about model changes
not matching otherwise.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pid', '0002_pid_sequence_initial_value'),
]
operations = [
migrations.DeleteModel(
name='InvalidArk',
),
migrations.AlterField(
model_name='target',
name='uri',
field=models.URLField(max_length=2048),
),
]
|
<commit_before><commit_msg>Add invalid ark and target uri field migration
Not significant changes, but django complains about model changes
not matching otherwise.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pid', '0002_pid_sequence_initial_value'),
]
operations = [
migrations.DeleteModel(
name='InvalidArk',
),
migrations.AlterField(
model_name='target',
name='uri',
field=models.URLField(max_length=2048),
),
]
|
Add invalid ark and target uri field migration
Not significant changes, but django complains about model changes
not matching otherwise.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pid', '0002_pid_sequence_initial_value'),
]
operations = [
migrations.DeleteModel(
name='InvalidArk',
),
migrations.AlterField(
model_name='target',
name='uri',
field=models.URLField(max_length=2048),
),
]
|
<commit_before><commit_msg>Add invalid ark and target uri field migration
Not significant changes, but django complains about model changes
not matching otherwise.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pid', '0002_pid_sequence_initial_value'),
]
operations = [
migrations.DeleteModel(
name='InvalidArk',
),
migrations.AlterField(
model_name='target',
name='uri',
field=models.URLField(max_length=2048),
),
]
|
|
82f920aa0538f189417621f32a532644282ca7f6
|
pylearn2/scripts/tests/test_summarize_model.py
|
pylearn2/scripts/tests/test_summarize_model.py
|
"""
A unit test for the summarize_model.py script
"""
import cPickle
import os
from pylearn2.testing.skip import skip_if_no_matplotlib
from pylearn2.models.mlp import MLP, Linear
from pylearn2.scripts.summarize_model import summarize
def test_summarize_model():
"""
    Asks the summarize_model.py script to inspect a pickled model and
    checks that it completes successfully
"""
skip_if_no_matplotlib()
with open('model.pkl', 'wb') as f:
cPickle.dump(MLP(layers=[Linear(dim=5, layer_name='h0', irange=0.1)],
nvis=10), f, protocol=cPickle.HIGHEST_PROTOCOL)
summarize('model.pkl')
os.remove('model.pkl')
|
Add unit test for summarize_model.py
|
Add unit test for summarize_model.py
|
Python
|
bsd-3-clause
|
msingh172/pylearn2,abergeron/pylearn2,nouiz/pylearn2,sandeepkbhat/pylearn2,alexjc/pylearn2,hantek/pylearn2,ashhher3/pylearn2,abergeron/pylearn2,se4u/pylearn2,Refefer/pylearn2,shiquanwang/pylearn2,ashhher3/pylearn2,w1kke/pylearn2,TNick/pylearn2,cosmoharrigan/pylearn2,fyffyt/pylearn2,fishcorn/pylearn2,fishcorn/pylearn2,goodfeli/pylearn2,aalmah/pylearn2,jeremyfix/pylearn2,fyffyt/pylearn2,lancezlin/pylearn2,nouiz/pylearn2,theoryno3/pylearn2,daemonmaker/pylearn2,mclaughlin6464/pylearn2,matrogers/pylearn2,aalmah/pylearn2,JesseLivezey/pylearn2,se4u/pylearn2,nouiz/pylearn2,KennethPierce/pylearnk,junbochen/pylearn2,kastnerkyle/pylearn2,lisa-lab/pylearn2,shiquanwang/pylearn2,aalmah/pylearn2,ashhher3/pylearn2,ddboline/pylearn2,lamblin/pylearn2,jeremyfix/pylearn2,msingh172/pylearn2,mkraemer67/pylearn2,lancezlin/pylearn2,caidongyun/pylearn2,se4u/pylearn2,alexjc/pylearn2,JesseLivezey/pylearn2,hyqneuron/pylearn2-maxsom,chrish42/pylearn,mclaughlin6464/pylearn2,junbochen/pylearn2,fyffyt/pylearn2,bartvm/pylearn2,JesseLivezey/plankton,kose-y/pylearn2,theoryno3/pylearn2,fulmicoton/pylearn2,w1kke/pylearn2,JesseLivezey/pylearn2,hantek/pylearn2,TNick/pylearn2,aalmah/pylearn2,skearnes/pylearn2,chrish42/pylearn,jamessergeant/pylearn2,TNick/pylearn2,fyffyt/pylearn2,ddboline/pylearn2,jeremyfix/pylearn2,sandeepkbhat/pylearn2,bartvm/pylearn2,KennethPierce/pylearnk,skearnes/pylearn2,mclaughlin6464/pylearn2,Refefer/pylearn2,pombredanne/pylearn2,lunyang/pylearn2,pkainz/pylearn2,Refefer/pylearn2,CIFASIS/pylearn2,daemonmaker/pylearn2,skearnes/pylearn2,pombredanne/pylearn2,msingh172/pylearn2,lancezlin/pylearn2,theoryno3/pylearn2,skearnes/pylearn2,mkraemer67/pylearn2,fulmicoton/pylearn2,JesseLivezey/plankton,JesseLivezey/pylearn2,CIFASIS/pylearn2,pombredanne/pylearn2,caidongyun/pylearn2,lisa-lab/pylearn2,theoryno3/pylearn2,ddboline/pylearn2,lunyang/pylearn2,TNick/pylearn2,shiquanwang/pylearn2,sandeepkbhat/pylearn2,caidongyun/pylearn2,lisa-lab/pylearn2,kastnerkyle/pylearn2,hyqneuron/pylearn2-maxsom,sandeepkbhat/pylearn2,JesseLivezey/plankton,kastnerkyle/pylearn2,abergeron/pylearn2,fishcorn/pylearn2,mclaughlin6464/pylearn2,caidongyun/pylearn2,se4u/pylearn2,hyqneuron/pylearn2-maxsom,CIFASIS/pylearn2,kose-y/pylearn2,hantek/pylearn2,msingh172/pylearn2,cosmoharrigan/pylearn2,Refefer/pylearn2,lamblin/pylearn2,chrish42/pylearn,mkraemer67/pylearn2,fulmicoton/pylearn2,pkainz/pylearn2,lamblin/pylearn2,lunyang/pylearn2,cosmoharrigan/pylearn2,bartvm/pylearn2,pombredanne/pylearn2,lancezlin/pylearn2,junbochen/pylearn2,KennethPierce/pylearnk,hyqneuron/pylearn2-maxsom,abergeron/pylearn2,chrish42/pylearn,jeremyfix/pylearn2,w1kke/pylearn2,hantek/pylearn2,JesseLivezey/plankton,pkainz/pylearn2,kose-y/pylearn2,lunyang/pylearn2,fishcorn/pylearn2,matrogers/pylearn2,jamessergeant/pylearn2,w1kke/pylearn2,bartvm/pylearn2,alexjc/pylearn2,pkainz/pylearn2,shiquanwang/pylearn2,kose-y/pylearn2,junbochen/pylearn2,daemonmaker/pylearn2,CIFASIS/pylearn2,ddboline/pylearn2,daemonmaker/pylearn2,kastnerkyle/pylearn2,goodfeli/pylearn2,lisa-lab/pylearn2,KennethPierce/pylearnk,woozzu/pylearn2,woozzu/pylearn2,alexjc/pylearn2,matrogers/pylearn2,goodfeli/pylearn2,fulmicoton/pylearn2,mkraemer67/pylearn2,matrogers/pylearn2,goodfeli/pylearn2,woozzu/pylearn2,jamessergeant/pylearn2,woozzu/pylearn2,jamessergeant/pylearn2,lamblin/pylearn2,nouiz/pylearn2,ashhher3/pylearn2,cosmoharrigan/pylearn2
|
Add unit test for summarize_model.py
|
"""
A unit test for the summarize_model.py script
"""
import cPickle
import os
from pylearn2.testing.skip import skip_if_no_matplotlib
from pylearn2.models.mlp import MLP, Linear
from pylearn2.scripts.summarize_model import summarize
def test_summarize_model():
"""
    Asks the summarize_model.py script to inspect a pickled model and
    checks that it completes successfully
"""
skip_if_no_matplotlib()
with open('model.pkl', 'wb') as f:
cPickle.dump(MLP(layers=[Linear(dim=5, layer_name='h0', irange=0.1)],
nvis=10), f, protocol=cPickle.HIGHEST_PROTOCOL)
summarize('model.pkl')
os.remove('model.pkl')
|
<commit_before><commit_msg>Add unit test for summarize_model.py<commit_after>
|
"""
A unit test for the summarize_model.py script
"""
import cPickle
import os
from pylearn2.testing.skip import skip_if_no_matplotlib
from pylearn2.models.mlp import MLP, Linear
from pylearn2.scripts.summarize_model import summarize
def test_summarize_model():
"""
    Asks the summarize_model.py script to inspect a pickled model and
    checks that it completes successfully
"""
skip_if_no_matplotlib()
with open('model.pkl', 'wb') as f:
cPickle.dump(MLP(layers=[Linear(dim=5, layer_name='h0', irange=0.1)],
nvis=10), f, protocol=cPickle.HIGHEST_PROTOCOL)
summarize('model.pkl')
os.remove('model.pkl')
|
Add unit test for summarize_model.py"""
A unit test for the summarize_model.py script
"""
import cPickle
import os
from pylearn2.testing.skip import skip_if_no_matplotlib
from pylearn2.models.mlp import MLP, Linear
from pylearn2.scripts.summarize_model import summarize
def test_summarize_model():
"""
    Asks the summarize_model.py script to inspect a pickled model and
    checks that it completes successfully
"""
skip_if_no_matplotlib()
with open('model.pkl', 'wb') as f:
cPickle.dump(MLP(layers=[Linear(dim=5, layer_name='h0', irange=0.1)],
nvis=10), f, protocol=cPickle.HIGHEST_PROTOCOL)
summarize('model.pkl')
os.remove('model.pkl')
|
<commit_before><commit_msg>Add unit test for summarize_model.py<commit_after>"""
A unit test for the summarize_model.py script
"""
import cPickle
import os
from pylearn2.testing.skip import skip_if_no_matplotlib
from pylearn2.models.mlp import MLP, Linear
from pylearn2.scripts.summarize_model import summarize
def test_summarize_model():
"""
    Asks the summarize_model.py script to inspect a pickled model and
    checks that it completes successfully
"""
skip_if_no_matplotlib()
with open('model.pkl', 'wb') as f:
cPickle.dump(MLP(layers=[Linear(dim=5, layer_name='h0', irange=0.1)],
nvis=10), f, protocol=cPickle.HIGHEST_PROTOCOL)
summarize('model.pkl')
os.remove('model.pkl')
|
|
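The test in the record above pickles the model into the working directory and removes the file only after summarize() returns, so a failure leaves model.pkl behind. A cleanup-safe variant sketch (an alternative, not the committed test; it reuses the same pylearn2 imports):

# Sketch of a cleanup-safe variant of the test above; the only change is writing
# the pickle inside a temporary directory that is removed in a finally block.
import os
import shutil
import tempfile
import cPickle
from pylearn2.testing.skip import skip_if_no_matplotlib
from pylearn2.models.mlp import MLP, Linear
from pylearn2.scripts.summarize_model import summarize

def test_summarize_model_cleanup():
    skip_if_no_matplotlib()
    tmpdir = tempfile.mkdtemp()
    path = os.path.join(tmpdir, 'model.pkl')
    try:
        with open(path, 'wb') as f:
            cPickle.dump(MLP(layers=[Linear(dim=5, layer_name='h0', irange=0.1)],
                             nvis=10), f, protocol=cPickle.HIGHEST_PROTOCOL)
        summarize(path)
    finally:
        shutil.rmtree(tmpdir)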
bdc3a1620f15b842ade92815ba24bf611f9d96c1
|
taggit_machinetags/migrations/0002_auto_20201012_1308.py
|
taggit_machinetags/migrations/0002_auto_20201012_1308.py
|
# Generated by Django 3.1.2 on 2020-10-12 12:08
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
('taggit_machinetags', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='machinetaggeditem',
name='content_type',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='taggit_machinetags_machinetaggeditem_tagged_items', to='contenttypes.contenttype', verbose_name='content type'),
),
migrations.AlterField(
model_name='machinetaggeditem',
name='object_id',
field=models.IntegerField(db_index=True, verbose_name='object ID'),
),
]
|
Add migration for content types fields
|
Add migration for content types fields
|
Python
|
bsd-2-clause
|
lpomfrey/django-taggit-machinetags
|
Add migration for content types fields
|
# Generated by Django 3.1.2 on 2020-10-12 12:08
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
('taggit_machinetags', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='machinetaggeditem',
name='content_type',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='taggit_machinetags_machinetaggeditem_tagged_items', to='contenttypes.contenttype', verbose_name='content type'),
),
migrations.AlterField(
model_name='machinetaggeditem',
name='object_id',
field=models.IntegerField(db_index=True, verbose_name='object ID'),
),
]
|
<commit_before><commit_msg>Add migration for content types fields<commit_after>
|
# Generated by Django 3.1.2 on 2020-10-12 12:08
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
('taggit_machinetags', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='machinetaggeditem',
name='content_type',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='taggit_machinetags_machinetaggeditem_tagged_items', to='contenttypes.contenttype', verbose_name='content type'),
),
migrations.AlterField(
model_name='machinetaggeditem',
name='object_id',
field=models.IntegerField(db_index=True, verbose_name='object ID'),
),
]
|
Add migration for content types fields# Generated by Django 3.1.2 on 2020-10-12 12:08
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
('taggit_machinetags', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='machinetaggeditem',
name='content_type',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='taggit_machinetags_machinetaggeditem_tagged_items', to='contenttypes.contenttype', verbose_name='content type'),
),
migrations.AlterField(
model_name='machinetaggeditem',
name='object_id',
field=models.IntegerField(db_index=True, verbose_name='object ID'),
),
]
|
<commit_before><commit_msg>Add migration for content types fields<commit_after># Generated by Django 3.1.2 on 2020-10-12 12:08
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
('taggit_machinetags', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='machinetaggeditem',
name='content_type',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='taggit_machinetags_machinetaggeditem_tagged_items', to='contenttypes.contenttype', verbose_name='content type'),
),
migrations.AlterField(
model_name='machinetaggeditem',
name='object_id',
field=models.IntegerField(db_index=True, verbose_name='object ID'),
),
]
|
|
e333e8d07ee32668fc132a21c13cdf674443d1e9
|
chrome_frame/tools/helper_shutdown.py
|
chrome_frame/tools/helper_shutdown.py
|
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''This is a simple helper script to shut down the Chrome Frame helper process.
It needs the Python Win32 extensions.'''
import pywintypes
import sys
import win32gui
import win32con
def main():
exit_code = 0
window = win32gui.FindWindow('ChromeFrameHelperWindowClass',
'ChromeFrameHelperWindowName')
if not window:
print 'Chrome Frame helper process not running.'
else:
try:
win32gui.PostMessage(window, win32con.WM_CLOSE, 0, 0)
print 'Chrome Frame helper process shut down.'
except pywintypes.error as ex:
print 'Failed to shutdown Chrome Frame helper process: '
print ex
exit_code = 1
return exit_code
if __name__ == '__main__':
sys.exit(main())
|
Add a tiny helper script to shutdown the chrome frame helper process.
|
Add a tiny helper script to shutdown the chrome frame helper process.
BUG=53127
TEST=Run script, chrome_frame_helper.exe is shut down.
Review URL: http://codereview.chromium.org/3312010
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@58587 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
Fireblend/chromium-crosswalk,M4sse/chromium.src,keishi/chromium,patrickm/chromium.src,pozdnyakov/chromium-crosswalk,patrickm/chromium.src,nacl-webkit/chrome_deps,anirudhSK/chromium,junmin-zhu/chromium-rivertrail,fujunwei/chromium-crosswalk,ChromiumWebApps/chromium,Just-D/chromium-1,dednal/chromium.src,dushu1203/chromium.src,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,Chilledheart/chromium,ChromiumWebApps/chromium,jaruba/chromium.src,pozdnyakov/chromium-crosswalk,Just-D/chromium-1,junmin-zhu/chromium-rivertrail,mohamed--abdel-maksoud/chromium.src,pozdnyakov/chromium-crosswalk,M4sse/chromium.src,timopulkkinen/BubbleFish,krieger-od/nwjs_chromium.src,keishi/chromium,mogoweb/chromium-crosswalk,krieger-od/nwjs_chromium.src,ondra-novak/chromium.src,dednal/chromium.src,zcbenz/cefode-chromium,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,anirudhSK/chromium,TheTypoMaster/chromium-crosswalk,junmin-zhu/chromium-rivertrail,junmin-zhu/chromium-rivertrail,chuan9/chromium-crosswalk,bright-sparks/chromium-spacewalk,Fireblend/chromium-crosswalk,pozdnyakov/chromium-crosswalk,hujiajie/pa-chromium,hgl888/chromium-crosswalk,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,junmin-zhu/chromium-rivertrail,hujiajie/pa-chromium,patrickm/chromium.src,junmin-zhu/chromium-rivertrail,ondra-novak/chromium.src,zcbenz/cefode-chromium,krieger-od/nwjs_chromium.src,zcbenz/cefode-chromium,hgl888/chromium-crosswalk-efl,nacl-webkit/chrome_deps,pozdnyakov/chromium-crosswalk,axinging/chromium-crosswalk,littlstar/chromium.src,robclark/chromium,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk,jaruba/chromium.src,chuan9/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,nacl-webkit/chrome_deps,rogerwang/chromium,littlstar/chromium.src,hujiajie/pa-chromium,hgl888/chromium-crosswalk,ltilve/chromium,robclark/chromium,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,patrickm/chromium.src,markYoungH/chromium.src,junmin-zhu/chromium-rivertrail,fujunwei/chromium-crosswalk,jaruba/chromium.src,ChromiumWebApps/chromium,hgl888/chromium-crosswalk,dednal/chromium.src,ltilve/chromium,ChromiumWebApps/chromium,Chilledheart/chromium,hgl888/chromium-crosswalk-efl,dushu1203/chromium.src,ltilve/chromium,dushu1203/chromium.src,Chilledheart/chromium,littlstar/chromium.src,PeterWangIntel/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,robclark/chromium,nacl-webkit/chrome_deps,nacl-webkit/chrome_deps,M4sse/chromium.src,robclark/chromium,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,hujiajie/pa-chromium,zcbenz/cefode-chromium,Jonekee/chromium.src,M4sse/chromium.src,timopulkkinen/BubbleFish,krieger-od/nwjs_chromium.src,nacl-webkit/chrome_deps,timopulkkinen/BubbleFish,markYoungH/chromium.src,zcbenz/cefode-chromium,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,dednal/chromium.src,rogerwang/chromium,fujunwei/chromium-crosswalk,mogoweb/chromium-crosswalk,markYoungH/chromium.src,chuan9/chromium-crosswalk,mogoweb/chromium-crosswalk,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,anirudhSK/chromium,Jonekee/chromium.src,hgl888/chromium-crosswalk-efl,keishi/chromium,jaruba/chromium.src,bright-sparks/chromium-spacewalk,zcbenz/cefode-chromium,keishi/chromium,anirudhSK/chromium,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk,mohamed
--abdel-maksoud/chromium.src,dushu1203/chromium.src,nacl-webkit/chrome_deps,littlstar/chromium.src,nacl-webkit/chrome_deps,hgl888/chromium-crosswalk,timopulkkinen/BubbleFish,Just-D/chromium-1,mogoweb/chromium-crosswalk,littlstar/chromium.src,anirudhSK/chromium,keishi/chromium,pozdnyakov/chromium-crosswalk,dushu1203/chromium.src,dushu1203/chromium.src,mogoweb/chromium-crosswalk,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,anirudhSK/chromium,fujunwei/chromium-crosswalk,mogoweb/chromium-crosswalk,fujunwei/chromium-crosswalk,hujiajie/pa-chromium,chuan9/chromium-crosswalk,ondra-novak/chromium.src,fujunwei/chromium-crosswalk,Jonekee/chromium.src,ltilve/chromium,pozdnyakov/chromium-crosswalk,rogerwang/chromium,Pluto-tv/chromium-crosswalk,junmin-zhu/chromium-rivertrail,anirudhSK/chromium,Jonekee/chromium.src,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,pozdnyakov/chromium-crosswalk,dushu1203/chromium.src,nacl-webkit/chrome_deps,mohamed--abdel-maksoud/chromium.src,ondra-novak/chromium.src,rogerwang/chromium,mohamed--abdel-maksoud/chromium.src,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,dednal/chromium.src,anirudhSK/chromium,Chilledheart/chromium,krieger-od/nwjs_chromium.src,Pluto-tv/chromium-crosswalk,keishi/chromium,anirudhSK/chromium,ondra-novak/chromium.src,timopulkkinen/BubbleFish,M4sse/chromium.src,hgl888/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,krieger-od/nwjs_chromium.src,M4sse/chromium.src,Pluto-tv/chromium-crosswalk,mogoweb/chromium-crosswalk,Chilledheart/chromium,littlstar/chromium.src,timopulkkinen/BubbleFish,ltilve/chromium,rogerwang/chromium,mogoweb/chromium-crosswalk,Just-D/chromium-1,ltilve/chromium,ChromiumWebApps/chromium,Chilledheart/chromium,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,Chilledheart/chromium,Jonekee/chromium.src,junmin-zhu/chromium-rivertrail,timopulkkinen/BubbleFish,dednal/chromium.src,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,rogerwang/chromium,timopulkkinen/BubbleFish,Just-D/chromium-1,chuan9/chromium-crosswalk,keishi/chromium,ChromiumWebApps/chromium,ondra-novak/chromium.src,dushu1203/chromium.src,hgl888/chromium-crosswalk-efl,jaruba/chromium.src,patrickm/chromium.src,TheTypoMaster/chromium-crosswalk,timopulkkinen/BubbleFish,zcbenz/cefode-chromium,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk,ChromiumWebApps/chromium,patrickm/chromium.src,M4sse/chromium.src,dednal/chromium.src,nacl-webkit/chrome_deps,anirudhSK/chromium,crosswalk-project/chromium-crosswalk-efl,markYoungH/chromium.src,jaruba/chromium.src,Fireblend/chromium-crosswalk,jaruba/chromium.src,Jonekee/chromium.src,ondra-novak/chromium.src,pozdnyakov/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Just-D/chromium-1,PeterWangIntel/chromium-crosswalk,hujiajie/pa-chromium,PeterWangIntel/chromium-crosswalk,robclark/chromium,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,mogoweb/chromium-crosswalk,timopulkkinen/BubbleFish,fujunwei/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,axinging/chromium-crosswalk,junmin-zhu/chromium-rivertrail,dednal/chromium.src,bright-sparks/chromium-spacewalk,jaruba/chromium.src,markYoungH/chromium.src,fujunwei/chromium-crosswalk,zcbenz/cefode-chromium,ChromiumWebApps/chromium,patrickm/chromium.src,robclark/chromium,nacl-webkit/chrome_deps,littlstar/chromium.src,axinging/chromium-crosswalk,axinging/chromium-crosswalk,rogerwang/chromium,markYoungH/chr
omium.src,keishi/chromium,Just-D/chromium-1,mogoweb/chromium-crosswalk,hujiajie/pa-chromium,Jonekee/chromium.src,hgl888/chromium-crosswalk,zcbenz/cefode-chromium,patrickm/chromium.src,krieger-od/nwjs_chromium.src,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,bright-sparks/chromium-spacewalk,ondra-novak/chromium.src,keishi/chromium,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,Fireblend/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,hujiajie/pa-chromium,M4sse/chromium.src,ChromiumWebApps/chromium,hujiajie/pa-chromium,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,Jonekee/chromium.src,timopulkkinen/BubbleFish,zcbenz/cefode-chromium,ChromiumWebApps/chromium,bright-sparks/chromium-spacewalk,robclark/chromium,Chilledheart/chromium,anirudhSK/chromium,robclark/chromium,ondra-novak/chromium.src,TheTypoMaster/chromium-crosswalk,littlstar/chromium.src,zcbenz/cefode-chromium,markYoungH/chromium.src,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,pozdnyakov/chromium-crosswalk,keishi/chromium,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk-efl,dednal/chromium.src,bright-sparks/chromium-spacewalk,Chilledheart/chromium,ltilve/chromium,bright-sparks/chromium-spacewalk,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,robclark/chromium,mohamed--abdel-maksoud/chromium.src,junmin-zhu/chromium-rivertrail,rogerwang/chromium,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,M4sse/chromium.src,dushu1203/chromium.src,ChromiumWebApps/chromium,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,hujiajie/pa-chromium,anirudhSK/chromium,Jonekee/chromium.src,hgl888/chromium-crosswalk-efl,keishi/chromium,ltilve/chromium,Pluto-tv/chromium-crosswalk,ChromiumWebApps/chromium,Pluto-tv/chromium-crosswalk,robclark/chromium,chuan9/chromium-crosswalk,dednal/chromium.src,bright-sparks/chromium-spacewalk,markYoungH/chromium.src,rogerwang/chromium,crosswalk-project/chromium-crosswalk-efl,rogerwang/chromium,axinging/chromium-crosswalk,ltilve/chromium,patrickm/chromium.src,M4sse/chromium.src,pozdnyakov/chromium-crosswalk,markYoungH/chromium.src,jaruba/chromium.src,hujiajie/pa-chromium
|
Add a tiny helper script to shutdown the chrome frame helper process.
BUG=53127
TEST=Run script, chrome_frame_helper.exe is shut down.
Review URL: http://codereview.chromium.org/3312010
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@58587 0039d316-1c4b-4281-b951-d872f2087c98
|
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''This is a simple helper script to shut down the Chrome Frame helper process.
It needs the Python Win32 extensions.'''
import pywintypes
import sys
import win32gui
import win32con
def main():
exit_code = 0
window = win32gui.FindWindow('ChromeFrameHelperWindowClass',
'ChromeFrameHelperWindowName')
if not window:
print 'Chrome Frame helper process not running.'
else:
try:
win32gui.PostMessage(window, win32con.WM_CLOSE, 0, 0)
print 'Chrome Frame helper process shut down.'
except pywintypes.error as ex:
print 'Failed to shutdown Chrome Frame helper process: '
print ex
exit_code = 1
return exit_code
if __name__ == '__main__':
sys.exit(main())
|
<commit_before><commit_msg>Add a tiny helper script to shutdown the chrome frame helper process.
BUG=53127
TEST=Run script, chrome_frame_helper.exe is shut down.
Review URL: http://codereview.chromium.org/3312010
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@58587 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''This is a simple helper script to shut down the Chrome Frame helper process.
It needs the Python Win32 extensions.'''
import pywintypes
import sys
import win32gui
import win32con
def main():
exit_code = 0
window = win32gui.FindWindow('ChromeFrameHelperWindowClass',
'ChromeFrameHelperWindowName')
if not window:
print 'Chrome Frame helper process not running.'
else:
try:
win32gui.PostMessage(window, win32con.WM_CLOSE, 0, 0)
print 'Chrome Frame helper process shut down.'
except pywintypes.error as ex:
print 'Failed to shutdown Chrome Frame helper process: '
print ex
exit_code = 1
return exit_code
if __name__ == '__main__':
sys.exit(main())
|
Add a tiny helper script to shutdown the chrome frame helper process.
BUG=53127
TEST=Run script, chrome_frame_helper.exe is shut down.
Review URL: http://codereview.chromium.org/3312010
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@58587 0039d316-1c4b-4281-b951-d872f2087c98# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''This is a simple helper script to shut down the Chrome Frame helper process.
It needs the Python Win32 extensions.'''
import pywintypes
import sys
import win32gui
import win32con
def main():
exit_code = 0
window = win32gui.FindWindow('ChromeFrameHelperWindowClass',
'ChromeFrameHelperWindowName')
if not window:
print 'Chrome Frame helper process not running.'
else:
try:
win32gui.PostMessage(window, win32con.WM_CLOSE, 0, 0)
print 'Chrome Frame helper process shut down.'
except pywintypes.error as ex:
print 'Failed to shutdown Chrome Frame helper process: '
print ex
exit_code = 1
return exit_code
if __name__ == '__main__':
sys.exit(main())
|
<commit_before><commit_msg>Add a tiny helper script to shutdown the chrome frame helper process.
BUG=53127
TEST=Run script, chrome_frame_helper.exe is shut down.
Review URL: http://codereview.chromium.org/3312010
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@58587 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''This is a simple helper script to shut down the Chrome Frame helper process.
It needs the Python Win32 extensions.'''
import pywintypes
import sys
import win32gui
import win32con
def main():
exit_code = 0
window = win32gui.FindWindow('ChromeFrameHelperWindowClass',
'ChromeFrameHelperWindowName')
if not window:
print 'Chrome Frame helper process not running.'
else:
try:
win32gui.PostMessage(window, win32con.WM_CLOSE, 0, 0)
print 'Chrome Frame helper process shut down.'
except pywintypes.error as ex:
print 'Failed to shutdown Chrome Frame helper process: '
print ex
exit_code = 1
return exit_code
if __name__ == '__main__':
sys.exit(main())
|
|
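A small follow-up sketch for the script in the record above: it reports success as soon as WM_CLOSE is posted, so a caller that needs certainty could poll FindWindow for a short while afterwards. The helper below is an addition for illustration, not part of the committed script:

import time
import win32gui

# Hypothetical helper reusing the same window class/name as the script above;
# returns True once the helper window has disappeared, False on timeout.
def wait_for_helper_exit(timeout_s=5.0):
    deadline = time.time() + timeout_s
    while time.time() < deadline:
        if not win32gui.FindWindow('ChromeFrameHelperWindowClass',
                                   'ChromeFrameHelperWindowName'):
            return True
        time.sleep(0.1)
    return False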
a65f897f441eb001bf825805c955ac52a6026a87
|
bluebottle/projects/migrations/0026_auto_20170424_1653.py
|
bluebottle/projects/migrations/0026_auto_20170424_1653.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-24 14:53
from __future__ import unicode_literals
from django.db import migrations
def correct_needs_approval_status(apps, schema_editor):
Project = apps.get_model('projects', 'Project')
for project in Project.objects.filter(payout_status='needs_approval'):
if project.projectpayout_set.get().status in ('in_progress', 'settled'):
project.payout_status = None
project.save()
class Migration(migrations.Migration):
dependencies = [
('projects', '0025_auto_20170404_1130'),
]
operations = [
migrations.RunPython(correct_needs_approval_status),
]
|
Mark projects that have a bluebottle payout as not needing approval
|
Mark projects that have a bluebottle payout as not needing approval
BB-9612 #resolve
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
Mark projects that have a bluebottle payout as not needing approval
BB-9612 #resolve
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-24 14:53
from __future__ import unicode_literals
from django.db import migrations
def correct_needs_approval_status(apps, schema_editor):
Project = apps.get_model('projects', 'Project')
for project in Project.objects.filter(payout_status='needs_approval'):
if project.projectpayout_set.get().status in ('in_progress', 'settled'):
project.payout_status = None
project.save()
class Migration(migrations.Migration):
dependencies = [
('projects', '0025_auto_20170404_1130'),
]
operations = [
migrations.RunPython(correct_needs_approval_status),
]
|
<commit_before><commit_msg>Mark projects that have a bluebottle payout as not needing approval
BB-9612 #resolve<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-24 14:53
from __future__ import unicode_literals
from django.db import migrations
def correct_needs_approval_status(apps, schema_editor):
Project = apps.get_model('projects', 'Project')
for project in Project.objects.filter(payout_status='needs_approval'):
if project.projectpayout_set.get().status in ('in_progress', 'settled'):
project.payout_status = None
project.save()
class Migration(migrations.Migration):
dependencies = [
('projects', '0025_auto_20170404_1130'),
]
operations = [
migrations.RunPython(correct_needs_approval_status),
]
|
Mark projects that have a bluebottle payout as not needing approval
BB-9612 #resolve# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-24 14:53
from __future__ import unicode_literals
from django.db import migrations
def correct_needs_approval_status(apps, schema_editor):
Project = apps.get_model('projects', 'Project')
for project in Project.objects.filter(payout_status='needs_approval'):
if project.projectpayout_set.get().status in ('in_progress', 'settled'):
project.payout_status = None
project.save()
class Migration(migrations.Migration):
dependencies = [
('projects', '0025_auto_20170404_1130'),
]
operations = [
migrations.RunPython(correct_needs_approval_status),
]
|
<commit_before><commit_msg>Mark projects that have a bluebottle payout as not needing approval
BB-9612 #resolve<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-24 14:53
from __future__ import unicode_literals
from django.db import migrations
def correct_needs_approval_status(apps, schema_editor):
Project = apps.get_model('projects', 'Project')
for project in Project.objects.filter(payout_status='needs_approval'):
if project.projectpayout_set.get().status in ('in_progress', 'settled'):
project.payout_status = None
project.save()
class Migration(migrations.Migration):
dependencies = [
('projects', '0025_auto_20170404_1130'),
]
operations = [
migrations.RunPython(correct_needs_approval_status),
]
|
|
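One caveat on the data migration in the record above: projectpayout_set.get() assumes exactly one payout per project and raises if there are zero or several. A more defensive sketch of the same fix (an alternative, not the committed code):

# Defensive variant: tolerate projects with zero or several payouts by checking
# whether any related payout is already in progress or settled.
def correct_needs_approval_status(apps, schema_editor):
    Project = apps.get_model('projects', 'Project')
    for project in Project.objects.filter(payout_status='needs_approval'):
        statuses = project.projectpayout_set.values_list('status', flat=True)
        if any(s in ('in_progress', 'settled') for s in statuses):
            project.payout_status = None
            project.save()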
43cd8e0e79b37b932e4942890af8b708f2e37482
|
openstack/tests/functional/compute/v2/test_limits.py
|
openstack/tests/functional/compute/v2/test_limits.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack.tests.functional import base
class TestLimits(base.BaseFunctionalTest):
def test_limits(self):
sot = self.conn.compute.get_limits()
self.assertIn('maxTotalInstances', sot.absolute)
self.assertIn('maxTotalRAMSize', sot.absolute)
self.assertIn('maxTotalKeypairs', sot.absolute)
self.assertIn('maxSecurityGroups', sot.absolute)
self.assertIn('maxSecurityGroupRules', sot.absolute)
|
Add functional tests for compute limits
|
Add functional tests for compute limits
Change-Id: Ifb192d412081beadf15023343af71ab23ceba7a1
|
Python
|
apache-2.0
|
stackforge/python-openstacksdk,dudymas/python-openstacksdk,dtroyer/python-openstacksdk,mtougeron/python-openstacksdk,dudymas/python-openstacksdk,briancurtin/python-openstacksdk,briancurtin/python-openstacksdk,openstack/python-openstacksdk,mtougeron/python-openstacksdk,stackforge/python-openstacksdk,openstack/python-openstacksdk,dtroyer/python-openstacksdk
|
Add functional tests for compute limits
Change-Id: Ifb192d412081beadf15023343af71ab23ceba7a1
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack.tests.functional import base
class TestLimits(base.BaseFunctionalTest):
def test_limits(self):
sot = self.conn.compute.get_limits()
self.assertIn('maxTotalInstances', sot.absolute)
self.assertIn('maxTotalRAMSize', sot.absolute)
self.assertIn('maxTotalKeypairs', sot.absolute)
self.assertIn('maxSecurityGroups', sot.absolute)
self.assertIn('maxSecurityGroupRules', sot.absolute)
|
<commit_before><commit_msg>Add functional tests for compute limits
Change-Id: Ifb192d412081beadf15023343af71ab23ceba7a1<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack.tests.functional import base
class TestLimits(base.BaseFunctionalTest):
def test_limits(self):
sot = self.conn.compute.get_limits()
self.assertIn('maxTotalInstances', sot.absolute)
self.assertIn('maxTotalRAMSize', sot.absolute)
self.assertIn('maxTotalKeypairs', sot.absolute)
self.assertIn('maxSecurityGroups', sot.absolute)
self.assertIn('maxSecurityGroupRules', sot.absolute)
|
Add functional tests for compute limits
Change-Id: Ifb192d412081beadf15023343af71ab23ceba7a1# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack.tests.functional import base
class TestLimits(base.BaseFunctionalTest):
def test_limits(self):
sot = self.conn.compute.get_limits()
self.assertIn('maxTotalInstances', sot.absolute)
self.assertIn('maxTotalRAMSize', sot.absolute)
self.assertIn('maxTotalKeypairs', sot.absolute)
self.assertIn('maxSecurityGroups', sot.absolute)
self.assertIn('maxSecurityGroupRules', sot.absolute)
|
<commit_before><commit_msg>Add functional tests for compute limits
Change-Id: Ifb192d412081beadf15023343af71ab23ceba7a1<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack.tests.functional import base
class TestLimits(base.BaseFunctionalTest):
def test_limits(self):
sot = self.conn.compute.get_limits()
self.assertIn('maxTotalInstances', sot.absolute)
self.assertIn('maxTotalRAMSize', sot.absolute)
self.assertIn('maxTotalKeypairs', sot.absolute)
self.assertIn('maxSecurityGroups', sot.absolute)
self.assertIn('maxSecurityGroupRules', sot.absolute)
|
|
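For orientation, the same API called outside the test in the record above. This sketch assumes an authenticated openstack.connection.Connection named conn, and that the limits object's absolute attribute behaves like a mapping, which is what the assertions above rely on:

# Illustrative only: inspect a few absolute compute limits for the current project.
limits = conn.compute.get_limits()
for key in ('maxTotalInstances', 'maxTotalRAMSize', 'maxTotalKeypairs'):
    print(key, limits.absolute[key])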
cda8afef07cf959989f5e46577d3ce6b19c9e105
|
rdtools/energy_normalization.py
|
rdtools/energy_normalization.py
|
''' Energy Normalization Module
This module contains functions to help normalize AC energy output with measured
irradiance in preparation for calculating PV system degradation.
'''
import pandas as pd
import pvlib
def normalize_with_sapm(pvlib_pvsystem, energy, irradiance):
'''
Normalize system AC energy output given measured irradiance and
meteorological data. This method relies on the Sandia Array Performance
Model (SAPM) to compute the effective DC energy using measured irradiance,
ambient temperature, and wind speed.
Energy timeseries and irradiance timeseries can be different granularities.
Parameters
----------
pvlib_pvsystem: pvlib-python LocalizedPVSystem object
Object contains orientation, geographic coordinates, equipment constants.
energy: Pandas Series (numeric)
Energy time series to be normalized.
irradiance: Pandas DataFrame (numeric)
Measured irradiance, ambient temperature, and wind speed.
Returns
-------
normalized_energy: Pandas Series (numeric)
Energy divided by Sandia Model DC energy.
'''
solar_position = pvlib_pvsystem.get_solarposition(irradiance.index)
total_irradiance = pvlib_pvsystem\
.get_irradiance(solar_position['zenith'],
solar_position['azimuth'],
irradiance['DNI'],
irradiance['GHI'],
irradiance['DHI'])
aoi = pvlib_pvsystem.get_aoi(solar_position['zenith'],
solar_position['azimuth'])
airmass = pvlib_pvsystem\
.get_airmass(solar_position=solar_position, model='kastenyoung1989')
airmass_absolute = airmass['airmass_absolute']
effective_poa = pvlib.pvsystem\
.sapm_effective_irradiance(poa_direct=total_irradiance['poa_direct'],
poa_diffuse=total_irradiance['poa_diffuse'],
airmass_absolute=airmass_absolute,
aoi=aoi,
module=pvlib_pvsystem.module,
reference_irradiance=1)
temp_cell = pvlib_pvsystem\
.sapm_celltemp(irrad=total_irradiance['poa_global'],
wind=irradiance['Wind Speed'],
temp=irradiance['Temperature'])
p_dc = pvlib_pvsystem\
.pvwatts_dc(g_poa_effective=effective_poa,
temp_cell=temp_cell['temp_cell'])
if energy.index.freq is None:
freq = pd.infer_freq(energy.index)
else:
freq = energy.index.freq
energy_dc = p_dc.resample(freq).sum()
normalized_energy = energy / energy_dc
return normalized_energy
|
Rename 'tools' to 'rdtools' module directory.
|
Rename 'tools' to 'rdtools' module directory.
|
Python
|
mit
|
kwhanalytics/rdtools,kwhanalytics/rdtools
|
Rename 'tools' to 'rdtools' module directory.
|
''' Energy Normalization Module
This module contains functions to help normalize AC energy output with measured
irradiance in preparation for calculating PV system degradation.
'''
import pandas as pd
import pvlib
def normalize_with_sapm(pvlib_pvsystem, energy, irradiance):
'''
Normalize system AC energy output given measured irradiance and
meteorological data. This method relies on the Sandia Array Performance
Model (SAPM) to compute the effective DC energy using measured irradiance,
ambient temperature, and wind speed.
Energy timeseries and irradiance timeseries can be different granularities.
Parameters
----------
pvlib_pvsystem: pvlib-python LocalizedPVSystem object
Object contains orientation, geographic coordinates, equipment constants.
energy: Pandas Series (numeric)
Energy time series to be normalized.
irradiance: Pandas DataFrame (numeric)
Measured irradiance, ambient temperature, and wind speed.
Returns
-------
normalized_energy: Pandas Series (numeric)
Energy divided by Sandia Model DC energy.
'''
solar_position = pvlib_pvsystem.get_solarposition(irradiance.index)
total_irradiance = pvlib_pvsystem\
.get_irradiance(solar_position['zenith'],
solar_position['azimuth'],
irradiance['DNI'],
irradiance['GHI'],
irradiance['DHI'])
aoi = pvlib_pvsystem.get_aoi(solar_position['zenith'],
solar_position['azimuth'])
airmass = pvlib_pvsystem\
.get_airmass(solar_position=solar_position, model='kastenyoung1989')
airmass_absolute = airmass['airmass_absolute']
effective_poa = pvlib.pvsystem\
.sapm_effective_irradiance(poa_direct=total_irradiance['poa_direct'],
poa_diffuse=total_irradiance['poa_diffuse'],
airmass_absolute=airmass_absolute,
aoi=aoi,
module=pvlib_pvsystem.module,
reference_irradiance=1)
temp_cell = pvlib_pvsystem\
.sapm_celltemp(irrad=total_irradiance['poa_global'],
wind=irradiance['Wind Speed'],
temp=irradiance['Temperature'])
p_dc = pvlib_pvsystem\
.pvwatts_dc(g_poa_effective=effective_poa,
temp_cell=temp_cell['temp_cell'])
if energy.index.freq is None:
freq = pd.infer_freq(energy.index)
else:
freq = energy.index.freq
energy_dc = p_dc.resample(freq).sum()
normalized_energy = energy / energy_dc
return normalized_energy
|
<commit_before><commit_msg>Rename 'tools' to 'rdtools' module directory.<commit_after>
|
''' Energy Normalization Module
This module contains functions to help normalize AC energy output with measured
irradiance in preparation for calculating PV system degradation.
'''
import pandas as pd
import pvlib
def normalize_with_sapm(pvlib_pvsystem, energy, irradiance):
'''
Normalize system AC energy output given measured irradiance and
meteorological data. This method relies on the Sandia Array Performance
Model (SAPM) to compute the effective DC energy using measured irradiance,
ambient temperature, and wind speed.
Energy timeseries and irradiance timeseries can be different granularities.
Parameters
----------
pvlib_pvsystem: pvlib-python LocalizedPVSystem object
Object contains orientation, geographic coordinates, equipment constants.
energy: Pandas Series (numeric)
Energy time series to be normalized.
irradiance: Pandas DataFrame (numeric)
Measured irradiance, ambient temperature, and wind speed.
Returns
-------
normalized_energy: Pandas Series (numeric)
Energy divided by Sandia Model DC energy.
'''
solar_position = pvlib_pvsystem.get_solarposition(irradiance.index)
total_irradiance = pvlib_pvsystem\
.get_irradiance(solar_position['zenith'],
solar_position['azimuth'],
irradiance['DNI'],
irradiance['GHI'],
irradiance['DHI'])
aoi = pvlib_pvsystem.get_aoi(solar_position['zenith'],
solar_position['azimuth'])
airmass = pvlib_pvsystem\
.get_airmass(solar_position=solar_position, model='kastenyoung1989')
airmass_absolute = airmass['airmass_absolute']
effective_poa = pvlib.pvsystem\
.sapm_effective_irradiance(poa_direct=total_irradiance['poa_direct'],
poa_diffuse=total_irradiance['poa_diffuse'],
airmass_absolute=airmass_absolute,
aoi=aoi,
module=pvlib_pvsystem.module,
reference_irradiance=1)
temp_cell = pvlib_pvsystem\
.sapm_celltemp(irrad=total_irradiance['poa_global'],
wind=irradiance['Wind Speed'],
temp=irradiance['Temperature'])
p_dc = pvlib_pvsystem\
.pvwatts_dc(g_poa_effective=effective_poa,
temp_cell=temp_cell['temp_cell'])
if energy.index.freq is None:
freq = pd.infer_freq(energy.index)
else:
freq = energy.index.freq
energy_dc = p_dc.resample(freq).sum()
normalized_energy = energy / energy_dc
return normalized_energy
|
Rename 'tools' to 'rdtools' module directory.''' Energy Normalization Module
This module contains functions to help normalize AC energy output with measured
irradiance in preparation for calculating PV system degradation.
'''
import pandas as pd
import pvlib
def normalize_with_sapm(pvlib_pvsystem, energy, irradiance):
'''
Normalize system AC energy output given measured irradiance and
meteorological data. This method relies on the Sandia Array Performance
Model (SAPM) to compute the effective DC energy using measured irradiance,
ambient temperature, and wind speed.
Energy timeseries and irradiance timeseries can be different granularities.
Parameters
----------
pvlib_pvsystem: pvlib-python LocalizedPVSystem object
Object contains orientation, geographic coordinates, equipment constants.
energy: Pandas Series (numeric)
Energy time series to be normalized.
irradiance: Pandas DataFrame (numeric)
Measured irradiance, ambient temperature, and wind speed.
Returns
-------
normalized_energy: Pandas Series (numeric)
Energy divided by Sandia Model DC energy.
'''
solar_position = pvlib_pvsystem.get_solarposition(irradiance.index)
total_irradiance = pvlib_pvsystem\
.get_irradiance(solar_position['zenith'],
solar_position['azimuth'],
irradiance['DNI'],
irradiance['GHI'],
irradiance['DHI'])
aoi = pvlib_pvsystem.get_aoi(solar_position['zenith'],
solar_position['azimuth'])
airmass = pvlib_pvsystem\
.get_airmass(solar_position=solar_position, model='kastenyoung1989')
airmass_absolute = airmass['airmass_absolute']
effective_poa = pvlib.pvsystem\
.sapm_effective_irradiance(poa_direct=total_irradiance['poa_direct'],
poa_diffuse=total_irradiance['poa_diffuse'],
airmass_absolute=airmass_absolute,
aoi=aoi,
module=pvlib_pvsystem.module,
reference_irradiance=1)
temp_cell = pvlib_pvsystem\
.sapm_celltemp(irrad=total_irradiance['poa_global'],
wind=irradiance['Wind Speed'],
temp=irradiance['Temperature'])
p_dc = pvlib_pvsystem\
.pvwatts_dc(g_poa_effective=effective_poa,
temp_cell=temp_cell['temp_cell'])
if energy.index.freq is None:
freq = pd.infer_freq(energy.index)
else:
freq = energy.index.freq
energy_dc = p_dc.resample(freq).sum()
normalized_energy = energy / energy_dc
return normalized_energy
|
<commit_before><commit_msg>Rename 'tools' to 'rdtools' module directory.<commit_after>''' Energy Normalization Module
This module contains functions to help normalize AC energy output with measured
irradiance in preparation for calculating PV system degradation.
'''
import pandas as pd
import pvlib
def normalize_with_sapm(pvlib_pvsystem, energy, irradiance):
'''
Normalize system AC energy output given measured irradiance and
meteorological data. This method relies on the Sandia Array Performance
Model (SAPM) to compute the effective DC energy using measured irradiance,
ambient temperature, and wind speed.
Energy timeseries and irradiance timeseries can be different granularities.
Parameters
----------
pvlib_pvsystem: pvlib-python LocalizedPVSystem object
Object contains orientation, geographic coordinates, equipment constants.
energy: Pandas Series (numeric)
Energy time series to be normalized.
irradiance: Pandas DataFrame (numeric)
Measured irradiance, ambient temperature, and wind speed.
Returns
-------
normalized_energy: Pandas Series (numeric)
Energy divided by Sandia Model DC energy.
'''
solar_position = pvlib_pvsystem.get_solarposition(irradiance.index)
total_irradiance = pvlib_pvsystem\
.get_irradiance(solar_position['zenith'],
solar_position['azimuth'],
irradiance['DNI'],
irradiance['GHI'],
irradiance['DHI'])
aoi = pvlib_pvsystem.get_aoi(solar_position['zenith'],
solar_position['azimuth'])
airmass = pvlib_pvsystem\
.get_airmass(solar_position=solar_position, model='kastenyoung1989')
airmass_absolute = airmass['airmass_absolute']
effective_poa = pvlib.pvsystem\
.sapm_effective_irradiance(poa_direct=total_irradiance['poa_direct'],
poa_diffuse=total_irradiance['poa_diffuse'],
airmass_absolute=airmass_absolute,
aoi=aoi,
module=pvlib_pvsystem.module,
reference_irradiance=1)
temp_cell = pvlib_pvsystem\
.sapm_celltemp(irrad=total_irradiance['poa_global'],
wind=irradiance['Wind Speed'],
temp=irradiance['Temperature'])
p_dc = pvlib_pvsystem\
.pvwatts_dc(g_poa_effective=effective_poa,
temp_cell=temp_cell['temp_cell'])
if energy.index.freq is None:
freq = pd.infer_freq(energy.index)
else:
freq = energy.index.freq
energy_dc = p_dc.resample(freq).sum()
normalized_energy = energy / energy_dc
return normalized_energy
|
|
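A short aside on the frequency handling at the end of normalize_with_sapm in the record above: when the energy index carries no explicit freq, pd.infer_freq is used, which needs a regular index with at least three timestamps. A toy illustration:

import pandas as pd

# The index below carries no explicit frequency, so .freq is None, but
# pd.infer_freq can still recover the hourly spacing from the timestamps.
idx = pd.DatetimeIndex(['2017-01-01 00:00', '2017-01-01 01:00', '2017-01-01 02:00'])
print(idx.freq)            # None
print(pd.infer_freq(idx))  # hourly frequency code, e.g. 'H'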
13db5fc93d2aec28ea8a52f32353e86d8f49c12e
|
python/christmas_names/name_chooser.py
|
python/christmas_names/name_chooser.py
|
#! /usr/bin/python
# vim: set ai sw=4:
import random
import time
family_list = [["Alia", "Tanya"],
["Nick", "Ariana", "Max"],
["Paige", "Ian", "Kendra"]
]
# Given a family_list, create 2 new data structures:
# 1) Just a raw list of all the names
# 2) A dictionary mapping from a name to a family identifier
family_dict = {}
namelist = []
family_id = 0
for family in family_list:
for name in family:
family_dict[name] = family_id
namelist.append(name)
family_id += 1
# Create a random pairing (really just a permutation of the input list).
# This is returned as a list of pairs of names.
def pairing(input):
leftlist = list(input)
rightlist = list(input)
random.shuffle(rightlist)
return zip(leftlist, rightlist)
# Print a pairing nicely.
def show_pairing(pairing):
for p in pairing:
print "%8s gives to %s." % p
# Apply the constraints to a pairing. If it meets the constraints,
# return True; otherwise, return False.
def pairing_good(pairing):
for p in pairing:
if p[0] == p[1]: return False
if family_dict[p[0]] == family_dict[p[1]]: return False
return(True)
# Keep creating pairings until we find a good one.
def find_good_pairing(return_num_tries=False):
num_tries = 0
while (True):
p = pairing(namelist)
num_tries +=1
# print "Trying pairing", show_pairing(p)
if pairing_good(p):
print ("Took %d tries to get a good pairing." % num_tries)
if return_num_tries:
return (p, num_tries)
else:
return p
# time.sleep(1)
|
Add python program (really a collection of functions) which will choose a set of name pairings for a christmas gift exchange.
|
Add python program (really a collection of functions) which will choose a
set of name pairings for a christmas gift exchange.
|
Python
|
bsd-2-clause
|
tedzo/python_play
|
Add python program (really a collection of functions) which will choose a
set of name pairings for a christmas gift exchange.
|
#! /usr/bin/python
# vim: set ai sw=4:
import random
import time
family_list = [["Alia", "Tanya"],
["Nick", "Ariana", "Max"],
["Paige", "Ian", "Kendra"]
]
# Given a family_list, create 2 new data structures:
# 1) Just a raw list of all the names
# 2) A dictionary mapping from a name to a family identifier
family_dict = {}
namelist = []
family_id = 0
for family in family_list:
for name in family:
family_dict[name] = family_id
namelist.append(name)
family_id += 1
# Create a random pairing (really just a permutation of the input list).
# This is returned as a list of pairs of names.
def pairing(input):
leftlist = list(input)
rightlist = list(input)
random.shuffle(rightlist)
return zip(leftlist, rightlist)
# Print a pairing nicely.
def show_pairing(pairing):
for p in pairing:
print "%8s gives to %s." % p
# Apply the constraints to a pairing. If it meets the constraints,
# return True; otherwise, return False.
def pairing_good(pairing):
for p in pairing:
if p[0] == p[1]: return False
if family_dict[p[0]] == family_dict[p[1]]: return False
return(True)
# Keep creating pairings until we find a good one.
def find_good_pairing(return_num_tries=False):
num_tries = 0
while (True):
p = pairing(namelist)
num_tries +=1
# print "Trying pairing", show_pairing(p)
if pairing_good(p):
print ("Took %d tries to get a good pairing." % num_tries)
if return_num_tries:
return (p, num_tries)
else:
return p
# time.sleep(1)
|
<commit_before><commit_msg>Add python program (really a collection of functions) which will choose a
set of name pairings for a christmas gift exchange.<commit_after>
|
#! /usr/bin/python
# vim: set ai sw=4:
import random
import time
family_list = [["Alia", "Tanya"],
["Nick", "Ariana", "Max"],
["Paige", "Ian", "Kendra"]
]
# Given a family_list, create 2 new data structures:
# 1) Just a raw list of all the names
# 2) A dictionary mapping from a name to a family identifier
family_dict = {}
namelist = []
family_id = 0
for family in family_list:
for name in family:
family_dict[name] = family_id
namelist.append(name)
family_id += 1
# Create a random pairing (really just a permutation of the input list).
# This is returned as a list of pairs of names.
def pairing(input):
leftlist = list(input)
rightlist = list(input)
random.shuffle(rightlist)
return zip(leftlist, rightlist)
# Print a pairing nicely.
def show_pairing(pairing):
for p in pairing:
print "%8s gives to %s." % p
# Apply the constraints to a pairing. If it meets the constraints,
# return True; otherwise, return False.
def pairing_good(pairing):
for p in pairing:
if p[0] == p[1]: return False
if family_dict[p[0]] == family_dict[p[1]]: return False
return(True)
# Keep creating pairings until we find a good one.
def find_good_pairing(return_num_tries=False):
num_tries = 0
while (True):
p = pairing(namelist)
num_tries +=1
# print "Trying pairing", show_pairing(p)
if pairing_good(p):
print ("Took %d tries to get a good pairing." % num_tries)
if return_num_tries:
return (p, num_tries)
else:
return p
# time.sleep(1)
|
Add python program (really a collection of functions) which will choose a
set of name pairings for a christmas gift exchange.#! /usr/bin/python
# vim: set ai sw=4:
import random
import time
family_list = [["Alia", "Tanya"],
["Nick", "Ariana", "Max"],
["Paige", "Ian", "Kendra"]
]
# Given a family_list, create 2 new data structures:
# 1) Just a raw list of all the names
# 2) A dictionary mapping from a name to a family identifier
family_dict = {}
namelist = []
family_id = 0
for family in family_list:
for name in family:
family_dict[name] = family_id
namelist.append(name)
family_id += 1
# Create a random pairing (really just a permutation of the input list).
# This is returned as a list of pairs of names.
def pairing(input):
leftlist = list(input)
rightlist = list(input)
random.shuffle(rightlist)
return zip(leftlist, rightlist)
# Print a pairing nicely.
def show_pairing(pairing):
for p in pairing:
print "%8s gives to %s." % p
# Apply the constraints to a pairing. If it meets the constraints,
# return True; otherwise, return False.
def pairing_good(pairing):
for p in pairing:
if p[0] == p[1]: return False
if family_dict[p[0]] == family_dict[p[1]]: return False
return(True)
# Keep creating pairings until we find a good one.
def find_good_pairing(return_num_tries=False):
num_tries = 0
while (True):
p = pairing(namelist)
num_tries +=1
# print "Trying pairing", show_pairing(p)
if pairing_good(p):
print ("Took %d tries to get a good pairing." % num_tries)
if return_num_tries:
return (p, num_tries)
else:
return p
# time.sleep(1)
|
<commit_before><commit_msg>Add python program (really a collection of functions) which will choose a
set of name pairings for a christmas gift exchange.<commit_after>#! /usr/bin/python
# vim: set ai sw=4:
import random
import time
family_list = [["Alia", "Tanya"],
["Nick", "Ariana", "Max"],
["Paige", "Ian", "Kendra"]
]
# Given a family_list, create 2 new data structures:
# 1) Just a raw list of all the names
# 2) A dictionary mapping from a name to a family identifier
family_dict = {}
namelist = []
family_id = 0
for family in family_list:
for name in family:
family_dict[name] = family_id
namelist.append(name)
family_id += 1
# Create a random pairing (really just a permutation of the input list).
# This is returned as a list of pairs of names.
def pairing(input):
leftlist = list(input)
rightlist = list(input)
random.shuffle(rightlist)
return zip(leftlist, rightlist)
# Print a pairing nicely.
def show_pairing(pairing):
for p in pairing:
print "%8s gives to %s." % p
# Apply the constraints to a pairing. If it meets the constraints,
# return True; otherwise, return False.
def pairing_good(pairing):
for p in pairing:
if p[0] == p[1]: return False
if family_dict[p[0]] == family_dict[p[1]]: return False
return(True)
# Keep creating pairings until we find a good one.
def find_good_pairing(return_num_tries=False):
num_tries = 0
while (True):
p = pairing(namelist)
num_tries +=1
# print "Trying pairing", show_pairing(p)
if pairing_good(p):
print ("Took %d tries to get a good pairing." % num_tries)
if return_num_tries:
return (p, num_tries)
else:
return p
# time.sleep(1)
|
|
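Editor's note: the pairing program in the row above is Python 2 and finds a valid gift assignment by rejection sampling (shuffle, then re-try if anyone draws themselves or a family member). A minimal Python 3 restatement of the same idea, illustrative only and not code from the repository:

import random

family_list = [["Alia", "Tanya"], ["Nick", "Ariana", "Max"], ["Paige", "Ian", "Kendra"]]
family_of = {name: i for i, family in enumerate(family_list) for name in family}
names = list(family_of)

def find_good_pairing():
    # Rejection sampling: shuffle receivers until nobody draws themselves
    # or someone from their own family.
    while True:
        receivers = names[:]
        random.shuffle(receivers)
        pairs = list(zip(names, receivers))
        if all(g != r and family_of[g] != family_of[r] for g, r in pairs):
            return pairs

if __name__ == "__main__":
    for giver, receiver in find_good_pairing():
        print(f"{giver:>8} gives to {receiver}.")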
9a9469337fa562f11d23dab813dd334d9f35f0f1
|
elections/uk/migrations/0003_adjust_roles_for_grouping.py
|
elections/uk/migrations/0003_adjust_roles_for_grouping.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
from django.db import migrations
def adjust_roles_for_grouping(apps, schema_editor):
Election = apps.get_model('elections', 'Election')
for election in Election.objects.all():
if re.search(r'^local\.[^.]+\.2016', election.slug):
election.for_post_role = 'Local Councillor'
election.save()
if re.search(r'^mayor\.[^.]+\.2016', election.slug):
election.for_post_role = 'Mayor'
election.save()
class Migration(migrations.Migration):
dependencies = [
('uk', '0002_remove-gb-prefix'),
]
operations = [
migrations.RunPython(adjust_roles_for_grouping),
]
|
Change for_post_role of local & mayoral elections for better grouping
|
Change for_post_role of local & mayoral elections for better grouping
On various pages we're now grouping elections by the post role that
they're for. Unfortunately in some UK installations there's a different
for_post_role for each local councillor election, and each mayoral
election (e.g. 'Councillor for St Helens' rather than 'Local
councillor')
This data migration looks for elections with slugs that match the local
and mayoral elections in 2016 according to:
https://democracyclub.org.uk/projects/election-ids/reference/
... and sets for_post_role on each of them to 'Local Councillor' and
'Mayor' respectively.
|
Python
|
agpl-3.0
|
neavouli/yournextrepresentative,mysociety/yournextmp-popit,DemocracyClub/yournextrepresentative,neavouli/yournextrepresentative,DemocracyClub/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextrepresentative,neavouli/yournextrepresentative,neavouli/yournextrepresentative,DemocracyClub/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextrepresentative,mysociety/yournextmp-popit,neavouli/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextrepresentative,mysociety/yournextrepresentative,mysociety/yournextrepresentative
|
Change for_post_role of local & mayoral elections for better grouping
On various pages we're now grouping elections by the post role that
they're for. Unfortunately in some UK installations there's a different
for_post_role for each local councillor election, and each mayoral
election (e.g. 'Councillor for St Helens' rather than 'Local
councillor')
This data migration looks for elections with slugs that match the local
and mayoral elections in 2016 according to:
https://democracyclub.org.uk/projects/election-ids/reference/
... and sets for_post_role on each of them to 'Local Councillor' and
'Mayor' respectively.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
from django.db import migrations
def adjust_roles_for_grouping(apps, schema_editor):
Election = apps.get_model('elections', 'Election')
for election in Election.objects.all():
if re.search(r'^local\.[^.]+\.2016', election.slug):
election.for_post_role = 'Local Councillor'
election.save()
if re.search(r'^mayor\.[^.]+\.2016', election.slug):
election.for_post_role = 'Mayor'
election.save()
class Migration(migrations.Migration):
dependencies = [
('uk', '0002_remove-gb-prefix'),
]
operations = [
migrations.RunPython(adjust_roles_for_grouping),
]
|
<commit_before><commit_msg>Change for_post_role of local & mayoral elections for better grouping
On various pages we're now grouping elections by the post role that
they're for. Unfortunately in some UK installations there's a different
for_post_role for each local councillor election, and each mayoral
election (e.g. 'Councillor for St Helens' rather than 'Local
councillor')
This data migration looks for elections with slugs that match the local
and mayoral elections in 2016 according to:
https://democracyclub.org.uk/projects/election-ids/reference/
... and sets for_post_role on each of them to 'Local Councillor' and
'Mayor' respectively.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
from django.db import migrations
def adjust_roles_for_grouping(apps, schema_editor):
Election = apps.get_model('elections', 'Election')
for election in Election.objects.all():
if re.search(r'^local\.[^.]+\.2016', election.slug):
election.for_post_role = 'Local Councillor'
election.save()
if re.search(r'^mayor\.[^.]+\.2016', election.slug):
election.for_post_role = 'Mayor'
election.save()
class Migration(migrations.Migration):
dependencies = [
('uk', '0002_remove-gb-prefix'),
]
operations = [
migrations.RunPython(adjust_roles_for_grouping),
]
|
Change for_post_role of local & mayoral elections for better grouping
On various pages we're now grouping elections by the post role that
they're for. Unfortunately in some UK installations there's a different
for_post_role for each local councillor election, and each mayoral
election (e.g. 'Councillor for St Helens' rather than 'Local
councillor')
This data migration looks for elections with slugs that match the local
and mayoral elections in 2016 according to:
https://democracyclub.org.uk/projects/election-ids/reference/
... and sets for_post_role on each of them to 'Local Councillor' and
'Mayor' respectively.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
from django.db import migrations
def adjust_roles_for_grouping(apps, schema_editor):
Election = apps.get_model('elections', 'Election')
for election in Election.objects.all():
if re.search(r'^local\.[^.]+\.2016', election.slug):
election.for_post_role = 'Local Councillor'
election.save()
if re.search(r'^mayor\.[^.]+\.2016', election.slug):
election.for_post_role = 'Mayor'
election.save()
class Migration(migrations.Migration):
dependencies = [
('uk', '0002_remove-gb-prefix'),
]
operations = [
migrations.RunPython(adjust_roles_for_grouping),
]
|
<commit_before><commit_msg>Change for_post_role of local & mayoral elections for better grouping
On various pages we're now grouping elections by the post role that
they're for. Unfortunately in some UK installations there's a different
for_post_role for each local councillor election, and each mayoral
election (e.g. 'Councillor for St Helens' rather than 'Local
councillor')
This data migration looks for elections with slugs that match the local
and mayoral elections in 2016 according to:
https://democracyclub.org.uk/projects/election-ids/reference/
... and sets for_post_role on each of them to 'Local Councillor' and
'Mayor' respectively.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
from django.db import migrations
def adjust_roles_for_grouping(apps, schema_editor):
Election = apps.get_model('elections', 'Election')
for election in Election.objects.all():
if re.search(r'^local\.[^.]+\.2016', election.slug):
election.for_post_role = 'Local Councillor'
election.save()
if re.search(r'^mayor\.[^.]+\.2016', election.slug):
election.for_post_role = 'Mayor'
election.save()
class Migration(migrations.Migration):
dependencies = [
('uk', '0002_remove-gb-prefix'),
]
operations = [
migrations.RunPython(adjust_roles_for_grouping),
]
|
|
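Editor's note: the migration above keys entirely off the election slug format described at democracyclub.org.uk. A hedged illustration of how its two regexes classify slugs — the sample slugs below are hypothetical, not taken from the data:

import re

samples = [
    "local.example-council.2016-05-05",   # hypothetical local election slug
    "mayor.example-city.2016-05-05",      # hypothetical mayoral election slug
    "parl.2015-05-07",                    # left untouched by the migration
]
for slug in samples:
    if re.search(r'^local\.[^.]+\.2016', slug):
        role = 'Local Councillor'
    elif re.search(r'^mayor\.[^.]+\.2016', slug):
        role = 'Mayor'
    else:
        role = None
    print(slug, '->', role)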
6655f7d62347f96bf4fe8673faf26c0c3e2b2e0b
|
neutron/tests/unit/conf/policies/test_floatingip_pools.py
|
neutron/tests/unit/conf/policies/test_floatingip_pools.py
|
# Copyright (c) 2021 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_policy import policy as base_policy
from neutron import policy
from neutron.tests.unit.conf.policies import base
class FloatingipPoolsAPITestCase(base.PolicyBaseTestCase):
def setUp(self):
super(FloatingipPoolsAPITestCase, self).setUp()
self.target = {'project_id': self.project_id}
class SystemAdminTests(FloatingipPoolsAPITestCase):
def setUp(self):
super(SystemAdminTests, self).setUp()
self.context = self.system_admin_ctx
def test_get_floatingip_pool(self):
self.assertTrue(
policy.enforce(self.context, 'get_floatingip_pool',
self.target))
class SystemMemberTests(SystemAdminTests):
def setUp(self):
super(SystemMemberTests, self).setUp()
self.context = self.system_member_ctx
class SystemReaderTests(SystemAdminTests):
def setUp(self):
super(SystemReaderTests, self).setUp()
self.context = self.system_reader_ctx
class ProjectAdminTests(FloatingipPoolsAPITestCase):
def setUp(self):
super(ProjectAdminTests, self).setUp()
self.alt_target = {'project_id': self.alt_project_id}
self.context = self.project_admin_ctx
def test_get_floatingip_pool(self):
self.assertTrue(
policy.enforce(self.context, 'get_floatingip_pool',
self.target))
def test_get_floatingip_pool_other_project(self):
self.assertRaises(
base_policy.PolicyNotAuthorized,
policy.enforce,
self.context, 'get_floatingip_pool', self.alt_target)
class ProjectMemberTests(ProjectAdminTests):
def setUp(self):
super(ProjectMemberTests, self).setUp()
self.context = self.project_member_ctx
class ProjectReaderTests(ProjectAdminTests):
def setUp(self):
super(ProjectReaderTests, self).setUp()
self.context = self.project_reader_ctx
|
Add tests for floatingip pools API's new policy rules
|
Add tests for floatingip pools API's new policy rules
Related-blueprint: bp/secure-rbac-roles
Change-Id: I3f4f668866a7d1dacb583a177e8475617a762bf7
|
Python
|
apache-2.0
|
openstack/neutron,mahak/neutron,openstack/neutron,mahak/neutron,openstack/neutron,mahak/neutron
|
Add tests for floatingip pools API's new policy rules
Related-blueprint: bp/secure-rbac-roles
Change-Id: I3f4f668866a7d1dacb583a177e8475617a762bf7
|
# Copyright (c) 2021 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_policy import policy as base_policy
from neutron import policy
from neutron.tests.unit.conf.policies import base
class FloatingipPoolsAPITestCase(base.PolicyBaseTestCase):
def setUp(self):
super(FloatingipPoolsAPITestCase, self).setUp()
self.target = {'project_id': self.project_id}
class SystemAdminTests(FloatingipPoolsAPITestCase):
def setUp(self):
super(SystemAdminTests, self).setUp()
self.context = self.system_admin_ctx
def test_get_floatingip_pool(self):
self.assertTrue(
policy.enforce(self.context, 'get_floatingip_pool',
self.target))
class SystemMemberTests(SystemAdminTests):
def setUp(self):
super(SystemMemberTests, self).setUp()
self.context = self.system_member_ctx
class SystemReaderTests(SystemAdminTests):
def setUp(self):
super(SystemReaderTests, self).setUp()
self.context = self.system_reader_ctx
class ProjectAdminTests(FloatingipPoolsAPITestCase):
def setUp(self):
super(ProjectAdminTests, self).setUp()
self.alt_target = {'project_id': self.alt_project_id}
self.context = self.project_admin_ctx
def test_get_floatingip_pool(self):
self.assertTrue(
policy.enforce(self.context, 'get_floatingip_pool',
self.target))
def test_get_floatingip_pool_other_project(self):
self.assertRaises(
base_policy.PolicyNotAuthorized,
policy.enforce,
self.context, 'get_floatingip_pool', self.alt_target)
class ProjectMemberTests(ProjectAdminTests):
def setUp(self):
super(ProjectMemberTests, self).setUp()
self.context = self.project_member_ctx
class ProjectReaderTests(ProjectAdminTests):
def setUp(self):
super(ProjectReaderTests, self).setUp()
self.context = self.project_reader_ctx
|
<commit_before><commit_msg>Add tests for floatingip pools API's new policy rules
Related-blueprint: bp/secure-rbac-roles
Change-Id: I3f4f668866a7d1dacb583a177e8475617a762bf7<commit_after>
|
# Copyright (c) 2021 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_policy import policy as base_policy
from neutron import policy
from neutron.tests.unit.conf.policies import base
class FloatingipPoolsAPITestCase(base.PolicyBaseTestCase):
def setUp(self):
super(FloatingipPoolsAPITestCase, self).setUp()
self.target = {'project_id': self.project_id}
class SystemAdminTests(FloatingipPoolsAPITestCase):
def setUp(self):
super(SystemAdminTests, self).setUp()
self.context = self.system_admin_ctx
def test_get_floatingip_pool(self):
self.assertTrue(
policy.enforce(self.context, 'get_floatingip_pool',
self.target))
class SystemMemberTests(SystemAdminTests):
def setUp(self):
super(SystemMemberTests, self).setUp()
self.context = self.system_member_ctx
class SystemReaderTests(SystemAdminTests):
def setUp(self):
super(SystemReaderTests, self).setUp()
self.context = self.system_reader_ctx
class ProjectAdminTests(FloatingipPoolsAPITestCase):
def setUp(self):
super(ProjectAdminTests, self).setUp()
self.alt_target = {'project_id': self.alt_project_id}
self.context = self.project_admin_ctx
def test_get_floatingip_pool(self):
self.assertTrue(
policy.enforce(self.context, 'get_floatingip_pool',
self.target))
def test_get_floatingip_pool_other_project(self):
self.assertRaises(
base_policy.PolicyNotAuthorized,
policy.enforce,
self.context, 'get_floatingip_pool', self.alt_target)
class ProjectMemberTests(ProjectAdminTests):
def setUp(self):
super(ProjectMemberTests, self).setUp()
self.context = self.project_member_ctx
class ProjectReaderTests(ProjectAdminTests):
def setUp(self):
super(ProjectReaderTests, self).setUp()
self.context = self.project_reader_ctx
|
Add tests for floatingip pools API's new policy rules
Related-blueprint: bp/secure-rbac-roles
Change-Id: I3f4f668866a7d1dacb583a177e8475617a762bf7# Copyright (c) 2021 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_policy import policy as base_policy
from neutron import policy
from neutron.tests.unit.conf.policies import base
class FloatingipPoolsAPITestCase(base.PolicyBaseTestCase):
def setUp(self):
super(FloatingipPoolsAPITestCase, self).setUp()
self.target = {'project_id': self.project_id}
class SystemAdminTests(FloatingipPoolsAPITestCase):
def setUp(self):
super(SystemAdminTests, self).setUp()
self.context = self.system_admin_ctx
def test_get_floatingip_pool(self):
self.assertTrue(
policy.enforce(self.context, 'get_floatingip_pool',
self.target))
class SystemMemberTests(SystemAdminTests):
def setUp(self):
super(SystemMemberTests, self).setUp()
self.context = self.system_member_ctx
class SystemReaderTests(SystemAdminTests):
def setUp(self):
super(SystemReaderTests, self).setUp()
self.context = self.system_reader_ctx
class ProjectAdminTests(FloatingipPoolsAPITestCase):
def setUp(self):
super(ProjectAdminTests, self).setUp()
self.alt_target = {'project_id': self.alt_project_id}
self.context = self.project_admin_ctx
def test_get_floatingip_pool(self):
self.assertTrue(
policy.enforce(self.context, 'get_floatingip_pool',
self.target))
def test_get_floatingip_pool_other_project(self):
self.assertRaises(
base_policy.PolicyNotAuthorized,
policy.enforce,
self.context, 'get_floatingip_pool', self.alt_target)
class ProjectMemberTests(ProjectAdminTests):
def setUp(self):
super(ProjectMemberTests, self).setUp()
self.context = self.project_member_ctx
class ProjectReaderTests(ProjectAdminTests):
def setUp(self):
super(ProjectReaderTests, self).setUp()
self.context = self.project_reader_ctx
|
<commit_before><commit_msg>Add tests for floatingip pools API's new policy rules
Related-blueprint: bp/secure-rbac-roles
Change-Id: I3f4f668866a7d1dacb583a177e8475617a762bf7<commit_after># Copyright (c) 2021 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_policy import policy as base_policy
from neutron import policy
from neutron.tests.unit.conf.policies import base
class FloatingipPoolsAPITestCase(base.PolicyBaseTestCase):
def setUp(self):
super(FloatingipPoolsAPITestCase, self).setUp()
self.target = {'project_id': self.project_id}
class SystemAdminTests(FloatingipPoolsAPITestCase):
def setUp(self):
super(SystemAdminTests, self).setUp()
self.context = self.system_admin_ctx
def test_get_floatingip_pool(self):
self.assertTrue(
policy.enforce(self.context, 'get_floatingip_pool',
self.target))
class SystemMemberTests(SystemAdminTests):
def setUp(self):
super(SystemMemberTests, self).setUp()
self.context = self.system_member_ctx
class SystemReaderTests(SystemAdminTests):
def setUp(self):
super(SystemReaderTests, self).setUp()
self.context = self.system_reader_ctx
class ProjectAdminTests(FloatingipPoolsAPITestCase):
def setUp(self):
super(ProjectAdminTests, self).setUp()
self.alt_target = {'project_id': self.alt_project_id}
self.context = self.project_admin_ctx
def test_get_floatingip_pool(self):
self.assertTrue(
policy.enforce(self.context, 'get_floatingip_pool',
self.target))
def test_get_floatingip_pool_other_project(self):
self.assertRaises(
base_policy.PolicyNotAuthorized,
policy.enforce,
self.context, 'get_floatingip_pool', self.alt_target)
class ProjectMemberTests(ProjectAdminTests):
def setUp(self):
super(ProjectMemberTests, self).setUp()
self.context = self.project_member_ctx
class ProjectReaderTests(ProjectAdminTests):
def setUp(self):
super(ProjectReaderTests, self).setUp()
self.context = self.project_reader_ctx
|
|
e93939c9b0aee674ed9003727a267f6844d8ece6
|
DungeonsOfNoudar486/make_palette.py
|
DungeonsOfNoudar486/make_palette.py
|
import glob
from PIL import Image
from math import floor
palette = [[0,0,0]];
def transform( pixel ):
return [ 20 * ( pixel[ 0 ] / 20), 20 * ( pixel[ 1 ] / 20), 20 * ( pixel[ 2 ] / 20 ) ]
def add_to_palette( filename ):
imgFile = Image.open( filename )
img = imgFile.load()
for y in range( 0, imgFile.height ):
for x in range( 0, imgFile.width ):
pixel = img[ x, y ]
adjusted = transform( pixel )
if pixel[ 3 ] < 254:
adjusted = [ 255, 0, 255 ]
if palette.count( adjusted ) == 0:
palette.append( adjusted )
for filename in glob.glob('res/*.png'):
add_to_palette( filename )
palette.sort()
print len( palette )
for pixel in palette:
print str(pixel[ 0 ] ) + "\t" + str(pixel[ 1 ] ) + "\t" + str(pixel[ 2 ] )
|
Add utility to extract palette for the DOS version
|
Add utility to extract palette for the DOS version
|
Python
|
bsd-2-clause
|
TheFakeMontyOnTheRun/dungeons-of-noudar,TheFakeMontyOnTheRun/dungeons-of-noudar
|
Add utility to extract palette for the DOS version
|
import glob
from PIL import Image
from math import floor
palette = [[0,0,0]];
def transform( pixel ):
return [ 20 * ( pixel[ 0 ] / 20), 20 * ( pixel[ 1 ] / 20), 20 * ( pixel[ 2 ] / 20 ) ]
def add_to_palette( filename ):
imgFile = Image.open( filename )
img = imgFile.load()
for y in range( 0, imgFile.height ):
for x in range( 0, imgFile.width ):
pixel = img[ x, y ]
adjusted = transform( pixel )
if pixel[ 3 ] < 254:
adjusted = [ 255, 0, 255 ]
if palette.count( adjusted ) == 0:
palette.append( adjusted )
for filename in glob.glob('res/*.png'):
add_to_palette( filename )
palette.sort()
print len( palette )
for pixel in palette:
print str(pixel[ 0 ] ) + "\t" + str(pixel[ 1 ] ) + "\t" + str(pixel[ 2 ] )
|
<commit_before><commit_msg>Add utility to extract palette for the DOS version<commit_after>
|
import glob
from PIL import Image
from math import floor
palette = [[0,0,0]];
def transform( pixel ):
return [ 20 * ( pixel[ 0 ] / 20), 20 * ( pixel[ 1 ] / 20), 20 * ( pixel[ 2 ] / 20 ) ]
def add_to_palette( filename ):
imgFile = Image.open( filename )
img = imgFile.load()
for y in range( 0, imgFile.height ):
for x in range( 0, imgFile.width ):
pixel = img[ x, y ]
adjusted = transform( pixel )
if pixel[ 3 ] < 254:
adjusted = [ 255, 0, 255 ]
if palette.count( adjusted ) == 0:
palette.append( adjusted )
for filename in glob.glob('res/*.png'):
add_to_palette( filename )
palette.sort()
print len( palette )
for pixel in palette:
print str(pixel[ 0 ] ) + "\t" + str(pixel[ 1 ] ) + "\t" + str(pixel[ 2 ] )
|
Add utility to extract palette for the DOS versionimport glob
from PIL import Image
from math import floor
palette = [[0,0,0]];
def transform( pixel ):
return [ 20 * ( pixel[ 0 ] / 20), 20 * ( pixel[ 1 ] / 20), 20 * ( pixel[ 2 ] / 20 ) ]
def add_to_palette( filename ):
imgFile = Image.open( filename )
img = imgFile.load()
for y in range( 0, imgFile.height ):
for x in range( 0, imgFile.width ):
pixel = img[ x, y ]
adjusted = transform( pixel )
if pixel[ 3 ] < 254:
adjusted = [ 255, 0, 255 ]
if palette.count( adjusted ) == 0:
palette.append( adjusted )
for filename in glob.glob('res/*.png'):
add_to_palette( filename )
palette.sort()
print len( palette )
for pixel in palette:
print str(pixel[ 0 ] ) + "\t" + str(pixel[ 1 ] ) + "\t" + str(pixel[ 2 ] )
|
<commit_before><commit_msg>Add utility to extract palette for the DOS version<commit_after>import glob
from PIL import Image
from math import floor
palette = [[0,0,0]];
def transform( pixel ):
return [ 20 * ( pixel[ 0 ] / 20), 20 * ( pixel[ 1 ] / 20), 20 * ( pixel[ 2 ] / 20 ) ]
def add_to_palette( filename ):
imgFile = Image.open( filename )
img = imgFile.load()
for y in range( 0, imgFile.height ):
for x in range( 0, imgFile.width ):
pixel = img[ x, y ]
adjusted = transform( pixel )
if pixel[ 3 ] < 254:
adjusted = [ 255, 0, 255 ]
if palette.count( adjusted ) == 0:
palette.append( adjusted )
for filename in glob.glob('res/*.png'):
add_to_palette( filename )
palette.sort()
print len( palette )
for pixel in palette:
print str(pixel[ 0 ] ) + "\t" + str(pixel[ 1 ] ) + "\t" + str(pixel[ 2 ] )
|
|
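Editor's note: transform() in the row above quantises each RGB channel to a multiple of 20 and relies on Python 2 integer division (`/` on ints). A Python 3 sketch of the same quantisation step, illustrative only:

def transform(pixel):
    # Floor each channel to the nearest multiple of 20 (// keeps this integer in Python 3).
    return [20 * (pixel[0] // 20), 20 * (pixel[1] // 20), 20 * (pixel[2] // 20)]

print(transform((123, 45, 250, 255)))  # -> [120, 40, 240]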
363f93e1ea11fe99311df06d6dfcba76343f3c4e
|
django/sierra/api/migrations/0002_auto_20190517_1053.py
|
django/sierra/api/migrations/0002_auto_20190517_1053.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='apiuser',
name='permissions',
field=models.TextField(default=b'{}'),
),
]
|
Add migration for earlier change to api models
|
Add migration for earlier change to api models
Oops! Apparently I forgot to run makemigrations after a minor change to
API models.
|
Python
|
bsd-3-clause
|
unt-libraries/catalog-api,unt-libraries/catalog-api,unt-libraries/catalog-api,unt-libraries/catalog-api
|
Add migration for earlier change to api models
Oops! Apparently I forgot to run makemigrations after a minor change to
API models.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='apiuser',
name='permissions',
field=models.TextField(default=b'{}'),
),
]
|
<commit_before><commit_msg>Add migration for earlier change to api models
Oops! Apparently I forgot to run makemigrations after a minor change to
API models.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='apiuser',
name='permissions',
field=models.TextField(default=b'{}'),
),
]
|
Add migration for earlier change to api models
Oops! Apparently I forgot to run makemigrations after a minor change to
API models.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='apiuser',
name='permissions',
field=models.TextField(default=b'{}'),
),
]
|
<commit_before><commit_msg>Add migration for earlier change to api models
Oops! Apparently I forgot to run makemigrations after a minor change to
API models.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='apiuser',
name='permissions',
field=models.TextField(default=b'{}'),
),
]
|
|
789bcb17da60c17e521d442c21dc6fe1aec28392
|
src/ggrc/migrations/versions/20170203150557_7471e16ebb76_fix_vendors_columns_data_type.py
|
src/ggrc/migrations/versions/20170203150557_7471e16ebb76_fix_vendors_columns_data_type.py
|
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""
Bring vendors' start_date and end_date columns into conformity with the model
Create Date: 2017-02-03 15:05:57.538217
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from alembic import op
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '6e9a3ed063d2'
down_revision = '24b94ce0860c'
def upgrade():
"""Upgrade database schema and/or data, creating a new revision."""
op.alter_column('vendors', 'start_date', type_=mysql.DATE(),
existing_type=mysql.DATETIME(), nullable=True)
op.alter_column('vendors', 'end_date', type_=mysql.DATE(),
existing_type=mysql.DATETIME(), nullable=True)
def downgrade():
"""Downgrade database schema and/or data back to the previous revision."""
op.alter_column('vendors', 'start_date', type_=mysql.DATETIME(),
existing_type=mysql.DATE(), nullable=True)
op.alter_column('vendors', 'end_date', type_=mysql.DATETIME(),
existing_type=mysql.DATE(), nullable=True)
|
Fix column data type in vendors table
|
Fix column data type in vendors table
|
Python
|
apache-2.0
|
plamut/ggrc-core,AleksNeStu/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,AleksNeStu/ggrc-core,selahssea/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core
|
Fix column data type in vendors table
|
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""
Bring vendors' start_date and end_date columns into conformity with the model
Create Date: 2017-02-03 15:05:57.538217
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from alembic import op
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '6e9a3ed063d2'
down_revision = '24b94ce0860c'
def upgrade():
"""Upgrade database schema and/or data, creating a new revision."""
op.alter_column('vendors', 'start_date', type_=mysql.DATE(),
existing_type=mysql.DATETIME(), nullable=True)
op.alter_column('vendors', 'end_date', type_=mysql.DATE(),
existing_type=mysql.DATETIME(), nullable=True)
def downgrade():
"""Downgrade database schema and/or data back to the previous revision."""
op.alter_column('vendors', 'start_date', type_=mysql.DATETIME(),
existing_type=mysql.DATE(), nullable=True)
op.alter_column('vendors', 'end_date', type_=mysql.DATETIME(),
existing_type=mysql.DATE(), nullable=True)
|
<commit_before><commit_msg>Fix column data type in vendors table<commit_after>
|
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""
Bring vendors' start_date and end_date columns into conformity with the model
Create Date: 2017-02-03 15:05:57.538217
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from alembic import op
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '6e9a3ed063d2'
down_revision = '24b94ce0860c'
def upgrade():
"""Upgrade database schema and/or data, creating a new revision."""
op.alter_column('vendors', 'start_date', type_=mysql.DATE(),
existing_type=mysql.DATETIME(), nullable=True)
op.alter_column('vendors', 'end_date', type_=mysql.DATE(),
existing_type=mysql.DATETIME(), nullable=True)
def downgrade():
"""Downgrade database schema and/or data back to the previous revision."""
op.alter_column('vendors', 'start_date', type_=mysql.DATETIME(),
existing_type=mysql.DATE(), nullable=True)
op.alter_column('vendors', 'end_date', type_=mysql.DATETIME(),
existing_type=mysql.DATE(), nullable=True)
|
Fix column data type in vendors table# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""
Bring vendors' start_date and end_date columns into conformity with the model
Create Date: 2017-02-03 15:05:57.538217
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from alembic import op
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '6e9a3ed063d2'
down_revision = '24b94ce0860c'
def upgrade():
"""Upgrade database schema and/or data, creating a new revision."""
op.alter_column('vendors', 'start_date', type_=mysql.DATE(),
existing_type=mysql.DATETIME(), nullable=True)
op.alter_column('vendors', 'end_date', type_=mysql.DATE(),
existing_type=mysql.DATETIME(), nullable=True)
def downgrade():
"""Downgrade database schema and/or data back to the previous revision."""
op.alter_column('vendors', 'start_date', type_=mysql.DATETIME(),
existing_type=mysql.DATE(), nullable=True)
op.alter_column('vendors', 'end_date', type_=mysql.DATETIME(),
existing_type=mysql.DATE(), nullable=True)
|
<commit_before><commit_msg>Fix column data type in vendors table<commit_after># Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""
Bring vendors' start_date and end_date columns into conformity with the model
Create Date: 2017-02-03 15:05:57.538217
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from alembic import op
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '6e9a3ed063d2'
down_revision = '24b94ce0860c'
def upgrade():
"""Upgrade database schema and/or data, creating a new revision."""
op.alter_column('vendors', 'start_date', type_=mysql.DATE(),
existing_type=mysql.DATETIME(), nullable=True)
op.alter_column('vendors', 'end_date', type_=mysql.DATE(),
existing_type=mysql.DATETIME(), nullable=True)
def downgrade():
"""Downgrade database schema and/or data back to the previous revision."""
op.alter_column('vendors', 'start_date', type_=mysql.DATETIME(),
existing_type=mysql.DATE(), nullable=True)
op.alter_column('vendors', 'end_date', type_=mysql.DATETIME(),
existing_type=mysql.DATE(), nullable=True)
|
|
e0cdbf908c47e926b371ec3d97165818ed0a4423
|
apps/documents/migrations/0005_update_content_types.py
|
apps/documents/migrations/0005_update_content_types.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
sql = """UPDATE django_content_type
SET model = 'chapter'
WHERE model = 'document' AND
app_label = 'meinberlin_documents';"""
reverse_sql = """UPDATE django_content_type
SET model = 'document'
WHERE model = 'chapter' AND
app_label = 'meinberlin_documents';"""
class Migration(migrations.Migration):
dependencies = [
('meinberlin_documents', '0004_remove_create_document_phase'),
]
operations = [
migrations.RunSQL(sql, reverse_sql)
]
|
Add content_type migration to keep comments etc.
|
Add content_type migration to keep comments etc.
|
Python
|
agpl-3.0
|
liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin
|
Add content_type migration to keep comments etc.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
sql = """UPDATE django_content_type
SET model = 'chapter'
WHERE model = 'document' AND
app_label = 'meinberlin_documents';"""
reverse_sql = """UPDATE django_content_type
SET model = 'document'
WHERE model = 'chapter' AND
app_label = 'meinberlin_documents';"""
class Migration(migrations.Migration):
dependencies = [
('meinberlin_documents', '0004_remove_create_document_phase'),
]
operations = [
migrations.RunSQL(sql, reverse_sql)
]
|
<commit_before><commit_msg>Add content_type migration to keep comments etc.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
sql = """UPDATE django_content_type
SET model = 'chapter'
WHERE model = 'document' AND
app_label = 'meinberlin_documents';"""
reverse_sql = """UPDATE django_content_type
SET model = 'document'
WHERE model = 'chapter' AND
app_label = 'meinberlin_documents';"""
class Migration(migrations.Migration):
dependencies = [
('meinberlin_documents', '0004_remove_create_document_phase'),
]
operations = [
migrations.RunSQL(sql, reverse_sql)
]
|
Add content_type migration to keep comments etc.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
sql = """UPDATE django_content_type
SET model = 'chapter'
WHERE model = 'document' AND
app_label = 'meinberlin_documents';"""
reverse_sql = """UPDATE django_content_type
SET model = 'document'
WHERE model = 'chapter' AND
app_label = 'meinberlin_documents';"""
class Migration(migrations.Migration):
dependencies = [
('meinberlin_documents', '0004_remove_create_document_phase'),
]
operations = [
migrations.RunSQL(sql, reverse_sql)
]
|
<commit_before><commit_msg>Add content_type migration to keep comments etc.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
sql = """UPDATE django_content_type
SET model = 'chapter'
WHERE model = 'document' AND
app_label = 'meinberlin_documents';"""
reverse_sql = """UPDATE django_content_type
SET model = 'document'
WHERE model = 'chapter' AND
app_label = 'meinberlin_documents';"""
class Migration(migrations.Migration):
dependencies = [
('meinberlin_documents', '0004_remove_create_document_phase'),
]
operations = [
migrations.RunSQL(sql, reverse_sql)
]
|
|
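Editor's note: the raw SQL above preserves comments and other generic relations because they point at the django_content_type row by id; only the stored model name changes. A hedged RunPython equivalent is sketched below — it is not code from the repository and would also need a dependency on the contenttypes migrations:

from django.db import migrations

def rename_content_type(apps, schema_editor):
    ContentType = apps.get_model('contenttypes', 'ContentType')
    ContentType.objects.filter(
        app_label='meinberlin_documents', model='document',
    ).update(model='chapter')

def reverse_rename(apps, schema_editor):
    ContentType = apps.get_model('contenttypes', 'ContentType')
    ContentType.objects.filter(
        app_label='meinberlin_documents', model='chapter',
    ).update(model='document')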
a0fea14fe7fd5a938e0f2583dbd9dc6bb9040bc1
|
Python/232_ImplementQueueUsingStack.py
|
Python/232_ImplementQueueUsingStack.py
|
class Stack(object):
def __init__(self):
"""
initialize your data structure here.
"""
self.__queue = []
def push(self, x):
"""
:type x: int
:rtype: nothing
"""
self.__queue.append(x)
def pop(self):
"""
:rtype: nothing
"""
p = self.__queue[-1]
tmpQ = self.__queue[:-1]
self.__queue = tmpQ
return p
def top(self):
"""
:rtype: int
"""
return self.__queue[-1]
def empty(self):
"""
:rtype: bool
"""
return not self.__queue
|
Add solution for 232 implement queue using stack.
|
Add solution for 232 implement queue using stack.
|
Python
|
mit
|
comicxmz001/LeetCode,comicxmz001/LeetCode
|
Add solution for 232 implement queue using stack.
|
class Stack(object):
def __init__(self):
"""
initialize your data structure here.
"""
self.__queue = []
def push(self, x):
"""
:type x: int
:rtype: nothing
"""
self.__queue.append(x)
def pop(self):
"""
:rtype: nothing
"""
p = self.__queue[-1]
tmpQ = self.__queue[:-1]
self.__queue = tmpQ
return p
def top(self):
"""
:rtype: int
"""
return self.__queue[-1]
def empty(self):
"""
:rtype: bool
"""
return not self.__queue
|
<commit_before><commit_msg>Add solution for 232 implement queue using stack.<commit_after>
|
class Stack(object):
def __init__(self):
"""
initialize your data structure here.
"""
self.__queue = []
def push(self, x):
"""
:type x: int
:rtype: nothing
"""
self.__queue.append(x)
def pop(self):
"""
:rtype: nothing
"""
p = self.__queue[-1]
tmpQ = self.__queue[:-1]
self.__queue = tmpQ
return p
def top(self):
"""
:rtype: int
"""
return self.__queue[-1]
def empty(self):
"""
:rtype: bool
"""
return not self.__queue
|
Add solution for 232 implement queue using stack.class Stack(object):
def __init__(self):
"""
initialize your data structure here.
"""
self.__queue = []
def push(self, x):
"""
:type x: int
:rtype: nothing
"""
self.__queue.append(x)
def pop(self):
"""
:rtype: nothing
"""
p = self.__queue[-1]
tmpQ = self.__queue[:-1]
self.__queue = tmpQ
return p
def top(self):
"""
:rtype: int
"""
return self.__queue[-1]
def empty(self):
"""
:rtype: bool
"""
return not self.__queue
|
<commit_before><commit_msg>Add solution for 232 implement queue using stack.<commit_after>class Stack(object):
def __init__(self):
"""
initialize your data structure here.
"""
self.__queue = []
def push(self, x):
"""
:type x: int
:rtype: nothing
"""
self.__queue.append(x)
def pop(self):
"""
:rtype: nothing
"""
p = self.__queue[-1]
tmpQ = self.__queue[:-1]
self.__queue = tmpQ
return p
def top(self):
"""
:rtype: int
"""
return self.__queue[-1]
def empty(self):
"""
:rtype: bool
"""
return not self.__queue
|
|
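Editor's note: despite the "queue using stacks" title, the class stored in this row is a plain list-backed stack (push and pop both work on the tail). The usual LeetCode 232 answer amortises a FIFO queue over two stacks; a hedged sketch of that approach, not taken from the repository:

class QueueWithTwoStacks(object):
    def __init__(self):
        self._in, self._out = [], []

    def push(self, x):
        self._in.append(x)

    def _shift(self):
        # Move elements only when the outbox is empty, so each element is
        # transferred at most once (amortised O(1) per operation).
        if not self._out:
            while self._in:
                self._out.append(self._in.pop())

    def pop(self):
        self._shift()
        return self._out.pop()

    def peek(self):
        self._shift()
        return self._out[-1]

    def empty(self):
        return not self._in and not self._out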
8cdca8db430b74b4f8ae74492d6d2d740aa670a6
|
apps/network/tests/test_routes/test_association_requests.py
|
apps/network/tests/test_routes/test_association_requests.py
|
def test_send_association_request(client):
result = client.post("/association-requests/request", data={"id": "54623156", "address": "159.15.223.162"})
assert result.status_code == 200
assert result.get_json() == {"msg": "Association request sent!"}
def test_receive_association_request(client):
result = client.post("/association-requests/receive", data={"id": "54623156", "address": "159.15.223.162"})
assert result.status_code == 200
assert result.get_json() == {"msg": "Association request received!"}
def test_reply_association_request(client):
result = client.post("/association-requests/respond", data={"id": "54623156", "address": "159.15.223.162"})
assert result.status_code == 200
assert result.get_json() == {"msg": "Association request was replied!"}
def get_all_association_requests(client):
result = client.get("/association-requests/")
assert result.status_code == 200
assert result.get_json() == {"association-requests": ["Network A", "Network B", "Network C"]}
def get_specific_association_requests(client):
result = client.get("/association-requests/51613546")
assert result.status_code == 200
assert result.get_json() == {
"association-request": {
"ID": "51613546",
"address": "156.89.33.200",
}
}
def delete_association_requests(client):
result = client.get("/association-requests/51661659")
assert result.status_code == 200
assert result.get_json() == {"msg": "Association request deleted!"}
|
ADD Network association_requests unit tests
|
ADD Network association_requests unit tests
|
Python
|
apache-2.0
|
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
|
ADD Network association_requests unit tests
|
def test_send_association_request(client):
result = client.post("/association-requests/request", data={"id": "54623156", "address": "159.15.223.162"})
assert result.status_code == 200
assert result.get_json() == {"msg": "Association request sent!"}
def test_receive_association_request(client):
result = client.post("/association-requests/receive", data={"id": "54623156", "address": "159.15.223.162"})
assert result.status_code == 200
assert result.get_json() == {"msg": "Association request received!"}
def test_reply_association_request(client):
result = client.post("/association-requests/respond", data={"id": "54623156", "address": "159.15.223.162"})
assert result.status_code == 200
assert result.get_json() == {"msg": "Association request was replied!"}
def get_all_association_requests(client):
result = client.get("/association-requests/")
assert result.status_code == 200
assert result.get_json() == {"association-requests": ["Network A", "Network B", "Network C"]}
def get_specific_association_requests(client):
result = client.get("/association-requests/51613546")
assert result.status_code == 200
assert result.get_json() == {
"association-request": {
"ID": "51613546",
"address": "156.89.33.200",
}
}
def delete_association_requests(client):
result = client.get("/association-requests/51661659")
assert result.status_code == 200
assert result.get_json() == {"msg": "Association request deleted!"}
|
<commit_before><commit_msg>ADD Network association_requests unit tests<commit_after>
|
def test_send_association_request(client):
result = client.post("/association-requests/request", data={"id": "54623156", "address": "159.15.223.162"})
assert result.status_code == 200
assert result.get_json() == {"msg": "Association request sent!"}
def test_receive_association_request(client):
result = client.post("/association-requests/receive", data={"id": "54623156", "address": "159.15.223.162"})
assert result.status_code == 200
assert result.get_json() == {"msg": "Association request received!"}
def test_reply_association_request(client):
result = client.post("/association-requests/respond", data={"id": "54623156", "address": "159.15.223.162"})
assert result.status_code == 200
assert result.get_json() == {"msg": "Association request was replied!"}
def get_all_association_requests(client):
result = client.get("/association-requests/")
assert result.status_code == 200
assert result.get_json() == {"association-requests": ["Network A", "Network B", "Network C"]}
def get_specific_association_requests(client):
result = client.get("/association-requests/51613546")
assert result.status_code == 200
assert result.get_json() == {
"association-request": {
"ID": "51613546",
"address": "156.89.33.200",
}
}
def delete_association_requests(client):
result = client.get("/association-requests/51661659")
assert result.status_code == 200
assert result.get_json() == {"msg": "Association request deleted!"}
|
ADD Network association_requests unit tests
def test_send_association_request(client):
result = client.post("/association-requests/request", data={"id": "54623156", "address": "159.15.223.162"})
assert result.status_code == 200
assert result.get_json() == {"msg": "Association request sent!"}
def test_receive_association_request(client):
result = client.post("/association-requests/receive", data={"id": "54623156", "address": "159.15.223.162"})
assert result.status_code == 200
assert result.get_json() == {"msg": "Association request received!"}
def test_reply_association_request(client):
result = client.post("/association-requests/respond", data={"id": "54623156", "address": "159.15.223.162"})
assert result.status_code == 200
assert result.get_json() == {"msg": "Association request was replied!"}
def get_all_association_requests(client):
result = client.get("/association-requests/")
assert result.status_code == 200
assert result.get_json() == {"association-requests": ["Network A", "Network B", "Network C"]}
def get_specific_association_requests(client):
result = client.get("/association-requests/51613546")
assert result.status_code == 200
assert result.get_json() == {
"association-request": {
"ID": "51613546",
"address": "156.89.33.200",
}
}
def delete_association_requests(client):
result = client.get("/association-requests/51661659")
assert result.status_code == 200
assert result.get_json() == {"msg": "Association request deleted!"}
|
<commit_before><commit_msg>ADD Network association_requests unit tests<commit_after>
def test_send_association_request(client):
result = client.post("/association-requests/request", data={"id": "54623156", "address": "159.15.223.162"})
assert result.status_code == 200
assert result.get_json() == {"msg": "Association request sent!"}
def test_receive_association_request(client):
result = client.post("/association-requests/receive", data={"id": "54623156", "address": "159.15.223.162"})
assert result.status_code == 200
assert result.get_json() == {"msg": "Association request received!"}
def test_reply_association_request(client):
result = client.post("/association-requests/respond", data={"id": "54623156", "address": "159.15.223.162"})
assert result.status_code == 200
assert result.get_json() == {"msg": "Association request was replied!"}
def get_all_association_requests(client):
result = client.get("/association-requests/")
assert result.status_code == 200
assert result.get_json() == {"association-requests": ["Network A", "Network B", "Network C"]}
def get_specific_association_requests(client):
result = client.get("/association-requests/51613546")
assert result.status_code == 200
assert result.get_json() == {
"association-request": {
"ID": "51613546",
"address": "156.89.33.200",
}
}
def delete_association_requests(client):
result = client.get("/association-requests/51661659")
assert result.status_code == 200
assert result.get_json() == {"msg": "Association request deleted!"}
|
|
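Editor's note: the Flask tests above assume a pytest `client` fixture that is not part of this row. A hypothetical conftest.py sketch of such a fixture — the create_app factory name is an assumption, not taken from the repository:

import pytest
from app import create_app  # assumed application factory; adjust to the real module path

@pytest.fixture
def client():
    app = create_app()
    app.config["TESTING"] = True
    with app.test_client() as client:
        yield client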
aedb8c78b1512cb0b0f887c5dc05686170313d74
|
journal/migrations/0016_auto_20170110_1737.py
|
journal/migrations/0016_auto_20170110_1737.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-01-10 17:37
from __future__ import unicode_literals
from django.conf import settings
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('journal', '0015_remove_uid_null'),
]
operations = [
migrations.AlterField(
model_name='entry',
name='uid',
field=models.CharField(db_index=True, max_length=64, validators=[django.core.validators.RegexValidator(message='Not a sha256 value.', regex='[a-fA-F0-9]{64}')]),
),
migrations.AlterField(
model_name='journal',
name='uid',
field=models.CharField(db_index=True, max_length=64, validators=[django.core.validators.RegexValidator(message='Not a sha256 value.', regex='[a-fA-F0-9]{64}')]),
),
migrations.AlterUniqueTogether(
name='entry',
unique_together=set([('uid', 'journal')]),
),
migrations.AlterUniqueTogether(
name='journal',
unique_together=set([('uid', 'owner')]),
),
]
|
Make uid unique together with journal/user, not on its own.
|
Make uid unique together with journal/user, not on its own.
|
Python
|
agpl-3.0
|
etesync/journal-manager
|
Make uid unique together with journal/user, not on its own.
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-01-10 17:37
from __future__ import unicode_literals
from django.conf import settings
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('journal', '0015_remove_uid_null'),
]
operations = [
migrations.AlterField(
model_name='entry',
name='uid',
field=models.CharField(db_index=True, max_length=64, validators=[django.core.validators.RegexValidator(message='Not a sha256 value.', regex='[a-fA-F0-9]{64}')]),
),
migrations.AlterField(
model_name='journal',
name='uid',
field=models.CharField(db_index=True, max_length=64, validators=[django.core.validators.RegexValidator(message='Not a sha256 value.', regex='[a-fA-F0-9]{64}')]),
),
migrations.AlterUniqueTogether(
name='entry',
unique_together=set([('uid', 'journal')]),
),
migrations.AlterUniqueTogether(
name='journal',
unique_together=set([('uid', 'owner')]),
),
]
|
<commit_before><commit_msg>Make uid unique together with journal/user, not on its own.<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-01-10 17:37
from __future__ import unicode_literals
from django.conf import settings
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('journal', '0015_remove_uid_null'),
]
operations = [
migrations.AlterField(
model_name='entry',
name='uid',
field=models.CharField(db_index=True, max_length=64, validators=[django.core.validators.RegexValidator(message='Not a sha256 value.', regex='[a-fA-F0-9]{64}')]),
),
migrations.AlterField(
model_name='journal',
name='uid',
field=models.CharField(db_index=True, max_length=64, validators=[django.core.validators.RegexValidator(message='Not a sha256 value.', regex='[a-fA-F0-9]{64}')]),
),
migrations.AlterUniqueTogether(
name='entry',
unique_together=set([('uid', 'journal')]),
),
migrations.AlterUniqueTogether(
name='journal',
unique_together=set([('uid', 'owner')]),
),
]
|
Make uid unique together with journal/user, not on its own.# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-01-10 17:37
from __future__ import unicode_literals
from django.conf import settings
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('journal', '0015_remove_uid_null'),
]
operations = [
migrations.AlterField(
model_name='entry',
name='uid',
field=models.CharField(db_index=True, max_length=64, validators=[django.core.validators.RegexValidator(message='Not a sha256 value.', regex='[a-fA-F0-9]{64}')]),
),
migrations.AlterField(
model_name='journal',
name='uid',
field=models.CharField(db_index=True, max_length=64, validators=[django.core.validators.RegexValidator(message='Not a sha256 value.', regex='[a-fA-F0-9]{64}')]),
),
migrations.AlterUniqueTogether(
name='entry',
unique_together=set([('uid', 'journal')]),
),
migrations.AlterUniqueTogether(
name='journal',
unique_together=set([('uid', 'owner')]),
),
]
|
<commit_before><commit_msg>Make uid unique together with journal/user, not on its own.<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-01-10 17:37
from __future__ import unicode_literals
from django.conf import settings
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('journal', '0015_remove_uid_null'),
]
operations = [
migrations.AlterField(
model_name='entry',
name='uid',
field=models.CharField(db_index=True, max_length=64, validators=[django.core.validators.RegexValidator(message='Not a sha256 value.', regex='[a-fA-F0-9]{64}')]),
),
migrations.AlterField(
model_name='journal',
name='uid',
field=models.CharField(db_index=True, max_length=64, validators=[django.core.validators.RegexValidator(message='Not a sha256 value.', regex='[a-fA-F0-9]{64}')]),
),
migrations.AlterUniqueTogether(
name='entry',
unique_together=set([('uid', 'journal')]),
),
migrations.AlterUniqueTogether(
name='journal',
unique_together=set([('uid', 'owner')]),
),
]
|
|
5fbf720c97d08144a3518c8995a3b76d2b923377
|
python/turbodbc_test/test_cursor_unsupported_features.py
|
python/turbodbc_test/test_cursor_unsupported_features.py
|
from unittest import TestCase
from turbodbc import connect
dsn = "Exasol R&D test database"
class TestCursorUnsupportedFeatures(TestCase):
"""
Test optional features mentioned in PEP-249 "behave" as specified
"""
def test_callproc_unsupported(self):
cursor = connect(dsn).cursor()
with self.assertRaises(AttributeError):
cursor.callproc()
def test_nextset_unsupported(self):
cursor = connect(dsn).cursor()
with self.assertRaises(AttributeError):
cursor.nextset()
|
Add test for optional Python database API features which are not implemented
|
Add test for optional Python database API features which are not implemented
|
Python
|
mit
|
blue-yonder/turbodbc,blue-yonder/turbodbc,blue-yonder/turbodbc,blue-yonder/turbodbc
|
Add test for optional Python database API features which are not implemented
|
from unittest import TestCase
from turbodbc import connect
dsn = "Exasol R&D test database"
class TestCursorUnsupportedFeatures(TestCase):
"""
Test optional features mentioned in PEP-249 "behave" as specified
"""
def test_callproc_unsupported(self):
cursor = connect(dsn).cursor()
with self.assertRaises(AttributeError):
cursor.callproc()
def test_nextset_unsupported(self):
cursor = connect(dsn).cursor()
with self.assertRaises(AttributeError):
cursor.nextset()
|
<commit_before><commit_msg>Add test for optional Python database API features which are not implemented<commit_after>
|
from unittest import TestCase
from turbodbc import connect
dsn = "Exasol R&D test database"
class TestCursorUnsupportedFeatures(TestCase):
"""
Test optional features mentioned in PEP-249 "behave" as specified
"""
def test_callproc_unsupported(self):
cursor = connect(dsn).cursor()
with self.assertRaises(AttributeError):
cursor.callproc()
def test_nextset_unsupported(self):
cursor = connect(dsn).cursor()
with self.assertRaises(AttributeError):
cursor.nextset()
|
Add test for optional Python database API features which are not implementedfrom unittest import TestCase
from turbodbc import connect
dsn = "Exasol R&D test database"
class TestCursorUnsupportedFeatures(TestCase):
"""
Test optional features mentioned in PEP-249 "behave" as specified
"""
def test_callproc_unsupported(self):
cursor = connect(dsn).cursor()
with self.assertRaises(AttributeError):
cursor.callproc()
def test_nextset_unsupported(self):
cursor = connect(dsn).cursor()
with self.assertRaises(AttributeError):
cursor.nextset()
|
<commit_before><commit_msg>Add test for optional Python database API features which are not implemented<commit_after>from unittest import TestCase
from turbodbc import connect
dsn = "Exasol R&D test database"
class TestCursorUnsupportedFeatures(TestCase):
"""
Test optional features mentioned in PEP-249 "behave" as specified
"""
def test_callproc_unsupported(self):
cursor = connect(dsn).cursor()
with self.assertRaises(AttributeError):
cursor.callproc()
def test_nextset_unsupported(self):
cursor = connect(dsn).cursor()
with self.assertRaises(AttributeError):
cursor.nextset()
|
|
afa3abdf6cb98db93feb39825dd434adb4c7965f
|
documents/management/commands/import_from_csv.py
|
documents/management/commands/import_from_csv.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from django.core.management.base import BaseCommand
from django.core.files import File
import csv
from users.models import User
from catalog.models import Course
from documents import logic
class Command(BaseCommand):
help = 'Import documents from a csv'
def add_arguments(self, parser):
parser.add_argument(action='store', dest='path', default='', help='Documents path')
parser.add_argument('--user', action='store', dest='username', default='tverhaegen', help='user owning the documents')
def handle(self, *args, **options):
netid = options["username"]
self.stdout.write('Looking for user "{}"'.format(netid))
user = User.objects.filter(netid=netid).first()
if user is None:
self.stdout.write('Could not find user.')
return
path = options['path']
self.stdout.write('Reading csv from "{}"'.format(path))
if not os.path.exists(path):
self.stdout.write("CSV does not exist")
return
with open(path) as fd:
reader = csv.DictReader(fd)
for row in reader:
try:
import_document(row, user)
self.stdout.write('.', ending='')
self.stdout.flush()
except:
print('Fail for %r' % row)
def import_document(row, user):
"""row has keys ['name', 'slug', 'path']"""
path = row['path']
filename = os.path.split(path)[1]
_, extension = os.path.splitext(filename)
name = row['name']
name = logic.clean_filename(name)
course = Course.objects.get(slug=row['slug'])
with open(path, 'rb') as fd:
document = logic.add_file_to_course(
file=File(fd),
name=name,
extension=extension,
course=course,
tags=[],
user=user
)
document.tag_from_name()
document.add_to_queue()
|
Add import from csv command
|
Add import from csv command
|
Python
|
agpl-3.0
|
UrLab/DocHub,UrLab/beta402,UrLab/DocHub,UrLab/beta402,UrLab/beta402,UrLab/DocHub,UrLab/DocHub
|
Add import from csv command
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from django.core.management.base import BaseCommand
from django.core.files import File
import csv
from users.models import User
from catalog.models import Course
from documents import logic
class Command(BaseCommand):
help = 'Import documents from a csv'
def add_arguments(self, parser):
parser.add_argument(action='store', dest='path', default='', help='Documents path')
parser.add_argument('--user', action='store', dest='username', default='tverhaegen', help='user owning the documents')
def handle(self, *args, **options):
netid = options["username"]
self.stdout.write('Looking for user "{}"'.format(netid))
user = User.objects.filter(netid=netid).first()
if user is None:
self.stdout.write('Could not find user.')
return
path = options['path']
self.stdout.write('Reading csv from "{}"'.format(path))
if not os.path.exists(path):
self.stdout.write("CSV does not exist")
return
with open(path) as fd:
reader = csv.DictReader(fd)
for row in reader:
try:
import_document(row, user)
self.stdout.write('.', ending='')
self.stdout.flush()
except:
print('Fail for %r' % row)
def import_document(row, user):
"""row has keys ['name', 'slug', 'path']"""
path = row['path']
filename = os.path.split(path)[1]
_, extension = os.path.splitext(filename)
name = row['name']
name = logic.clean_filename(name)
course = Course.objects.get(slug=row['slug'])
with open(path, 'rb') as fd:
document = logic.add_file_to_course(
file=File(fd),
name=name,
extension=extension,
course=course,
tags=[],
user=user
)
document.tag_from_name()
document.add_to_queue()
|
<commit_before><commit_msg>Add import from csv command<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from django.core.management.base import BaseCommand
from django.core.files import File
import csv
from users.models import User
from catalog.models import Course
from documents import logic
class Command(BaseCommand):
help = 'Import documents from a csv'
def add_arguments(self, parser):
parser.add_argument(action='store', dest='path', default='', help='Documents path')
parser.add_argument('--user', action='store', dest='username', default='tverhaegen', help='user owning the documents')
def handle(self, *args, **options):
netid = options["username"]
self.stdout.write('Looking for user "{}"'.format(netid))
user = User.objects.filter(netid=netid).first()
if user is None:
self.stdout.write('Could not find user.')
return
path = options['path']
self.stdout.write('Reading csv from "{}"'.format(path))
if not os.path.exists(path):
self.stdout.write("CSV does not exist")
return
with open(path) as fd:
reader = csv.DictReader(fd)
for row in reader:
try:
import_document(row, user)
self.stdout.write('.', ending='')
self.stdout.flush()
except:
print('Fail for %r' % row)
def import_document(row, user):
"""row has keys ['name', 'slug', 'path']"""
path = row['path']
filename = os.path.split(path)[1]
_, extension = os.path.splitext(filename)
name = row['name']
name = logic.clean_filename(name)
course = Course.objects.get(slug=row['slug'])
with open(path, 'rb') as fd:
document = logic.add_file_to_course(
file=File(fd),
name=name,
extension=extension,
course=course,
tags=[],
user=user
)
document.tag_from_name()
document.add_to_queue()
|
Add import from csv command# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from django.core.management.base import BaseCommand
from django.core.files import File
import csv
from users.models import User
from catalog.models import Course
from documents import logic
class Command(BaseCommand):
help = 'Import documents from a csv'
def add_arguments(self, parser):
parser.add_argument(action='store', dest='path', default='', help='Documents path')
parser.add_argument('--user', action='store', dest='username', default='tverhaegen', help='user owning the documents')
def handle(self, *args, **options):
netid = options["username"]
self.stdout.write('Looking for user "{}"'.format(netid))
user = User.objects.filter(netid=netid).first()
if user is None:
self.stdout.write('Could not find user.')
return
path = options['path']
self.stdout.write('Reading csv from "{}"'.format(path))
if not os.path.exists(path):
self.stdout.write("CSV does not exist")
return
with open(path) as fd:
reader = csv.DictReader(fd)
for row in reader:
try:
import_document(row, user)
self.stdout.write('.', ending='')
self.stdout.flush()
except:
print('Fail for %r' % row)
def import_document(row, user):
"""row has keys ['name', 'slug', 'path']"""
path = row['path']
filename = os.path.split(path)[1]
_, extension = os.path.splitext(filename)
name = row['name']
name = logic.clean_filename(name)
course = Course.objects.get(slug=row['slug'])
with open(path, 'rb') as fd:
document = logic.add_file_to_course(
file=File(fd),
name=name,
extension=extension,
course=course,
tags=[],
user=user
)
document.tag_from_name()
document.add_to_queue()
|
<commit_before><commit_msg>Add import from csv command<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from django.core.management.base import BaseCommand
from django.core.files import File
import csv
from users.models import User
from catalog.models import Course
from documents import logic
class Command(BaseCommand):
help = 'Import documents from a csv'
def add_arguments(self, parser):
parser.add_argument(action='store', dest='path', default='', help='Documents path')
parser.add_argument('--user', action='store', dest='username', default='tverhaegen', help='user owning the documents')
def handle(self, *args, **options):
netid = options["username"]
self.stdout.write('Looking for user "{}"'.format(netid))
user = User.objects.filter(netid=netid).first()
if user is None:
self.stdout.write('Could not find user.')
return
path = options['path']
self.stdout.write('Reading csv from "{}"'.format(path))
if not os.path.exists(path):
self.stdout.write("CSV does not exist")
return
with open(path) as fd:
reader = csv.DictReader(fd)
for row in reader:
try:
import_document(row, user)
self.stdout.write('.', ending='')
self.stdout.flush()
except:
print('Fail for %r' % row)
def import_document(row, user):
"""row has keys ['name', 'slug', 'path']"""
path = row['path']
filename = os.path.split(path)[1]
_, extension = os.path.splitext(filename)
name = row['name']
name = logic.clean_filename(name)
course = Course.objects.get(slug=row['slug'])
with open(path, 'rb') as fd:
document = logic.add_file_to_course(
file=File(fd),
name=name,
extension=extension,
course=course,
tags=[],
user=user
)
document.tag_from_name()
document.add_to_queue()
|
|
53c9b988d2ccda253080deaa35b70d814309a4aa
|
src/algorithms/simple_hybrid.py
|
src/algorithms/simple_hybrid.py
|
def predict(predictions_vector_0, predictions_vector_1, mixing_variable=0.5, num_partitions=30):
"""Apply a simple linear hybrid recommender.
This function implements the simple linear hybrid recommender Zhou et. al:
"Solving the apparent diversity-accuracy dilemma of recommender systems"
http://arxiv.org/pdf/0808.2670.pdf Equation 5
It takes the weighted linear average of two previous recommendations and
uses the new average as its own predicted rating:
new_rating = mixing_variable * rating_a + (1 - mixing_variable) * rating_b
Args:
predictions_vector_0 (rdd): Contains prediction tuples of the form
(user_id, item_id, predicted_rating) generated from a recommender
algorithm.
predictions_vector_1 (rdd): Contains prediction tuples of the form
(user_id, item_id, predicted_rating) generated from a second
recommender algorithm.
mixing_variable (float): A float in the range [0., 1.] which determines
how to weight the two predictions. If `mixing_variable` is 0 then
`predictions_vector_0` is given all the weight (and
`predictions_vector_1` is ignored). If `mixing_variable` is 1 then
`predictions_vector_1` is given all the weight (and
`predictions_vector_0` is ignored). Defaults to 0.5.
num_partitions (int): The number of partitions to use for the returned
data. Defaults to 30.
Returns:
rdd: An rdd containing prediction tuples of the form
(user_id, item_id, rating)
Raises:
ValueError: If `mixing_variable` is not within the range [0, 1]
"""
# Check the mixing_variable is set to an acceptable value
if not 0 <= mixing_variable <= 1:
raise ValueError('mixing_variable must be within the range [0, 1]')
# Short-circuit in the trivial cases
if mixing_variable == 0:
return predictions_vector_0
elif mixing_variable == 1:
return predictions_vector_1
# Otherwise calculate the linear average
keyed_vector_0 = predictions_vector_0\
.map(lambda (u, i, r): ((u, i), r))
keyed_vector_1 = predictions_vector_1\
.map(lambda (u, i, r): ((u, i), r))
predictions = keyed_vector_0.join(keyed_vector_1)\
.map(lambda ((u, i), (r0, r1)): (u, i, (1. - mixing_variable) * r0 + mixing_variable * r1))\
.coalesce(num_partitions)
return predictions
|
Add a simple hybrid recommender from Zhou et. al
|
Add a simple hybrid recommender from Zhou et. al
|
Python
|
apache-2.0
|
tiffanyj41/hermes,tiffanyj41/hermes,tiffanyj41/hermes,tiffanyj41/hermes
|
Add a simple hybrid recommender from Zhou et. al
|
def predict(predictions_vector_0, predictions_vector_1, mixing_variable=0.5, num_partitions=30):
"""Apply a simple linear hybrid recommender.
This function implements the simple linear hybrid recommender Zhou et. al:
"Solving the apparent diversity-accuracy dilemma of recommender systems"
http://arxiv.org/pdf/0808.2670.pdf Equation 5
It takes the weighted linear average of two previous recommendations and
uses the new average as its own predicted rating:
new_rating = mixing_variable * rating_a + (1 - mixing_variable) * rating_b
Args:
predictions_vector_0 (rdd): Contains prediction tuples of the form
(user_id, item_id, predicted_rating) generated from a recommender
algorithm.
predictions_vector_1 (rdd): Contains prediction tuples of the form
(user_id, item_id, predicted_rating) generated from a second
recommender algorithm.
mixing_variable (float): A float in the range [0., 1.] which determines
how to weight the two predictions. If `mixing_variable` is 0 then
`predictions_vector_0` is given all the weight (and
`predictions_vector_1` is ignored). If `mixing_variable` is 1 then
`predictions_vector_1` is given all the weight (and
`predictions_vector_0` is ignored). Defaults to 0.5.
num_partitions (int): The number of partitions to use for the returned
data. Defaults to 30.
Returns:
rdd: An rdd containing prediction tuples of the form
(user_id, item_id, rating)
Raises:
ValueError: If `mixing_variable` is not within the range [0, 1]
"""
# Check the mixing_variable is set to an acceptable value
if not 0 <= mixing_variable <= 1:
raise ValueError('mixing_variable must be within the range [0, 1]')
# Short-circuit in the trivial cases
if mixing_variable == 0:
return predictions_vector_0
elif mixing_variable == 1:
return predictions_vector_1
# Otherwise calculate the linear average
keyed_vector_0 = predictions_vector_0\
.map(lambda (u, i, r): ((u, i), r))
keyed_vector_1 = predictions_vector_1\
.map(lambda (u, i, r): ((u, i), r))
predictions = keyed_vector_0.join(keyed_vector_1)\
.map(lambda ((u, i), (r0, r1)): (u, i, (1. - mixing_variable) * r0 + mixing_variable * r1))\
.coalesce(num_partitions)
return predictions
|
<commit_before><commit_msg>Add a simple hybrid recommender from Zhou et. al<commit_after>
|
def predict(predictions_vector_0, predictions_vector_1, mixing_variable=0.5, num_partitions=30):
"""Apply a simple linear hybrid recommender.
This function implements the simple linear hybrid recommender Zhou et. al:
"Solving the apparent diversity-accuracy dilemma of recommender systems"
http://arxiv.org/pdf/0808.2670.pdf Equation 5
It takes the weighted linear average of two previous recommendations and
uses the new average as its own predicted rating:
new_rating = mixing_variable * rating_a + (1 - mixing_variable) * rating_b
Args:
predictions_vector_0 (rdd): Contains prediction tuples of the form
(user_id, item_id, predicted_rating) generated from a recommender
algorithm.
predictions_vector_1 (rdd): Contains prediction tuples of the form
(user_id, item_id, predicted_rating) generated from a second
recommender algorithm.
mixing_variable (float): A float in the range [0., 1.] which determines
how to weight the two predictions. If `mixing_variable` is 0 then
`predictions_vector_0` is given all the weight (and
`predictions_vector_1` is ignored). If `mixing_variable` is 1 then
`predictions_vector_1` is given all the weight (and
`predictions_vector_0` is ignored). Defaults to 0.5.
num_partitions (int): The number of partitions to use for the returned
data. Defaults to 30.
Returns:
rdd: An rdd containing prediction tuples of the form
(user_id, item_id, rating)
Raises:
ValueError: If `mixing_variable` is not within the range [0, 1]
"""
# Check the mixing_variable is set to an acceptable value
if not 0 <= mixing_variable <= 1:
raise ValueError('mixing_variable must be within the range [0, 1]')
# Short-circuit in the trivial cases
if mixing_variable == 0:
return predictions_vector_0
elif mixing_variable == 1:
return predictions_vector_1
# Otherwise calculate the linear average
keyed_vector_0 = predictions_vector_0\
.map(lambda (u, i, r): ((u, i), r))
keyed_vector_1 = predictions_vector_1\
.map(lambda (u, i, r): ((u, i), r))
predictions = keyed_vector_0.join(keyed_vector_1)\
.map(lambda ((u, i), (r0, r1)): (u, i, (1. - mixing_variable) * r0 + mixing_variable * r1))\
.coalesce(num_partitions)
return predictions
|
Add a simple hybrid recommender from Zhou et. aldef predict(predictions_vector_0, predictions_vector_1, mixing_variable=0.5, num_partitions=30):
"""Apply a simple linear hybrid recommender.
This function implements the simple linear hybrid recommender Zhou et. al:
"Solving the apparent diversity-accuracy dilemma of recommender systems"
http://arxiv.org/pdf/0808.2670.pdf Equation 5
It takes the weighted linear average of two previous recommendations and
uses the new average as its own predicted rating:
new_rating = mixing_variable * rating_a + (1 - mixing_variable) * rating_b
Args:
predictions_vector_0 (rdd): Contains prediction tuples of the form
(user_id, item_id, predicted_rating) generated from a recommender
algorithm.
predictions_vector_1 (rdd): Contains prediction tuples of the form
(user_id, item_id, predicted_rating) generated from a second
recommender algorithm.
mixing_variable (float): A float in the range [0., 1.] which determines
how to weight the two predictions. If `mixing_variable` is 0 then
`predictions_vector_0` is given all the weight (and
`predictions_vector_1` is ignored). If `mixing_variable` is 1 then
`predictions_vector_1` is given all the weight (and
`predictions_vector_0` is ignored). Defaults to 0.5.
num_partitions (int): The number of partitions to use for the returned
data. Defaults to 30.
Returns:
rdd: An rdd containing prediction tuples of the form
(user_id, item_id, rating)
Raises:
ValueError: If `mixing_variable` is not within the range [0, 1]
"""
# Check the mixing_variable is set to an acceptable value
if not 0 <= mixing_variable <= 1:
raise ValueError('mixing_variable must be within the range [0, 1]')
# Short-circuit in the trivial cases
if mixing_variable == 0:
return predictions_vector_0
elif mixing_variable == 1:
return predictions_vector_1
# Otherwise calculate the linear average
keyed_vector_0 = predictions_vector_0\
.map(lambda (u, i, r): ((u, i), r))
keyed_vector_1 = predictions_vector_1\
.map(lambda (u, i, r): ((u, i), r))
predictions = keyed_vector_0.join(keyed_vector_1)\
.map(lambda ((u, i), (r0, r1)): (u, i, (1. - mixing_variable) * r0 + mixing_variable * r1))\
.coalesce(num_partitions)
return predictions
|
<commit_before><commit_msg>Add a simple hybrid recommender from Zhou et. al<commit_after>def predict(predictions_vector_0, predictions_vector_1, mixing_variable=0.5, num_partitions=30):
"""Apply a simple linear hybrid recommender.
This function implements the simple linear hybrid recommender Zhou et. al:
"Solving the apparent diversity-accuracy dilemma of recommender systems"
http://arxiv.org/pdf/0808.2670.pdf Equation 5
It takes the weighted linear average of two previous recommendations and
uses the new average as its own predicted rating:
new_rating = mixing_variable * rating_a + (1 - mixing_variable) * rating_b
Args:
predictions_vector_0 (rdd): Contains prediction tuples of the form
(user_id, item_id, predicted_rating) generated from a recommender
algorithm.
predictions_vector_1 (rdd): Contains prediction tuples of the form
(user_id, item_id, predicted_rating) generated from a second
recommender algorithm.
mixing_variable (float): A float in the range [0., 1.] which determines
how to weight the two predictions. If `mixing_variable` is 0 then
`predictions_vector_0` is given all the weight (and
`predictions_vector_1` is ignored). If `mixing_variable` is 1 then
`predictions_vector_1` is given all the weight (and
`predictions_vector_0` is ignored). Defaults to 0.5.
num_partitions (int): The number of partitions to use for the returned
data. Defaults to 30.
Returns:
rdd: An rdd containing prediction tuples of the form
(user_id, item_id, rating)
Raises:
ValueError: If `mixing_variable` is not within the range [0, 1]
"""
# Check the mixing_variable is set to an acceptable value
if not 0 <= mixing_variable <= 1:
raise ValueError('mixing_variable must be within the range [0, 1]')
# Short-circuit in the trivial cases
if mixing_variable == 0:
return predictions_vector_0
elif mixing_variable == 1:
return predictions_vector_1
# Otherwise calculate the linear average
keyed_vector_0 = predictions_vector_0\
.map(lambda (u, i, r): ((u, i), r))
keyed_vector_1 = predictions_vector_1\
.map(lambda (u, i, r): ((u, i), r))
predictions = keyed_vector_0.join(keyed_vector_1)\
.map(lambda ((u, i), (r0, r1)): (u, i, (1. - mixing_variable) * r0 + mixing_variable * r1))\
.coalesce(num_partitions)
return predictions
|
|
9710af9d9e350d8331736d76f27af1fb46671aa2
|
salt/beacons/twilio_txt_msg.py
|
salt/beacons/twilio_txt_msg.py
|
# -*- coding: utf-8 -*-
'''
Beacon to emit Twilio text messages
'''
# Import Python libs
from __future__ import absolute_import
from datetime import datetime
import logging
# Import 3rd Party libs
try:
from twilio.rest import TwilioRestClient
HAS_TWILIO = True
except ImportError:
HAS_TWILIO = False
log = logging.getLogger(__name__)
__virtualname__ = 'twilio_txt_msg'
def __virtual__():
if HAS_TWILIO:
return __virtualname__
else:
return False
def beacon(config):
'''
    Emit a dict named "texts" whose value is a list
of texts.
code_block:: yaml
beacons:
twilio_txt_msg:
account_sid: "<account sid>"
auth_token: "<auth token>"
twilio_number: "+15555555555"
poll_interval: 10
poll_interval defaults to 10 seconds
'''
log.trace('twilio_txt_msg beacon starting')
ret = []
if not all([config['account_sid'], config['auth_token'], config['twilio_number']]):
return ret
output = {}
poll_interval = config.get('poll_interval')
if not poll_interval:
        # Let's default to polling every 10 seconds
poll_interval = 10
now = datetime.now()
if 'twilio_txt_msg' in __context__:
timedelta = now - __context__['twilio_txt_msg']
if timedelta.seconds < poll_interval:
log.trace('Twilio beacon poll interval not met.')
log.trace('Twilio polling in {0}'.format(poll_interval - timedelta.seconds))
return ret
output['texts'] = []
client = TwilioRestClient(config['account_sid'], config['auth_token'])
messages = client.messages.list(to=config['twilio_number'])
log.trace('Num messages: {0}'.format(len(messages)))
if len(messages) < 1:
log.trace('Twilio beacon has no texts')
__context__['twilio_txt_msg'] = now
return ret
for message in messages:
item = {}
item['id'] = str(message.sid)
item['body'] = str(message.body)
item['from'] = str(message.from_)
item['sent'] = str(message.date_sent)
item['images'] = []
if int(message.num_media):
media = client.media(message.sid).list()
if len(media):
for pic in media:
item['images'].append(str(pic.uri))
output['texts'].append(item)
message.delete()
__context__['twilio_txt_msg'] = now
ret.append(output)
return ret
|
Add Twilio text message beacon
|
Add Twilio text message beacon
This beacon will poll a Twilio account for text messages
and emit an event on Salt's event bus as texts are received.
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
Add Twilio text message beacon
This beacon will poll a Twilio account for text messages
and emit an event on Salt's event bus as texts are received.
|
# -*- coding: utf-8 -*-
'''
Beacon to emit Twilio text messages
'''
# Import Python libs
from __future__ import absolute_import
from datetime import datetime
import logging
# Import 3rd Party libs
try:
from twilio.rest import TwilioRestClient
HAS_TWILIO = True
except ImportError:
HAS_TWILIO = False
log = logging.getLogger(__name__)
__virtualname__ = 'twilio_txt_msg'
def __virtual__():
if HAS_TWILIO:
return __virtualname__
else:
return False
def beacon(config):
'''
    Emit a dict named "texts" whose value is a list
of texts.
code_block:: yaml
beacons:
twilio_txt_msg:
account_sid: "<account sid>"
auth_token: "<auth token>"
twilio_number: "+15555555555"
poll_interval: 10
poll_interval defaults to 10 seconds
'''
log.trace('twilio_txt_msg beacon starting')
ret = []
if not all([config['account_sid'], config['auth_token'], config['twilio_number']]):
return ret
output = {}
poll_interval = config.get('poll_interval')
if not poll_interval:
        # Let's default to polling every 10 seconds
poll_interval = 10
now = datetime.now()
if 'twilio_txt_msg' in __context__:
timedelta = now - __context__['twilio_txt_msg']
if timedelta.seconds < poll_interval:
log.trace('Twilio beacon poll interval not met.')
log.trace('Twilio polling in {0}'.format(poll_interval - timedelta.seconds))
return ret
output['texts'] = []
client = TwilioRestClient(config['account_sid'], config['auth_token'])
messages = client.messages.list(to=config['twilio_number'])
log.trace('Num messages: {0}'.format(len(messages)))
if len(messages) < 1:
log.trace('Twilio beacon has no texts')
__context__['twilio_txt_msg'] = now
return ret
for message in messages:
item = {}
item['id'] = str(message.sid)
item['body'] = str(message.body)
item['from'] = str(message.from_)
item['sent'] = str(message.date_sent)
item['images'] = []
if int(message.num_media):
media = client.media(message.sid).list()
if len(media):
for pic in media:
item['images'].append(str(pic.uri))
output['texts'].append(item)
message.delete()
__context__['twilio_txt_msg'] = now
ret.append(output)
return ret
|
<commit_before><commit_msg>Add Twilio text message beacon
This beacon will poll a Twilio account for text messages
and emit an event on Salt's event bus as texts are received.<commit_after>
|
# -*- coding: utf-8 -*-
'''
Beacon to emit Twilio text messages
'''
# Import Python libs
from __future__ import absolute_import
from datetime import datetime
import logging
# Import 3rd Party libs
try:
from twilio.rest import TwilioRestClient
HAS_TWILIO = True
except ImportError:
HAS_TWILIO = False
log = logging.getLogger(__name__)
__virtualname__ = 'twilio_txt_msg'
def __virtual__():
if HAS_TWILIO:
return __virtualname__
else:
return False
def beacon(config):
'''
    Emit a dict named "texts" whose value is a list
of texts.
code_block:: yaml
beacons:
twilio_txt_msg:
account_sid: "<account sid>"
auth_token: "<auth token>"
twilio_number: "+15555555555"
poll_interval: 10
poll_interval defaults to 10 seconds
'''
log.trace('twilio_txt_msg beacon starting')
ret = []
if not all([config['account_sid'], config['auth_token'], config['twilio_number']]):
return ret
output = {}
poll_interval = config.get('poll_interval')
if not poll_interval:
        # Let's default to polling every 10 seconds
poll_interval = 10
now = datetime.now()
if 'twilio_txt_msg' in __context__:
timedelta = now - __context__['twilio_txt_msg']
if timedelta.seconds < poll_interval:
log.trace('Twilio beacon poll interval not met.')
log.trace('Twilio polling in {0}'.format(poll_interval - timedelta.seconds))
return ret
output['texts'] = []
client = TwilioRestClient(config['account_sid'], config['auth_token'])
messages = client.messages.list(to=config['twilio_number'])
log.trace('Num messages: {0}'.format(len(messages)))
if len(messages) < 1:
log.trace('Twilio beacon has no texts')
__context__['twilio_txt_msg'] = now
return ret
for message in messages:
item = {}
item['id'] = str(message.sid)
item['body'] = str(message.body)
item['from'] = str(message.from_)
item['sent'] = str(message.date_sent)
item['images'] = []
if int(message.num_media):
media = client.media(message.sid).list()
if len(media):
for pic in media:
item['images'].append(str(pic.uri))
output['texts'].append(item)
message.delete()
__context__['twilio_txt_msg'] = now
ret.append(output)
return ret
|
Add Twilio text message beacon
This beacon will poll a Twilio account for text messages
and emit an event on Salt's event bus as texts are received.# -*- coding: utf-8 -*-
'''
Beacon to emit Twilio text messages
'''
# Import Python libs
from __future__ import absolute_import
from datetime import datetime
import logging
# Import 3rd Party libs
try:
from twilio.rest import TwilioRestClient
HAS_TWILIO = True
except ImportError:
HAS_TWILIO = False
log = logging.getLogger(__name__)
__virtualname__ = 'twilio_txt_msg'
def __virtual__():
if HAS_TWILIO:
return __virtualname__
else:
return False
def beacon(config):
'''
    Emit a dict named "texts" whose value is a list
of texts.
code_block:: yaml
beacons:
twilio_txt_msg:
account_sid: "<account sid>"
auth_token: "<auth token>"
twilio_number: "+15555555555"
poll_interval: 10
poll_interval defaults to 10 seconds
'''
log.trace('twilio_txt_msg beacon starting')
ret = []
if not all([config['account_sid'], config['auth_token'], config['twilio_number']]):
return ret
output = {}
poll_interval = config.get('poll_interval')
if not poll_interval:
        # Let's default to polling every 10 seconds
poll_interval = 10
now = datetime.now()
if 'twilio_txt_msg' in __context__:
timedelta = now - __context__['twilio_txt_msg']
if timedelta.seconds < poll_interval:
log.trace('Twilio beacon poll interval not met.')
log.trace('Twilio polling in {0}'.format(poll_interval - timedelta.seconds))
return ret
output['texts'] = []
client = TwilioRestClient(config['account_sid'], config['auth_token'])
messages = client.messages.list(to=config['twilio_number'])
log.trace('Num messages: {0}'.format(len(messages)))
if len(messages) < 1:
log.trace('Twilio beacon has no texts')
__context__['twilio_txt_msg'] = now
return ret
for message in messages:
item = {}
item['id'] = str(message.sid)
item['body'] = str(message.body)
item['from'] = str(message.from_)
item['sent'] = str(message.date_sent)
item['images'] = []
if int(message.num_media):
media = client.media(message.sid).list()
if len(media):
for pic in media:
item['images'].append(str(pic.uri))
output['texts'].append(item)
message.delete()
__context__['twilio_txt_msg'] = now
ret.append(output)
return ret
|
<commit_before><commit_msg>Add Twilio text message beacon
This beacon will poll a Twilio account for text messages
and emit an event on Salt's event bus as texts are received.<commit_after># -*- coding: utf-8 -*-
'''
Beacon to emit Twilio text messages
'''
# Import Python libs
from __future__ import absolute_import
from datetime import datetime
import logging
# Import 3rd Party libs
try:
from twilio.rest import TwilioRestClient
HAS_TWILIO = True
except ImportError:
HAS_TWILIO = False
log = logging.getLogger(__name__)
__virtualname__ = 'twilio_txt_msg'
def __virtual__():
if HAS_TWILIO:
return __virtualname__
else:
return False
def beacon(config):
'''
    Emit a dict named "texts" whose value is a list
of texts.
code_block:: yaml
beacons:
twilio_txt_msg:
account_sid: "<account sid>"
auth_token: "<auth token>"
twilio_number: "+15555555555"
poll_interval: 10
poll_interval defaults to 10 seconds
'''
log.trace('twilio_txt_msg beacon starting')
ret = []
if not all([config['account_sid'], config['auth_token'], config['twilio_number']]):
return ret
output = {}
poll_interval = config.get('poll_interval')
if not poll_interval:
        # Let's default to polling every 10 seconds
poll_interval = 10
now = datetime.now()
if 'twilio_txt_msg' in __context__:
timedelta = now - __context__['twilio_txt_msg']
if timedelta.seconds < poll_interval:
log.trace('Twilio beacon poll interval not met.')
log.trace('Twilio polling in {0}'.format(poll_interval - timedelta.seconds))
return ret
output['texts'] = []
client = TwilioRestClient(config['account_sid'], config['auth_token'])
messages = client.messages.list(to=config['twilio_number'])
log.trace('Num messages: {0}'.format(len(messages)))
if len(messages) < 1:
log.trace('Twilio beacon has no texts')
__context__['twilio_txt_msg'] = now
return ret
for message in messages:
item = {}
item['id'] = str(message.sid)
item['body'] = str(message.body)
item['from'] = str(message.from_)
item['sent'] = str(message.date_sent)
item['images'] = []
if int(message.num_media):
media = client.media(message.sid).list()
if len(media):
for pic in media:
item['images'].append(str(pic.uri))
output['texts'].append(item)
message.delete()
__context__['twilio_txt_msg'] = now
ret.append(output)
return ret
|
|
f81a2f9e3f1123ec474bce3278107a94c70e0dc3
|
python/helpers/pydev/_pydev_bundle/_pydev_filesystem_encoding.py
|
python/helpers/pydev/_pydev_bundle/_pydev_filesystem_encoding.py
|
def __getfilesystemencoding():
'''
Note: there's a copy of this method in interpreterInfo.py
'''
import sys
try:
ret = sys.getfilesystemencoding()
if not ret:
raise RuntimeError('Unable to get encoding.')
return ret
except:
try:
#Handle Jython
from java.lang import System # @UnresolvedImport
env = System.getProperty("os.name").lower()
if env.find('win') != -1:
return 'ISO-8859-1' #mbcs does not work on Jython, so, use a (hopefully) suitable replacement
return 'utf-8'
except:
pass
#Only available from 2.3 onwards.
if sys.platform == 'win32':
return 'mbcs'
return 'utf-8'
def getfilesystemencoding():
try:
ret = __getfilesystemencoding()
#Check if the encoding is actually there to be used!
if hasattr('', 'encode'):
''.encode(ret)
if hasattr('', 'decode'):
''.decode(ret)
return ret
except:
return 'utf-8'
|
import sys
def __getfilesystemencoding():
'''
Note: there's a copy of this method in interpreterInfo.py
'''
try:
ret = sys.getfilesystemencoding()
if not ret:
raise RuntimeError('Unable to get encoding.')
return ret
except:
try:
#Handle Jython
from java.lang import System # @UnresolvedImport
env = System.getProperty("os.name").lower()
if env.find('win') != -1:
return 'ISO-8859-1' #mbcs does not work on Jython, so, use a (hopefully) suitable replacement
return 'utf-8'
except:
pass
#Only available from 2.3 onwards.
if sys.platform == 'win32':
return 'mbcs'
return 'utf-8'
def getfilesystemencoding():
try:
ret = __getfilesystemencoding()
#Check if the encoding is actually there to be used!
if hasattr('', 'encode'):
''.encode(ret)
if hasattr('', 'decode'):
''.decode(ret)
return ret
except:
return 'utf-8'
|
Fix deadlock in remote debugger (PY-18546)
|
Fix deadlock in remote debugger (PY-18546)
|
Python
|
apache-2.0
|
salguarnieri/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,xfournet/intellij-community,salguarnieri/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,vvv1559/intellij-community,allotria/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,hurricup/intellij-community,xfournet/intellij-community,fitermay/intellij-community,lucafavatella/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,lucafavatella/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,hurricup/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,fitermay/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,da1z/intellij-community,hurricup/intellij-community,xfournet/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,salguarnieri/intellij-community,suncycheng/intellij-community,salguarnieri/intellij-community,hurricup/intellij-community,FHannes/intellij-community,lucafavatella/intellij-community,lucafavatella/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,fitermay/intellij-community,asedunov/intellij-community,michaelgallacher/intellij-community,lucafavatella/intellij-community,youdonghai/intellij-community,idea4bsd/idea4bsd,michaelgallacher/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,FHannes/intellij-community,retomerz/intellij-community,apixandru/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,allotria/intellij-community,xfournet/intellij-community,signed/intellij-community,FHannes/intellij-community,salguarnieri/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,salguarnieri/intellij-community,retomerz/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,vvv1559/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,retomerz/intellij-community,michaelgallacher/intellij-community,da1z/intellij-community,vvv1559/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,hurricup/intellij-community,vvv1559/intellij-community,signed/intellij-community,xfournet/intellij-community,semonte/intellij-community,apixandru/intellij-community,fitermay/intellij-community,retomerz/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,signed/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,da1z/intellij-community,allotria/intellij-community,hurricup/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,retomerz/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,FHannes/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,da1z/intellij-community,signed/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,FHannes/intellij-c
ommunity,lucafavatella/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,hurricup/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,lucafavatella/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,da1z/intellij-community,apixandru/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,michaelgallacher/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,FHannes/intellij-community,xfournet/intellij-community,FHannes/intellij-community,youdonghai/intellij-community,xfournet/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,hurricup/intellij-community,semonte/intellij-community,salguarnieri/intellij-community,salguarnieri/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,retomerz/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,fitermay/intellij-community,retomerz/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,signed/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,fitermay/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,michaelgallacher/intellij-community,FHannes/intellij-community,xfournet/intellij-community,retomerz/intellij-community,ibinti/intellij-community,semonte/intellij-community,lucafavatella/intellij-community,signed/intellij-community,FHannes/intellij-community,asedunov/intellij-community,fitermay/intellij-community,da1z/intellij-community,signed/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,salguarnieri/intellij-community,salguarnieri/intellij-community,asedunov/intellij-community,retomerz/intellij-community,xfournet/intellij-community,allotria/intellij-community,hurricup/intellij-community,retomerz/intellij-community,idea4bsd/idea4bsd,FHannes/intellij-community,retomerz/intellij-community,salguarnieri/intellij-community,suncycheng/intellij-community,da1z/intellij-community,youdonghai/intellij-community,youdonghai/intellij-community,da1z/intellij-community,allotria/intellij-community,fitermay/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,fitermay/intellij-community,asedunov/intellij-community,lucafavatella/intellij-community,michaelgallacher/intellij-community,signed/intellij-community,retomerz/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,ibinti/intellij-community,fitermay/intellij-community,fitermay/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,retomerz/intellij-community,mglukhikh/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,asedunov/intellij-community,ibinti/intellij-community,fitermay/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,allotria/intellij-community,ibinti/intellij-community,hurricup/intellij-comm
unity,youdonghai/intellij-community,idea4bsd/idea4bsd,signed/intellij-community,mglukhikh/intellij-community,salguarnieri/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,signed/intellij-community,suncycheng/intellij-community,suncycheng/intellij-community,allotria/intellij-community,lucafavatella/intellij-community,semonte/intellij-community,asedunov/intellij-community,xfournet/intellij-community,da1z/intellij-community,hurricup/intellij-community,asedunov/intellij-community,apixandru/intellij-community,semonte/intellij-community
|
def __getfilesystemencoding():
'''
Note: there's a copy of this method in interpreterInfo.py
'''
import sys
try:
ret = sys.getfilesystemencoding()
if not ret:
raise RuntimeError('Unable to get encoding.')
return ret
except:
try:
#Handle Jython
from java.lang import System # @UnresolvedImport
env = System.getProperty("os.name").lower()
if env.find('win') != -1:
return 'ISO-8859-1' #mbcs does not work on Jython, so, use a (hopefully) suitable replacement
return 'utf-8'
except:
pass
#Only available from 2.3 onwards.
if sys.platform == 'win32':
return 'mbcs'
return 'utf-8'
def getfilesystemencoding():
try:
ret = __getfilesystemencoding()
#Check if the encoding is actually there to be used!
if hasattr('', 'encode'):
''.encode(ret)
if hasattr('', 'decode'):
''.decode(ret)
return ret
except:
return 'utf-8'
Fix deadlock in remote debugger (PY-18546)
|
import sys
def __getfilesystemencoding():
'''
Note: there's a copy of this method in interpreterInfo.py
'''
try:
ret = sys.getfilesystemencoding()
if not ret:
raise RuntimeError('Unable to get encoding.')
return ret
except:
try:
#Handle Jython
from java.lang import System # @UnresolvedImport
env = System.getProperty("os.name").lower()
if env.find('win') != -1:
return 'ISO-8859-1' #mbcs does not work on Jython, so, use a (hopefully) suitable replacement
return 'utf-8'
except:
pass
#Only available from 2.3 onwards.
if sys.platform == 'win32':
return 'mbcs'
return 'utf-8'
def getfilesystemencoding():
try:
ret = __getfilesystemencoding()
#Check if the encoding is actually there to be used!
if hasattr('', 'encode'):
''.encode(ret)
if hasattr('', 'decode'):
''.decode(ret)
return ret
except:
return 'utf-8'
|
<commit_before>def __getfilesystemencoding():
'''
Note: there's a copy of this method in interpreterInfo.py
'''
import sys
try:
ret = sys.getfilesystemencoding()
if not ret:
raise RuntimeError('Unable to get encoding.')
return ret
except:
try:
#Handle Jython
from java.lang import System # @UnresolvedImport
env = System.getProperty("os.name").lower()
if env.find('win') != -1:
return 'ISO-8859-1' #mbcs does not work on Jython, so, use a (hopefully) suitable replacement
return 'utf-8'
except:
pass
#Only available from 2.3 onwards.
if sys.platform == 'win32':
return 'mbcs'
return 'utf-8'
def getfilesystemencoding():
try:
ret = __getfilesystemencoding()
#Check if the encoding is actually there to be used!
if hasattr('', 'encode'):
''.encode(ret)
if hasattr('', 'decode'):
''.decode(ret)
return ret
except:
return 'utf-8'
<commit_msg>Fix deadlock in remote debugger (PY-18546)<commit_after>
|
import sys
def __getfilesystemencoding():
'''
Note: there's a copy of this method in interpreterInfo.py
'''
try:
ret = sys.getfilesystemencoding()
if not ret:
raise RuntimeError('Unable to get encoding.')
return ret
except:
try:
#Handle Jython
from java.lang import System # @UnresolvedImport
env = System.getProperty("os.name").lower()
if env.find('win') != -1:
return 'ISO-8859-1' #mbcs does not work on Jython, so, use a (hopefully) suitable replacement
return 'utf-8'
except:
pass
#Only available from 2.3 onwards.
if sys.platform == 'win32':
return 'mbcs'
return 'utf-8'
def getfilesystemencoding():
try:
ret = __getfilesystemencoding()
#Check if the encoding is actually there to be used!
if hasattr('', 'encode'):
''.encode(ret)
if hasattr('', 'decode'):
''.decode(ret)
return ret
except:
return 'utf-8'
|
def __getfilesystemencoding():
'''
Note: there's a copy of this method in interpreterInfo.py
'''
import sys
try:
ret = sys.getfilesystemencoding()
if not ret:
raise RuntimeError('Unable to get encoding.')
return ret
except:
try:
#Handle Jython
from java.lang import System # @UnresolvedImport
env = System.getProperty("os.name").lower()
if env.find('win') != -1:
return 'ISO-8859-1' #mbcs does not work on Jython, so, use a (hopefully) suitable replacement
return 'utf-8'
except:
pass
#Only available from 2.3 onwards.
if sys.platform == 'win32':
return 'mbcs'
return 'utf-8'
def getfilesystemencoding():
try:
ret = __getfilesystemencoding()
#Check if the encoding is actually there to be used!
if hasattr('', 'encode'):
''.encode(ret)
if hasattr('', 'decode'):
''.decode(ret)
return ret
except:
return 'utf-8'
Fix deadlock in remote debugger (PY-18546)import sys
def __getfilesystemencoding():
'''
Note: there's a copy of this method in interpreterInfo.py
'''
try:
ret = sys.getfilesystemencoding()
if not ret:
raise RuntimeError('Unable to get encoding.')
return ret
except:
try:
#Handle Jython
from java.lang import System # @UnresolvedImport
env = System.getProperty("os.name").lower()
if env.find('win') != -1:
return 'ISO-8859-1' #mbcs does not work on Jython, so, use a (hopefully) suitable replacement
return 'utf-8'
except:
pass
#Only available from 2.3 onwards.
if sys.platform == 'win32':
return 'mbcs'
return 'utf-8'
def getfilesystemencoding():
try:
ret = __getfilesystemencoding()
#Check if the encoding is actually there to be used!
if hasattr('', 'encode'):
''.encode(ret)
if hasattr('', 'decode'):
''.decode(ret)
return ret
except:
return 'utf-8'
|
<commit_before>def __getfilesystemencoding():
'''
Note: there's a copy of this method in interpreterInfo.py
'''
import sys
try:
ret = sys.getfilesystemencoding()
if not ret:
raise RuntimeError('Unable to get encoding.')
return ret
except:
try:
#Handle Jython
from java.lang import System # @UnresolvedImport
env = System.getProperty("os.name").lower()
if env.find('win') != -1:
return 'ISO-8859-1' #mbcs does not work on Jython, so, use a (hopefully) suitable replacement
return 'utf-8'
except:
pass
#Only available from 2.3 onwards.
if sys.platform == 'win32':
return 'mbcs'
return 'utf-8'
def getfilesystemencoding():
try:
ret = __getfilesystemencoding()
#Check if the encoding is actually there to be used!
if hasattr('', 'encode'):
''.encode(ret)
if hasattr('', 'decode'):
''.decode(ret)
return ret
except:
return 'utf-8'
<commit_msg>Fix deadlock in remote debugger (PY-18546)<commit_after>import sys
def __getfilesystemencoding():
'''
Note: there's a copy of this method in interpreterInfo.py
'''
try:
ret = sys.getfilesystemencoding()
if not ret:
raise RuntimeError('Unable to get encoding.')
return ret
except:
try:
#Handle Jython
from java.lang import System # @UnresolvedImport
env = System.getProperty("os.name").lower()
if env.find('win') != -1:
return 'ISO-8859-1' #mbcs does not work on Jython, so, use a (hopefully) suitable replacement
return 'utf-8'
except:
pass
#Only available from 2.3 onwards.
if sys.platform == 'win32':
return 'mbcs'
return 'utf-8'
def getfilesystemencoding():
try:
ret = __getfilesystemencoding()
#Check if the encoding is actually there to be used!
if hasattr('', 'encode'):
''.encode(ret)
if hasattr('', 'decode'):
''.decode(ret)
return ret
except:
return 'utf-8'
|
195edf0e5578e0d30677b4da7375d8f04e9a91a1
|
alembic/versions/18ebf3181f87_add_a_negated_column_for_rules.py
|
alembic/versions/18ebf3181f87_add_a_negated_column_for_rules.py
|
"""Add a negated column for rules.
Revision ID: 18ebf3181f87
Revises: 4a95022fd7f3
Create Date: 2014-08-22 15:48:08.952913
"""
# revision identifiers, used by Alembic.
revision = '18ebf3181f87'
down_revision = '4a95022fd7f3'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('rules', sa.Column('negated', sa.Boolean(), default=False))
def downgrade():
op.drop_column('rules', 'negated')
|
Add alembic revision for that.
|
Add alembic revision for that.
|
Python
|
lgpl-2.1
|
jeremycline/fmn,jeremycline/fmn,jeremycline/fmn
|
Add alembic revision for that.
|
"""Add a negated column for rules.
Revision ID: 18ebf3181f87
Revises: 4a95022fd7f3
Create Date: 2014-08-22 15:48:08.952913
"""
# revision identifiers, used by Alembic.
revision = '18ebf3181f87'
down_revision = '4a95022fd7f3'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('rules', sa.Column('negated', sa.Boolean(), default=False))
def downgrade():
op.drop_column('rules', 'negated')
|
<commit_before><commit_msg>Add alembic revision for that.<commit_after>
|
"""Add a negated column for rules.
Revision ID: 18ebf3181f87
Revises: 4a95022fd7f3
Create Date: 2014-08-22 15:48:08.952913
"""
# revision identifiers, used by Alembic.
revision = '18ebf3181f87'
down_revision = '4a95022fd7f3'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('rules', sa.Column('negated', sa.Boolean(), default=False))
def downgrade():
op.drop_column('rules', 'negated')
|
Add alembic revision for that."""Add a negated column for rules.
Revision ID: 18ebf3181f87
Revises: 4a95022fd7f3
Create Date: 2014-08-22 15:48:08.952913
"""
# revision identifiers, used by Alembic.
revision = '18ebf3181f87'
down_revision = '4a95022fd7f3'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('rules', sa.Column('negated', sa.Boolean(), default=False))
def downgrade():
op.drop_column('rules', 'negated')
|
<commit_before><commit_msg>Add alembic revision for that.<commit_after>"""Add a negated column for rules.
Revision ID: 18ebf3181f87
Revises: 4a95022fd7f3
Create Date: 2014-08-22 15:48:08.952913
"""
# revision identifiers, used by Alembic.
revision = '18ebf3181f87'
down_revision = '4a95022fd7f3'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('rules', sa.Column('negated', sa.Boolean(), default=False))
def downgrade():
op.drop_column('rules', 'negated')
|
|
698273ac6863eeaa3963c84fac6a49a918cc261b
|
freelancefinder/freelancefinder/tests/test_xforwardedfor_middleware.py
|
freelancefinder/freelancefinder/tests/test_xforwardedfor_middleware.py
|
"""Test the X-Forwarded-For middleware."""
from ..middleware.xforwardedfor import xforwardedfor
def get_response_method(thing):
"""Do nothing."""
return thing
def test_setting_correctly(rf):
"""Override REMOTE_ADDR if HTTP_X_FORWARDED_FOR is present."""
request = rf.get('/')
request.META['REMOTE_ADDR'] = '192.168.1.1'
request.META['HTTP_X_FORWARDED_FOR'] = '192.168.1.2'
xforwardedfor_middleware = xforwardedfor(get_response_method)
response = xforwardedfor_middleware(request)
assert response is not None
assert request.META['REMOTE_ADDR'] == '192.168.1.2'
def test_nothing_on_missing_value(rf):
"""Don't override REMOTE_ADDR if HTTP_X_FORWARDED_FOR is not present."""
request = rf.get('/')
request.META['REMOTE_ADDR'] = '192.168.1.1'
xforwardedfor_middleware = xforwardedfor(get_response_method)
response = xforwardedfor_middleware(request)
assert response is not None
assert request.META['REMOTE_ADDR'] == '192.168.1.1'
|
Add tests for xforwardedfor middleware
|
Add tests for xforwardedfor middleware
|
Python
|
bsd-3-clause
|
ScorpionResponse/freelancefinder,ScorpionResponse/freelancefinder,ScorpionResponse/freelancefinder
|
Add tests for xforwardedfor middleware
|
"""Test the X-Forwarded-For middleware."""
from ..middleware.xforwardedfor import xforwardedfor
def get_response_method(thing):
"""Do nothing."""
return thing
def test_setting_correctly(rf):
"""Override REMOTE_ADDR if HTTP_X_FORWARDED_FOR is present."""
request = rf.get('/')
request.META['REMOTE_ADDR'] = '192.168.1.1'
request.META['HTTP_X_FORWARDED_FOR'] = '192.168.1.2'
xforwardedfor_middleware = xforwardedfor(get_response_method)
response = xforwardedfor_middleware(request)
assert response is not None
assert request.META['REMOTE_ADDR'] == '192.168.1.2'
def test_nothing_on_missing_value(rf):
"""Don't override REMOTE_ADDR if HTTP_X_FORWARDED_FOR is not present."""
request = rf.get('/')
request.META['REMOTE_ADDR'] = '192.168.1.1'
xforwardedfor_middleware = xforwardedfor(get_response_method)
response = xforwardedfor_middleware(request)
assert response is not None
assert request.META['REMOTE_ADDR'] == '192.168.1.1'
|
<commit_before><commit_msg>Add tests for xforwardedfor middleware<commit_after>
|
"""Test the X-Forwarded-For middleware."""
from ..middleware.xforwardedfor import xforwardedfor
def get_response_method(thing):
"""Do nothing."""
return thing
def test_setting_correctly(rf):
"""Override REMOTE_ADDR if HTTP_X_FORWARDED_FOR is present."""
request = rf.get('/')
request.META['REMOTE_ADDR'] = '192.168.1.1'
request.META['HTTP_X_FORWARDED_FOR'] = '192.168.1.2'
xforwardedfor_middleware = xforwardedfor(get_response_method)
response = xforwardedfor_middleware(request)
assert response is not None
assert request.META['REMOTE_ADDR'] == '192.168.1.2'
def test_nothing_on_missing_value(rf):
"""Don't override REMOTE_ADDR if HTTP_X_FORWARDED_FOR is not present."""
request = rf.get('/')
request.META['REMOTE_ADDR'] = '192.168.1.1'
xforwardedfor_middleware = xforwardedfor(get_response_method)
response = xforwardedfor_middleware(request)
assert response is not None
assert request.META['REMOTE_ADDR'] == '192.168.1.1'
|
Add tests for xforwardedfor middleware"""Test the X-Forwarded-For middleware."""
from ..middleware.xforwardedfor import xforwardedfor
def get_response_method(thing):
"""Do nothing."""
return thing
def test_setting_correctly(rf):
"""Override REMOTE_ADDR if HTTP_X_FORWARDED_FOR is present."""
request = rf.get('/')
request.META['REMOTE_ADDR'] = '192.168.1.1'
request.META['HTTP_X_FORWARDED_FOR'] = '192.168.1.2'
xforwardedfor_middleware = xforwardedfor(get_response_method)
response = xforwardedfor_middleware(request)
assert response is not None
assert request.META['REMOTE_ADDR'] == '192.168.1.2'
def test_nothing_on_missing_value(rf):
"""Don't override REMOTE_ADDR if HTTP_X_FORWARDED_FOR is not present."""
request = rf.get('/')
request.META['REMOTE_ADDR'] = '192.168.1.1'
xforwardedfor_middleware = xforwardedfor(get_response_method)
response = xforwardedfor_middleware(request)
assert response is not None
assert request.META['REMOTE_ADDR'] == '192.168.1.1'
|
<commit_before><commit_msg>Add tests for xforwardedfor middleware<commit_after>"""Test the X-Forwarded-For middleware."""
from ..middleware.xforwardedfor import xforwardedfor
def get_response_method(thing):
"""Do nothing."""
return thing
def test_setting_correctly(rf):
"""Override REMOTE_ADDR if HTTP_X_FORWARDED_FOR is present."""
request = rf.get('/')
request.META['REMOTE_ADDR'] = '192.168.1.1'
request.META['HTTP_X_FORWARDED_FOR'] = '192.168.1.2'
xforwardedfor_middleware = xforwardedfor(get_response_method)
response = xforwardedfor_middleware(request)
assert response is not None
assert request.META['REMOTE_ADDR'] == '192.168.1.2'
def test_nothing_on_missing_value(rf):
"""Don't override REMOTE_ADDR if HTTP_X_FORWARDED_FOR is not present."""
request = rf.get('/')
request.META['REMOTE_ADDR'] = '192.168.1.1'
xforwardedfor_middleware = xforwardedfor(get_response_method)
response = xforwardedfor_middleware(request)
assert response is not None
assert request.META['REMOTE_ADDR'] == '192.168.1.1'
|
|
bff3f017a889da1922355e05b598895f59d841de
|
horizontalIlluminance.py
|
horizontalIlluminance.py
|
#!/usr/bin/env python
import numpy as np
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
# This code plots the horizontal illuminance inside a room using a SISO array.
# Semi-angle at half illuminance (degree)
tethaHalf = 70
# Lambertian emission order (adimensional)
m = -np.log(2)/np.log10(np.cos(np.deg2rad(tethaHalf)))
# Center luminous intensity (cd)
I0 = 0.73
# Room's dimensions
dimZ = 3
dimX = 5
dimY = dimX
# LED's position
xt = dimX*0.5
yt = dimY*0.5
# Grid number in the receiver plane
ngx = dimX*10
ngy = dimY*10
# Generate the grid vectors
x = np.linspace(0,dimX,ngx)
y = np.linspace(0,dimY,ngy)
# Distance between the transmitter and receiver plane (m)
ht = 3
hr = 0.85
htr = ht - hr
# Numbers of LEDs per array
nLed = 60
# Generate the receiver plane based on grid vectors
[xr, yr] = np.meshgrid(x,y)
# Create a zero matrix to store values of horizontal illuminance
E = np.zeros((ngx,ngy))
# Distance vector from source to receiver plane
d = np.sqrt(np.square(xr-xt) + np.square(yr-yt) + np.square(htr))
# Cos(tetha)
cosTetha = htr/d
# Get individual horizontal illuminace per LED
E = (I0*(cosTetha)**(m+1) )/np.square(d)
# Get the horizontal illuminance per LED array
E = E*nLed*nLed
fig = plt.figure()
figE = fig.add_subplot(111, projection='3d')
figE.plot_surface(x,y,E)
figE.set_xlabel('X (m)')
figE.set_ylabel('Y (m)')
figE.set_zlabel('Horizontal Illuminance')
plt.show()
|
Create code to plot Horizontal Illuminance.
|
Create code to plot Horizontal Illuminance.
|
Python
|
mit
|
sophiekovalevsky/Visible-Light-Communication
|
Create code to plot Horizontal Illuminance.
|
#!/usr/bin/env python
import numpy as np
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
# This code plot the horizontal illuminance inside a room using a SISO array.
# Semi-angle at half illuminance (degree)
tethaHalf = 70
# Lambertian emission order (adimensional)
m = -np.log(2)/np.log10(np.cos(np.deg2rad(tethaHalf)))
# Center luminous intensity (cd)
I0 = 0.73
# Room's dimensions
dimZ = 3
dimX = 5
dimY = dimX
# LED's position
xt = dimX*0.5
yt = dimY*0.5
# Grid number in the receiver plane
ngx = dimX*10
ngy = dimY*10
# Generate the grid vectors
x = np.linspace(0,dimX,ngx)
y = np.linspace(0,dimY,ngy)
# Distance between the transmitter and receiver plane (m)
ht = 3
hr = 0.85
htr = ht - hr
# Numbers of LEDs per array
nLed = 60
# Generate the receiver plane based on grid vectors
[xr, yr] = np.meshgrid(x,y)
# Create a zero matrix to store values of horizontal iluminance
E = np.zeros((ngx,ngy))
# Distance vector from source to receiver plane
d = np.sqrt(np.square(xr-xt) + np.square(yr-yt) + np.square(htr))
# Cos(tetha)
cosTetha = htr/d
# Get individual horizontal illuminace per LED
E = (I0*(cosTetha)**(m+1) )/np.square(d)
# Get the horizontal illuminance per LED array
E = E*nLed*nLed
fig = plt.figure()
figE = fig.add_subplot(111, projection='3d')
figE.plot_surface(x,y,E)
figE.set_xlabel('X (m)')
figE.set_ylabel('Y (m)')
figE.set_zlabel('Horizontal Illuminance')
plt.show()
|
<commit_before><commit_msg>Create code to plot Horizontal Illuminance.<commit_after>
|
#!/usr/bin/env python
import numpy as np
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
# This code plot the horizontal illuminance inside a room using a SISO array.
# Semi-angle at half illuminance (degree)
tethaHalf = 70
# Lambertian emission order (adimensional)
m = -np.log(2)/np.log10(np.cos(np.deg2rad(tethaHalf)))
# Center luminous intensity (cd)
I0 = 0.73
# Room's dimensions
dimZ = 3
dimX = 5
dimY = dimX
# LED's position
xt = dimX*0.5
yt = dimY*0.5
# Grid number in the receiver plane
ngx = dimX*10
ngy = dimY*10
# Generate the grid vectors
x = np.linspace(0,dimX,ngx)
y = np.linspace(0,dimY,ngy)
# Distance between the transmitter and receiver plane (m)
ht = 3
hr = 0.85
htr = ht - hr
# Numbers of LEDs per array
nLed = 60
# Generate the receiver plane based on grid vectors
[xr, yr] = np.meshgrid(x,y)
# Create a zero matrix to store values of horizontal iluminance
E = np.zeros((ngx,ngy))
# Distance vector from source to receiver plane
d = np.sqrt(np.square(xr-xt) + np.square(yr-yt) + np.square(htr))
# Cos(tetha)
cosTetha = htr/d
# Get individual horizontal illuminace per LED
E = (I0*(cosTetha)**(m+1) )/np.square(d)
# Get the horizontal illuminance per LED array
E = E*nLed*nLed
fig = plt.figure()
figE = fig.add_subplot(111, projection='3d')
figE.plot_surface(x,y,E)
figE.set_xlabel('X (m)')
figE.set_ylabel('Y (m)')
figE.set_zlabel('Horizontal Illuminance')
plt.show()
|
Create code to plot Horizontal Illuminance.#!/usr/bin/env python
import numpy as np
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
# This code plot the horizontal illuminance inside a room using a SISO array.
# Semi-angle at half illuminance (degree)
tethaHalf = 70
# Lambertian emission order (adimensional)
m = -np.log(2)/np.log10(np.cos(np.deg2rad(tethaHalf)))
# Center luminous intensity (cd)
I0 = 0.73
# Room's dimensions
dimZ = 3
dimX = 5
dimY = dimX
# LED's position
xt = dimX*0.5
yt = dimY*0.5
# Grid number in the receiver plane
ngx = dimX*10
ngy = dimY*10
# Generate the grid vectors
x = np.linspace(0,dimX,ngx)
y = np.linspace(0,dimY,ngy)
# Distance between the transmitter and receiver plane (m)
ht = 3
hr = 0.85
htr = ht - hr
# Numbers of LEDs per array
nLed = 60
# Generate the receiver plane based on grid vectors
[xr, yr] = np.meshgrid(x,y)
# Create a zero matrix to store values of horizontal iluminance
E = np.zeros((ngx,ngy))
# Distance vector from source to receiver plane
d = np.sqrt(np.square(xr-xt) + np.square(yr-yt) + np.square(htr))
# Cos(tetha)
cosTetha = htr/d
# Get individual horizontal illuminace per LED
E = (I0*(cosTetha)**(m+1) )/np.square(d)
# Get the horizontal illuminance per LED array
E = E*nLed*nLed
fig = plt.figure()
figE = fig.add_subplot(111, projection='3d')
figE.plot_surface(x,y,E)
figE.set_xlabel('X (m)')
figE.set_ylabel('Y (m)')
figE.set_zlabel('Horizontal Illuminance')
plt.show()
|
<commit_before><commit_msg>Create code to plot Horizontal Illuminance.<commit_after>#!/usr/bin/env python
import numpy as np
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
# This code plot the horizontal illuminance inside a room using a SISO array.
# Semi-angle at half illuminance (degree)
tethaHalf = 70
# Lambertian emission order (adimensional)
m = -np.log(2)/np.log10(np.cos(np.deg2rad(tethaHalf)))
# Center luminous intensity (cd)
I0 = 0.73
# Room's dimensions
dimZ = 3
dimX = 5
dimY = dimX
# LED's position
xt = dimX*0.5
yt = dimY*0.5
# Grid number in the receiver plane
ngx = dimX*10
ngy = dimY*10
# Generate the grid vectors
x = np.linspace(0,dimX,ngx)
y = np.linspace(0,dimY,ngy)
# Distance between the transmitter and receiver plane (m)
ht = 3
hr = 0.85
htr = ht - hr
# Numbers of LEDs per array
nLed = 60
# Generate the receiver plane based on grid vectors
[xr, yr] = np.meshgrid(x,y)
# Create a zero matrix to store values of horizontal iluminance
E = np.zeros((ngx,ngy))
# Distance vector from source to receiver plane
d = np.sqrt(np.square(xr-xt) + np.square(yr-yt) + np.square(htr))
# Cos(tetha)
cosTetha = htr/d
# Get individual horizontal illuminace per LED
E = (I0*(cosTetha)**(m+1) )/np.square(d)
# Get the horizontal illuminance per LED array
E = E*nLed*nLed
fig = plt.figure()
figE = fig.add_subplot(111, projection='3d')
figE.plot_surface(x,y,E)
figE.set_xlabel('X (m)')
figE.set_ylabel('Y (m)')
figE.set_zlabel('Horizontal Illuminance')
plt.show()
|
|
014010bec210bc4ebb7d24dbdd21a0cdf75bee2f
|
migrations/versions/990_add_missing_nice_to_have_requirements_field.py
|
migrations/versions/990_add_missing_nice_to_have_requirements_field.py
|
"""Adds missing 'niceToHaveRequirements' to old published Brief data blobs.
Revision ID: 990
Revises: 980
Create Date: 2017-09-05 17:08:57.947569
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '990'
down_revision = '980'
briefs_table = sa.Table(
'briefs',
sa.MetaData(),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('data', sa.JSON, nullable=True),
sa.Column('published_at', sa.DateTime, nullable=True)
)
def upgrade():
conn = op.get_bind()
# SELECT id, data FROM briefs WHERE briefs.published_at IS NOT null
query = briefs_table.select(
briefs_table.c.published_at != sa.null()
).with_only_columns(
(
briefs_table.c.id,
briefs_table.c.data
)
)
results = conn.execute(query).fetchall()
for brief_id, brief_data in results:
if 'niceToHaveRequirements' not in brief_data:
brief_data['niceToHaveRequirements'] = []
# UPDATE briefs SET data = brief_data WHERE id = brief_id;
query = briefs_table.update().where(briefs_table.c.id == brief_id).values(data=brief_data)
conn.execute(query)
def downgrade():
pass
|
Fix old Briefs missing niceToHaveRequirements field
|
Fix old Briefs missing niceToHaveRequirements field
This was causing errors when suppliers applied to these Briefs
and any copies of them.
|
Python
|
mit
|
alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api
|
Fix old Briefs missing niceToHaveRequirements field
This was causing errors when suppliers applied to these Briefs
and any copies of them.
|
"""Adds missing 'niceToHaveRequirements' to old published Brief data blobs.
Revision ID: 990
Revises: 980
Create Date: 2017-09-05 17:08:57.947569
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '990'
down_revision = '980'
briefs_table = sa.Table(
'briefs',
sa.MetaData(),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('data', sa.JSON, nullable=True),
sa.Column('published_at', sa.DateTime, nullable=True)
)
def upgrade():
conn = op.get_bind()
# SELECT id, data FROM briefs WHERE briefs.published_at IS NOT null
query = briefs_table.select(
briefs_table.c.published_at != sa.null()
).with_only_columns(
(
briefs_table.c.id,
briefs_table.c.data
)
)
results = conn.execute(query).fetchall()
for brief_id, brief_data in results:
if 'niceToHaveRequirements' not in brief_data:
brief_data['niceToHaveRequirements'] = []
# UPDATE briefs SET data = brief_data WHERE id = brief_id;
query = briefs_table.update().where(briefs_table.c.id == brief_id).values(data=brief_data)
conn.execute(query)
def downgrade():
pass
|
<commit_before><commit_msg>Fix old Briefs missing niceToHaveRequirements field
This was causing errors when suppliers applied to these Briefs
and any copies of them.<commit_after>
|
"""Adds missing 'niceToHaveRequirements' to old published Brief data blobs.
Revision ID: 990
Revises: 980
Create Date: 2017-09-05 17:08:57.947569
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '990'
down_revision = '980'
briefs_table = sa.Table(
'briefs',
sa.MetaData(),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('data', sa.JSON, nullable=True),
sa.Column('published_at', sa.DateTime, nullable=True)
)
def upgrade():
conn = op.get_bind()
# SELECT id, data FROM briefs WHERE briefs.published_at IS NOT null
query = briefs_table.select(
briefs_table.c.published_at != sa.null()
).with_only_columns(
(
briefs_table.c.id,
briefs_table.c.data
)
)
results = conn.execute(query).fetchall()
for brief_id, brief_data in results:
if 'niceToHaveRequirements' not in brief_data:
brief_data['niceToHaveRequirements'] = []
# UPDATE briefs SET data = brief_data WHERE id = brief_id;
query = briefs_table.update().where(briefs_table.c.id == brief_id).values(data=brief_data)
conn.execute(query)
def downgrade():
pass
|
Fix old Briefs missing niceToHaveRequirements field
This was causing errors when suppliers applied to these Briefs
and any copies of them."""Adds missing 'niceToHaveRequirements' to old published Brief data blobs.
Revision ID: 990
Revises: 980
Create Date: 2017-09-05 17:08:57.947569
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '990'
down_revision = '980'
briefs_table = sa.Table(
'briefs',
sa.MetaData(),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('data', sa.JSON, nullable=True),
sa.Column('published_at', sa.DateTime, nullable=True)
)
def upgrade():
conn = op.get_bind()
# SELECT id, data FROM briefs WHERE briefs.published_at IS NOT null
query = briefs_table.select(
briefs_table.c.published_at != sa.null()
).with_only_columns(
(
briefs_table.c.id,
briefs_table.c.data
)
)
results = conn.execute(query).fetchall()
for brief_id, brief_data in results:
if 'niceToHaveRequirements' not in brief_data:
brief_data['niceToHaveRequirements'] = []
# UPDATE briefs SET data = brief_data WHERE id = brief_id;
query = briefs_table.update().where(briefs_table.c.id == brief_id).values(data=brief_data)
conn.execute(query)
def downgrade():
pass
|
<commit_before><commit_msg>Fix old Briefs missing niceToHaveRequirements field
This was causing errors when suppliers applied to these Briefs
and any copies of them.<commit_after>"""Adds missing 'niceToHaveRequirements' to old published Brief data blobs.
Revision ID: 990
Revises: 980
Create Date: 2017-09-05 17:08:57.947569
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '990'
down_revision = '980'
briefs_table = sa.Table(
'briefs',
sa.MetaData(),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('data', sa.JSON, nullable=True),
sa.Column('published_at', sa.DateTime, nullable=True)
)
def upgrade():
conn = op.get_bind()
# SELECT id, data FROM briefs WHERE briefs.published_at IS NOT null
query = briefs_table.select(
briefs_table.c.published_at != sa.null()
).with_only_columns(
(
briefs_table.c.id,
briefs_table.c.data
)
)
results = conn.execute(query).fetchall()
for brief_id, brief_data in results:
if 'niceToHaveRequirements' not in brief_data:
brief_data['niceToHaveRequirements'] = []
# UPDATE briefs SET data = brief_data WHERE id = brief_id;
query = briefs_table.update().where(briefs_table.c.id == brief_id).values(data=brief_data)
conn.execute(query)
def downgrade():
pass
|
|
b7da0da12d74d111cff72e74ae487864c53fa808
|
django_backend_test/django_backend_test/celery.py
|
django_backend_test/django_backend_test/celery.py
|
from __future__ import absolute_import, unicode_literals
import os
from celery import Celery
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'django_backend_test.local_settings')
from django.conf import settings
app = Celery('django_backend_test')
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
@app.task(bind=True)
def debug_task(self):
print('Request: {0!r}'.format(self.request))
|
Add Celery file to async tasks
|
Add Celery file to async tasks
|
Python
|
mit
|
semorale/backend-test,semorale/backend-test,semorale/backend-test
|
Add Celery file to async tasks
|
from __future__ import absolute_import, unicode_literals
import os
from celery import Celery
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'django_backend_test.local_settings')
from django.conf import settings
app = Celery('django_backend_test')
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
@app.task(bind=True)
def debug_task(self):
print('Request: {0!r}'.format(self.request))
|
<commit_before><commit_msg>Add Celery file to async tasks<commit_after>
|
from __future__ import absolute_import, unicode_literals
import os
from celery import Celery
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'django_backend_test.local_settings')
from django.conf import settings
app = Celery('django_backend_test')
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
@app.task(bind=True)
def debug_task(self):
print('Request: {0!r}'.format(self.request))
|
Add Celery file to async tasksfrom __future__ import absolute_import, unicode_literals
import os
from celery import Celery
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'django_backend_test.local_settings')
from django.conf import settings
app = Celery('django_backend_test')
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
@app.task(bind=True)
def debug_task(self):
print('Request: {0!r}'.format(self.request))
|
<commit_before><commit_msg>Add Celery file to async tasks<commit_after>from __future__ import absolute_import, unicode_literals
import os
from celery import Celery
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'django_backend_test.local_settings')
from django.conf import settings
app = Celery('django_backend_test')
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
@app.task(bind=True)
def debug_task(self):
print('Request: {0!r}'.format(self.request))
|
|
dd1d51ba29b2f3729431c1552fd955697d04a0f7
|
museum_site/migrations/0004_alter_file_company.py
|
museum_site/migrations/0004_alter_file_company.py
|
# Generated by Django 3.2.7 on 2021-10-28 19:02
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('museum_site', '0003_auto_20211028_1858'),
]
operations = [
migrations.AlterField(
model_name='file',
name='company',
field=models.CharField(blank=True, default='', max_length=255),
),
]
|
Increase length of company field
|
Increase length of company field
|
Python
|
mit
|
DrDos0016/z2,DrDos0016/z2,DrDos0016/z2
|
Increase length of company field
|
# Generated by Django 3.2.7 on 2021-10-28 19:02
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('museum_site', '0003_auto_20211028_1858'),
]
operations = [
migrations.AlterField(
model_name='file',
name='company',
field=models.CharField(blank=True, default='', max_length=255),
),
]
|
<commit_before><commit_msg>Increase length of company field<commit_after>
|
# Generated by Django 3.2.7 on 2021-10-28 19:02
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('museum_site', '0003_auto_20211028_1858'),
]
operations = [
migrations.AlterField(
model_name='file',
name='company',
field=models.CharField(blank=True, default='', max_length=255),
),
]
|
Increase length of company field# Generated by Django 3.2.7 on 2021-10-28 19:02
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('museum_site', '0003_auto_20211028_1858'),
]
operations = [
migrations.AlterField(
model_name='file',
name='company',
field=models.CharField(blank=True, default='', max_length=255),
),
]
|
<commit_before><commit_msg>Increase length of company field<commit_after># Generated by Django 3.2.7 on 2021-10-28 19:02
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('museum_site', '0003_auto_20211028_1858'),
]
operations = [
migrations.AlterField(
model_name='file',
name='company',
field=models.CharField(blank=True, default='', max_length=255),
),
]
|
|
81e3cc0800793b9e540066831b72ab6df3a1d358
|
scripts/add_user.py
|
scripts/add_user.py
|
# !/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Script to add a new user."""
import os
import sys
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
from apps import db
from apps.models.models import *
def main():
# Add user
name = input("Name: ")
passwd = input("Password: ")
user = User(name=name, passwd=passwd)
db.session.add(user)
db.session.commit()
if __name__ == "__main__":
main()
|
Add a script to add manually a new user
|
[UPD] Add a script to add manually a new user
|
Python
|
mit
|
frapac/bibtex-browser,frapac/bibtex-browser,frapac/bibtex-browser
|
[UPD] Add a script to add manually a new user
|
# !/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Script to add a new user."""
import os
import sys
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
from apps import db
from apps.models.models import *
def main():
# Add user
name = input("Name: ")
passwd = input("Password: ")
user = User(name=name, passwd=passwd)
db.session.add(user)
db.session.commit()
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>[UPD] Add a script to add manually a new user<commit_after>
|
# !/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Script to add a new user."""
import os
import sys
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
from apps import db
from apps.models.models import *
def main():
# Add user
name = input("Name: ")
passwd = input("Password: ")
user = User(name=name, passwd=passwd)
db.session.add(user)
db.session.commit()
if __name__ == "__main__":
main()
|
[UPD] Add a script to add manually a new user# !/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Script to add a new user."""
import os
import sys
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
from apps import db
from apps.models.models import *
def main():
# Add user
name = input("Name: ")
passwd = input("Password: ")
user = User(name=name, passwd=passwd)
db.session.add(user)
db.session.commit()
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>[UPD] Add a script to add manually a new user<commit_after># !/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Script to add a new user."""
import os
import sys
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
from apps import db
from apps.models.models import *
def main():
# Add user
name = input("Name: ")
passwd = input("Password: ")
user = User(name=name, passwd=passwd)
db.session.add(user)
db.session.commit()
if __name__ == "__main__":
main()
|
|
0e562045ad47f55f799054dd29c51a465ac926a3
|
python/array/RemoveDuplicatesFromSortedArrayII.py
|
python/array/RemoveDuplicatesFromSortedArrayII.py
|
#Too many mistakes were made.
#0. should use while loop instead of for.
#1. messed up with index. maybe with the sleepy head at 3pm
class Solution:
# @param a list of integers
# @return an integer
def removeDuplicates(self, A):
result = len(A)
if result == 0:
return result
curr = 0
next_index = 1
start_index = 1
if next_index < len(A) and A[next_index] == A[curr]:
next_index += 1
start_index += 1
i = start_index
while i < len(A):
if A[i] > A[curr]:
temp = A[next_index]
A[next_index] = A[i]
A[i] = temp
curr = next_index
next_index += 1
if (i + 1) < len(A) and A[i+1] == A[curr]:
A[next_index] = A[i+1]
next_index += 1
i += 1
else:
result -= 1
i += 1
print A
return result
if __name__ == "__main__":
solution = Solution()
A = [1, 1, 1, 1, 2, 2, 2, 2, 3]
result = solution.removeDuplicates(A)
print A[:result]
|
Remove Duplicates From Sorted Array II
|
Remove Duplicates From Sorted Array II
|
Python
|
mit
|
sureleo/leetcode,sureleo/leetcode,lsingal/leetcode,sureleo/leetcode,lsingal/leetcode,lsingal/leetcode
|
Remove Duplicates From Sorted Array II
|
#Too many mistakes were made.
#0. should use while loop instead of for.
#1. messed up with index. maybe with the sleepy head at 3pm
class Solution:
# @param a list of integers
# @return an integer
def removeDuplicates(self, A):
result = len(A)
if result == 0:
return result
curr = 0
next_index = 1
start_index = 1
if next_index < len(A) and A[next_index] == A[curr]:
next_index += 1
start_index += 1
i = start_index
while i < len(A):
if A[i] > A[curr]:
temp = A[next_index]
A[next_index] = A[i]
A[i] = temp
curr = next_index
next_index += 1
if (i + 1) < len(A) and A[i+1] == A[curr]:
A[next_index] = A[i+1]
next_index += 1
i += 1
else:
result -= 1
i += 1
print A
return result
if __name__ == "__main__":
solution = Solution()
A = [1, 1, 1, 1, 2, 2, 2, 2, 3]
result = solution.removeDuplicates(A)
print A[:result]
|
<commit_before><commit_msg>Remove Duplicates From Sorted Array II<commit_after>
|
#Too many mistakes were made.
#0. should use while loop instead of for.
#1. messed up with index. maybe with the sleepy head at 3pm
class Solution:
# @param a list of integers
# @return an integer
def removeDuplicates(self, A):
result = len(A)
if result == 0:
return result
curr = 0
next_index = 1
start_index = 1
if next_index < len(A) and A[next_index] == A[curr]:
next_index += 1
start_index += 1
i = start_index
while i < len(A):
if A[i] > A[curr]:
temp = A[next_index]
A[next_index] = A[i]
A[i] = temp
curr = next_index
next_index += 1
if (i + 1) < len(A) and A[i+1] == A[curr]:
A[next_index] = A[i+1]
next_index += 1
i += 1
else:
result -= 1
i += 1
print A
return result
if __name__ == "__main__":
solution = Solution()
A = [1, 1, 1, 1, 2, 2, 2, 2, 3]
result = solution.removeDuplicates(A)
print A[:result]
|
Remove Duplicates From Sorted Array II#Too many mistakes were made.
#0. should use while loop instead of for.
#1. messed up with index. maybe with the sleepy head at 3pm
class Solution:
# @param a list of integers
# @return an integer
def removeDuplicates(self, A):
result = len(A)
if result == 0:
return result
curr = 0
next_index = 1
start_index = 1
if next_index < len(A) and A[next_index] == A[curr]:
next_index += 1
start_index += 1
i = start_index
while i < len(A):
if A[i] > A[curr]:
temp = A[next_index]
A[next_index] = A[i]
A[i] = temp
curr = next_index
next_index += 1
if (i + 1) < len(A) and A[i+1] == A[curr]:
A[next_index] = A[i+1]
next_index += 1
i += 1
else:
result -= 1
i += 1
print A
return result
if __name__ == "__main__":
solution = Solution()
A = [1, 1, 1, 1, 2, 2, 2, 2, 3]
result = solution.removeDuplicates(A)
print A[:result]
|
<commit_before><commit_msg>Remove Duplicates From Sorted Array II<commit_after>#Too many mistakes were made.
#0. should use while loop instead of for.
#1. messed up with index. maybe with the sleepy head at 3pm
class Solution:
# @param a list of integers
# @return an integer
def removeDuplicates(self, A):
result = len(A)
if result == 0:
return result
curr = 0
next_index = 1
start_index = 1
if next_index < len(A) and A[next_index] == A[curr]:
next_index += 1
start_index += 1
i = start_index
while i < len(A):
if A[i] > A[curr]:
temp = A[next_index]
A[next_index] = A[i]
A[i] = temp
curr = next_index
next_index += 1
if (i + 1) < len(A) and A[i+1] == A[curr]:
A[next_index] = A[i+1]
next_index += 1
i += 1
else:
result -= 1
i += 1
print A
return result
if __name__ == "__main__":
solution = Solution()
A = [1, 1, 1, 1, 2, 2, 2, 2, 3]
result = solution.removeDuplicates(A)
print A[:result]
|
|
bb9e626b3d8d8d83aa3d0eb951b5610041ce6070
|
paasta_tools/contrib/check_registered_slaves_aws.py
|
paasta_tools/contrib/check_registered_slaves_aws.py
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import unicode_literals
import argparse
import sys
from paasta_tools.autoscaling.autoscaling_cluster_lib import get_sfr
from paasta_tools.autoscaling.autoscaling_cluster_lib import get_sfr_slaves
from paasta_tools.autoscaling.autoscaling_cluster_lib import get_spot_fleet_instances
from paasta_tools.mesos_tools import get_mesos_master
from paasta_tools.utils import load_system_paasta_config
def check_registration(threshold_percentage):
mesos_state = get_mesos_master().state
autoscaling_resources = load_system_paasta_config().get_cluster_autoscaling_resources()
for resource in autoscaling_resources.values():
if resource['type'] == 'aws_spot_fleet_request':
resource['sfr'] = get_sfr(resource['id'], region=resource['region'])
instances = get_spot_fleet_instances(resource['id'], region=resource['region'])
resource['sfr']['ActiveInstances'] = instances
slaves = get_sfr_slaves(resource, mesos_state)
if len(instances) == 0:
continue
else:
percent_registered = float(float(len(slaves)) / float(len(instances))) * 100
if percent_registered < float(threshold_percentage):
print "CRIT: Only found {0}% of instances in {1} registered in mesos. "\
"Please check for puppet or AMI baking problems!".format(percent_registered,
resource['id'])
return False
print "OK: Found more than {0}% of instances registered for all paasta resources in this "\
"superregion".format(threshold_percentage)
return True
def main():
parser = argparse.ArgumentParser()
parser.add_argument("-t", "--threshold", help="percentage threshold for registered instances",
default="75")
threshold = parser.parse_args().threshold
if check_registration(threshold):
sys.exit(0)
sys.exit(2)
if __name__ == "__main__":
main()
|
Check AWS instances register in mesos
|
Check AWS instances register in mesos
This is a quick attempt to give us visibility of AWS resources that
don't end up registering in mesos.
This covers a lot of potential problems with the bootstrap process.
This could be better:
* maybe should check how long a resource has been hanging around for
* also check ASGs (I will update this when I merge my ASG branch)
|
Python
|
apache-2.0
|
somic/paasta,Yelp/paasta,Yelp/paasta,somic/paasta
|
Check AWS instances register in mesos
This is a quick attempt to give us visibility of AWS resources that
don't end up registering in mesos.
This covers a lot of potential problems with the bootstrap process.
This could be better:
* maybe should check how long a resource has been hanging around for
* also check ASGs (I will update this when I merge my ASG branch)
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import unicode_literals
import argparse
import sys
from paasta_tools.autoscaling.autoscaling_cluster_lib import get_sfr
from paasta_tools.autoscaling.autoscaling_cluster_lib import get_sfr_slaves
from paasta_tools.autoscaling.autoscaling_cluster_lib import get_spot_fleet_instances
from paasta_tools.mesos_tools import get_mesos_master
from paasta_tools.utils import load_system_paasta_config
def check_registration(threshold_percentage):
mesos_state = get_mesos_master().state
autoscaling_resources = load_system_paasta_config().get_cluster_autoscaling_resources()
for resource in autoscaling_resources.values():
if resource['type'] == 'aws_spot_fleet_request':
resource['sfr'] = get_sfr(resource['id'], region=resource['region'])
instances = get_spot_fleet_instances(resource['id'], region=resource['region'])
resource['sfr']['ActiveInstances'] = instances
slaves = get_sfr_slaves(resource, mesos_state)
if len(instances) == 0:
continue
else:
percent_registered = float(float(len(slaves)) / float(len(instances))) * 100
if percent_registered < float(threshold_percentage):
print "CRIT: Only found {0}% of instances in {1} registered in mesos. "\
"Please check for puppet or AMI baking problems!".format(percent_registered,
resource['id'])
return False
print "OK: Found more than {0}% of instances registered for all paasta resources in this "\
"superregion".format(threshold_percentage)
return True
def main():
parser = argparse.ArgumentParser()
parser.add_argument("-t", "--threshold", help="percentage threshold for registered instances",
default="75")
threshold = parser.parse_args().threshold
if check_registration(threshold):
sys.exit(0)
sys.exit(2)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Check AWS instances register in mesos
This is a quick attempt to give us visibility of AWS resources that
don't end up registering in mesos.
This covers a lot of potential problems with the bootstrap process.
This could be better:
* maybe should check how long a resource has been hanging around for
* also check ASGs (I will update this when I merge my ASG branch)<commit_after>
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import unicode_literals
import argparse
import sys
from paasta_tools.autoscaling.autoscaling_cluster_lib import get_sfr
from paasta_tools.autoscaling.autoscaling_cluster_lib import get_sfr_slaves
from paasta_tools.autoscaling.autoscaling_cluster_lib import get_spot_fleet_instances
from paasta_tools.mesos_tools import get_mesos_master
from paasta_tools.utils import load_system_paasta_config
def check_registration(threshold_percentage):
mesos_state = get_mesos_master().state
autoscaling_resources = load_system_paasta_config().get_cluster_autoscaling_resources()
for resource in autoscaling_resources.values():
if resource['type'] == 'aws_spot_fleet_request':
resource['sfr'] = get_sfr(resource['id'], region=resource['region'])
instances = get_spot_fleet_instances(resource['id'], region=resource['region'])
resource['sfr']['ActiveInstances'] = instances
slaves = get_sfr_slaves(resource, mesos_state)
if len(instances) == 0:
continue
else:
percent_registered = float(float(len(slaves)) / float(len(instances))) * 100
if percent_registered < float(threshold_percentage):
print "CRIT: Only found {0}% of instances in {1} registered in mesos. "\
"Please check for puppet or AMI baking problems!".format(percent_registered,
resource['id'])
return False
print "OK: Found more than {0}% of instances registered for all paasta resources in this "\
"superregion".format(threshold_percentage)
return True
def main():
parser = argparse.ArgumentParser()
parser.add_argument("-t", "--threshold", help="percentage threshold for registered instances",
default="75")
threshold = parser.parse_args().threshold
if check_registration(threshold):
sys.exit(0)
sys.exit(2)
if __name__ == "__main__":
main()
|
Check AWS instances register in mesos
This is a quick attempt to give us visibility of AWS resources that
don't end up registering in mesos.
This covers a lot of potential problems with the bootstrap process.
This could be better:
* maybe should check how long a resource has been hanging around for
* also check ASGs (I will update this when I merge my ASG branch)#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import unicode_literals
import argparse
import sys
from paasta_tools.autoscaling.autoscaling_cluster_lib import get_sfr
from paasta_tools.autoscaling.autoscaling_cluster_lib import get_sfr_slaves
from paasta_tools.autoscaling.autoscaling_cluster_lib import get_spot_fleet_instances
from paasta_tools.mesos_tools import get_mesos_master
from paasta_tools.utils import load_system_paasta_config
def check_registration(threshold_percentage):
mesos_state = get_mesos_master().state
autoscaling_resources = load_system_paasta_config().get_cluster_autoscaling_resources()
for resource in autoscaling_resources.values():
if resource['type'] == 'aws_spot_fleet_request':
resource['sfr'] = get_sfr(resource['id'], region=resource['region'])
instances = get_spot_fleet_instances(resource['id'], region=resource['region'])
resource['sfr']['ActiveInstances'] = instances
slaves = get_sfr_slaves(resource, mesos_state)
if len(instances) == 0:
continue
else:
percent_registered = float(float(len(slaves)) / float(len(instances))) * 100
if percent_registered < float(threshold_percentage):
print "CRIT: Only found {0}% of instances in {1} registered in mesos. "\
"Please check for puppet or AMI baking problems!".format(percent_registered,
resource['id'])
return False
print "OK: Found more than {0}% of instances registered for all paasta resources in this "\
"superregion".format(threshold_percentage)
return True
def main():
parser = argparse.ArgumentParser()
parser.add_argument("-t", "--threshold", help="percentage threshold for registered instances",
default="75")
threshold = parser.parse_args().threshold
if check_registration(threshold):
sys.exit(0)
sys.exit(2)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Check AWS instances register in mesos
This is a quick attempt to give us visibility of AWS resources that
don't end up registering in mesos.
This covers a lot of potential problems with the bootstrap process.
This could be better:
* maybe should check how long a resource has been hanging around for
* also check ASGs (I will update this when I merge my ASG branch)<commit_after>#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import unicode_literals
import argparse
import sys
from paasta_tools.autoscaling.autoscaling_cluster_lib import get_sfr
from paasta_tools.autoscaling.autoscaling_cluster_lib import get_sfr_slaves
from paasta_tools.autoscaling.autoscaling_cluster_lib import get_spot_fleet_instances
from paasta_tools.mesos_tools import get_mesos_master
from paasta_tools.utils import load_system_paasta_config
def check_registration(threshold_percentage):
mesos_state = get_mesos_master().state
autoscaling_resources = load_system_paasta_config().get_cluster_autoscaling_resources()
for resource in autoscaling_resources.values():
if resource['type'] == 'aws_spot_fleet_request':
resource['sfr'] = get_sfr(resource['id'], region=resource['region'])
instances = get_spot_fleet_instances(resource['id'], region=resource['region'])
resource['sfr']['ActiveInstances'] = instances
slaves = get_sfr_slaves(resource, mesos_state)
if len(instances) == 0:
continue
else:
percent_registered = float(float(len(slaves)) / float(len(instances))) * 100
if percent_registered < float(threshold_percentage):
print "CRIT: Only found {0}% of instances in {1} registered in mesos. "\
"Please check for puppet or AMI baking problems!".format(percent_registered,
resource['id'])
return False
print "OK: Found more than {0}% of instances registered for all paasta resources in this "\
"superregion".format(threshold_percentage)
return True
def main():
parser = argparse.ArgumentParser()
parser.add_argument("-t", "--threshold", help="percentage threshold for registered instances",
default="75")
threshold = parser.parse_args().threshold
if check_registration(threshold):
sys.exit(0)
sys.exit(2)
if __name__ == "__main__":
main()
|
|
b5abd635f5aee8c6a89aa51136e49913e41d256a
|
pyxform/tests_v1/test_validate_unicode_exception.py
|
pyxform/tests_v1/test_validate_unicode_exception.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class ValidateUnicodeException(PyxformTestCase):
"""
Validation errors may include non-ASCII characters. In particular, ODK Validate
uses ͎ (small arrow) to indicate where a problem starts.
"""
def test_validate_unicode_exception(self):
self.assertPyxformXform(
md="""
| survey | | | | |
| | type | name | label | calculation |
| | calculate | bad | bad | $(myField)='1' |
""",
run_odk_validate=True,
odk_validate_error__contains=[
u"Invalid calculate for the bind attached to \"${bad}\" : Couldn't understand the expression starting at this point:",
])
|
Add test for unicode characters in Validate errors
|
Add test for unicode characters in Validate errors
|
Python
|
bsd-2-clause
|
XLSForm/pyxform,XLSForm/pyxform
|
Add test for unicode characters in Validate errors
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class ValidateUnicodeException(PyxformTestCase):
"""
Validation errors may include non-ASCII characters. In particular, ODK Validate
uses ͎ (small arrow) to indicate where a problem starts.
"""
def test_validate_unicode_exception(self):
self.assertPyxformXform(
md="""
| survey | | | | |
| | type | name | label | calculation |
| | calculate | bad | bad | $(myField)='1' |
""",
run_odk_validate=True,
odk_validate_error__contains=[
u"Invalid calculate for the bind attached to \"${bad}\" : Couldn't understand the expression starting at this point:",
])
|
<commit_before><commit_msg>Add test for unicode characters in Validate errors<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class ValidateUnicodeException(PyxformTestCase):
"""
Validation errors may include non-ASCII characters. In particular, ODK Validate
uses ͎ (small arrow) to indicate where a problem starts.
"""
def test_validate_unicode_exception(self):
self.assertPyxformXform(
md="""
| survey | | | | |
| | type | name | label | calculation |
| | calculate | bad | bad | $(myField)='1' |
""",
run_odk_validate=True,
odk_validate_error__contains=[
u"Invalid calculate for the bind attached to \"${bad}\" : Couldn't understand the expression starting at this point:",
])
|
Add test for unicode characters in Validate errors#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class ValidateUnicodeException(PyxformTestCase):
"""
Validation errors may include non-ASCII characters. In particular, ODK Validate
uses ͎ (small arrow) to indicate where a problem starts.
"""
def test_validate_unicode_exception(self):
self.assertPyxformXform(
md="""
| survey | | | | |
| | type | name | label | calculation |
| | calculate | bad | bad | $(myField)='1' |
""",
run_odk_validate=True,
odk_validate_error__contains=[
u"Invalid calculate for the bind attached to \"${bad}\" : Couldn't understand the expression starting at this point:",
])
|
<commit_before><commit_msg>Add test for unicode characters in Validate errors<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class ValidateUnicodeException(PyxformTestCase):
"""
Validation errors may include non-ASCII characters. In particular, ODK Validate
uses ͎ (small arrow) to indicate where a problem starts.
"""
def test_validate_unicode_exception(self):
self.assertPyxformXform(
md="""
| survey | | | | |
| | type | name | label | calculation |
| | calculate | bad | bad | $(myField)='1' |
""",
run_odk_validate=True,
odk_validate_error__contains=[
u"Invalid calculate for the bind attached to \"${bad}\" : Couldn't understand the expression starting at this point:",
])
|
|
5d1bad7ebb1121349d08260554368553c02d1a37
|
fuzzing/fuzz_websocket_parser.py
|
fuzzing/fuzz_websocket_parser.py
|
import sys
import atheris
with atheris.instrument_imports():
from websockets.exceptions import PayloadTooBig, ProtocolError
from websockets.frames import Frame
from websockets.streams import StreamReader
def test_one_input(data):
fdp = atheris.FuzzedDataProvider(data)
mask = fdp.ConsumeBool()
max_size_enabled = fdp.ConsumeBool()
max_size = fdp.ConsumeInt(4)
payload = fdp.ConsumeBytes(atheris.ALL_REMAINING)
reader = StreamReader()
reader.feed_data(payload)
reader.feed_eof()
parser = Frame.parse(
reader.read_exact,
mask=mask,
max_size=max_size if max_size_enabled else None,
)
try:
next(parser)
except StopIteration:
pass # response is available in exc.value
except (
PayloadTooBig, # frame's payload size exceeds ``max_size``
ProtocolError, # frame contains incorrect values
):
pass
def main():
atheris.Setup(sys.argv, test_one_input)
atheris.Fuzz()
if __name__ == "__main__":
main()
|
Add fuzz target for WebSocket parser.
|
Add fuzz target for WebSocket parser.
|
Python
|
bsd-3-clause
|
aaugustin/websockets,aaugustin/websockets,aaugustin/websockets,aaugustin/websockets
|
Add fuzz target for WebSocket parser.
|
import sys
import atheris
with atheris.instrument_imports():
from websockets.exceptions import PayloadTooBig, ProtocolError
from websockets.frames import Frame
from websockets.streams import StreamReader
def test_one_input(data):
fdp = atheris.FuzzedDataProvider(data)
mask = fdp.ConsumeBool()
max_size_enabled = fdp.ConsumeBool()
max_size = fdp.ConsumeInt(4)
payload = fdp.ConsumeBytes(atheris.ALL_REMAINING)
reader = StreamReader()
reader.feed_data(payload)
reader.feed_eof()
parser = Frame.parse(
reader.read_exact,
mask=mask,
max_size=max_size if max_size_enabled else None,
)
try:
next(parser)
except StopIteration:
pass # response is available in exc.value
except (
PayloadTooBig, # frame's payload size exceeds ``max_size``
ProtocolError, # frame contains incorrect values
):
pass
def main():
atheris.Setup(sys.argv, test_one_input)
atheris.Fuzz()
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add fuzz target for WebSocket parser.<commit_after>
|
import sys
import atheris
with atheris.instrument_imports():
from websockets.exceptions import PayloadTooBig, ProtocolError
from websockets.frames import Frame
from websockets.streams import StreamReader
def test_one_input(data):
fdp = atheris.FuzzedDataProvider(data)
mask = fdp.ConsumeBool()
max_size_enabled = fdp.ConsumeBool()
max_size = fdp.ConsumeInt(4)
payload = fdp.ConsumeBytes(atheris.ALL_REMAINING)
reader = StreamReader()
reader.feed_data(payload)
reader.feed_eof()
parser = Frame.parse(
reader.read_exact,
mask=mask,
max_size=max_size if max_size_enabled else None,
)
try:
next(parser)
except StopIteration:
pass # response is available in exc.value
except (
PayloadTooBig, # frame's payload size exceeds ``max_size``
ProtocolError, # frame contains incorrect values
):
pass
def main():
atheris.Setup(sys.argv, test_one_input)
atheris.Fuzz()
if __name__ == "__main__":
main()
|
Add fuzz target for WebSocket parser.import sys
import atheris
with atheris.instrument_imports():
from websockets.exceptions import PayloadTooBig, ProtocolError
from websockets.frames import Frame
from websockets.streams import StreamReader
def test_one_input(data):
fdp = atheris.FuzzedDataProvider(data)
mask = fdp.ConsumeBool()
max_size_enabled = fdp.ConsumeBool()
max_size = fdp.ConsumeInt(4)
payload = fdp.ConsumeBytes(atheris.ALL_REMAINING)
reader = StreamReader()
reader.feed_data(payload)
reader.feed_eof()
parser = Frame.parse(
reader.read_exact,
mask=mask,
max_size=max_size if max_size_enabled else None,
)
try:
next(parser)
except StopIteration:
pass # response is available in exc.value
except (
PayloadTooBig, # frame's payload size exceeds ``max_size``
ProtocolError, # frame contains incorrect values
):
pass
def main():
atheris.Setup(sys.argv, test_one_input)
atheris.Fuzz()
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add fuzz target for WebSocket parser.<commit_after>import sys
import atheris
with atheris.instrument_imports():
from websockets.exceptions import PayloadTooBig, ProtocolError
from websockets.frames import Frame
from websockets.streams import StreamReader
def test_one_input(data):
fdp = atheris.FuzzedDataProvider(data)
mask = fdp.ConsumeBool()
max_size_enabled = fdp.ConsumeBool()
max_size = fdp.ConsumeInt(4)
payload = fdp.ConsumeBytes(atheris.ALL_REMAINING)
reader = StreamReader()
reader.feed_data(payload)
reader.feed_eof()
parser = Frame.parse(
reader.read_exact,
mask=mask,
max_size=max_size if max_size_enabled else None,
)
try:
next(parser)
except StopIteration:
pass # response is available in exc.value
except (
PayloadTooBig, # frame's payload size exceeds ``max_size``
ProtocolError, # frame contains incorrect values
):
pass
def main():
atheris.Setup(sys.argv, test_one_input)
atheris.Fuzz()
if __name__ == "__main__":
main()
|
|
632b5b3fedef17b908cb29d777b52b69d2127da1
|
actstream/migrations/0003_auto_20160528_1411.py
|
actstream/migrations/0003_auto_20160528_1411.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-05-28 14:11
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('actstream', '0002_remove_action_data'),
]
operations = [
migrations.AlterField(
model_name='action',
name='timestamp',
field=models.DateTimeField(auto_now_add=True, db_index=True),
),
]
|
Add migration for changes to model
|
Add migration for changes to model
|
Python
|
bsd-3-clause
|
jrsupplee/django-activity-stream,jrsupplee/django-activity-stream
|
Add migration for changes to model
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-05-28 14:11
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('actstream', '0002_remove_action_data'),
]
operations = [
migrations.AlterField(
model_name='action',
name='timestamp',
field=models.DateTimeField(auto_now_add=True, db_index=True),
),
]
|
<commit_before><commit_msg>Add migration for changes to model<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-05-28 14:11
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('actstream', '0002_remove_action_data'),
]
operations = [
migrations.AlterField(
model_name='action',
name='timestamp',
field=models.DateTimeField(auto_now_add=True, db_index=True),
),
]
|
Add migration for changes to model# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-05-28 14:11
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('actstream', '0002_remove_action_data'),
]
operations = [
migrations.AlterField(
model_name='action',
name='timestamp',
field=models.DateTimeField(auto_now_add=True, db_index=True),
),
]
|
<commit_before><commit_msg>Add migration for changes to model<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-05-28 14:11
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('actstream', '0002_remove_action_data'),
]
operations = [
migrations.AlterField(
model_name='action',
name='timestamp',
field=models.DateTimeField(auto_now_add=True, db_index=True),
),
]
|