| commit (string, 40 chars) | old_file (string, 4-118 chars) | new_file (string, 4-118 chars) | old_contents (string, 0-2.94k chars) | new_contents (string, 1-4.43k chars) | subject (string, 15-444 chars) | message (string, 16-3.45k chars) | lang (string, 1 class) | license (string, 13 classes) | repos (string, 5-43.2k chars) |
|---|---|---|---|---|---|---|---|---|---|
a6d05f3c1a33381a07d459c1fdff93bc4ba30594
|
pidman/pid/migrations/0002_pid_sequence_initial_value.py
|
pidman/pid/migrations/0002_pid_sequence_initial_value.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models

from pidman.pid.noid import decode_noid
from pidman.pid import models as pid_models


def pid_sequence_lastvalue(apps, schema_editor):
    # if the database has existing pids, update the sequence last value
    # so it will start minting pids starting after the current set
    Pid = apps.get_model("pid", "Pid")
    Sequence = apps.get_model("sequences", "Sequence")
    if Pid.objects.count():
        max_noid = Pid.objects.all() \
            .aggregate(models.Max('pid')).values()[0]
        last_val = decode_noid(max_noid)
        pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME,
                                                          last=last_val)
        pid_seq.save()


def remove_pid_sequence(apps, schema_editor):
    Sequence = apps.get_model("sequences", "Sequence")
    Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete()


class Migration(migrations.Migration):

    dependencies = [
        ('pid', '0001_initial'),
        ('sequences', '0001_initial'),
    ]

    operations = [
        migrations.RunPython(pid_sequence_lastvalue,
                             remove_pid_sequence),
    ]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models

from pidman.pid.noid import decode_noid, encode_noid
from pidman.pid import models as pid_models


def pid_sequence_lastvalue(apps, schema_editor):
    # if the database has existing pids, update the sequence last value
    # so it will start minting pids starting after the current set
    Pid = apps.get_model("pid", "Pid")
    Sequence = apps.get_model("sequences", "Sequence")
    if Pid.objects.count():
        # pid noids are generated in sequence, so the pid with the
        # highest pk _should_ be the one with the highest noid
        max_noid = Pid.objects.all().order_by('pk').last().pid
        # (previously using aggregate max, but doesn't seem to find
        # the highest pid value correctly)
        last_val = decode_noid(max_noid)
        pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME,
                                                          last=last_val)
        pid_seq.save()


def remove_pid_sequence(apps, schema_editor):
    Sequence = apps.get_model("sequences", "Sequence")
    Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete()


class Migration(migrations.Migration):

    dependencies = [
        ('pid', '0001_initial'),
        ('sequences', '0001_initial'),
    ]

    operations = [
        migrations.RunPython(pid_sequence_lastvalue,
                             remove_pid_sequence),
    ]
|
Fix max noid detection when setting pid sequence
|
Fix max noid detection when setting pid sequence
|
Python
|
apache-2.0
|
emory-libraries/pidman,emory-libraries/pidman
|
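The fix in this record is worth spelling out: `aggregate(models.Max('pid'))` returns the lexicographically greatest pid string, and string order only agrees with numeric order while every noid has the same length, so a newly minted longer noid can sort below an older shorter one. A minimal sketch of the failure mode, using a hypothetical base-36 encoder in place of pidman's actual noid alphabet:

```python
# Illustration only: a stand-in encoder, not pidman's noid implementation.
ALPHABET = "0123456789abcdefghijklmnopqrstuvwxyz"

def encode(n):
    digits = ""
    while True:
        digits = ALPHABET[n % 36] + digits
        n //= 36
        if n == 0:
            return digits

noids = [encode(n) for n in (35, 36, 100)]    # ['z', '10', '2s']
print(max(noids))                             # 'z'  -- lexicographic max (wrong)
print(max(noids, key=lambda s: (len(s), s)))  # '2s' -- numeric max, i.e. 100
```

Ordering by primary key, as the new migration does, sidesteps the string comparison entirely.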
eedb22b1be419130ffc4a349c3ec4b83879b44bd
|
client/demo_assignments/hw1_tests.py
|
client/demo_assignments/hw1_tests.py
|
"""Tests for hw1 demo assignment."""
TEST_INFO = {
'assignment': 'hw1',
'imports': ['from hw1 import *'],
}
TESTS = [
# Test square
{
'name': ('Q1', 'q1', '1'),
'suites': [
[
['square(4)', '16'],
['square(-5)', '25'],
],
],
},
# Test double
{
'name': ('Q2', 'q2', '2'),
'suites': [
[
['double(4)', '8'],
['double(-5)', '-10'],
],
],
},
]
|
"""Tests for hw1 demo assignment."""
assignment = {
'name': 'hw1',
'imports': ['from hw1 import *'],
'version': '1.0',
# Specify tests that should not be locked
'no_lock': {
},
'tests': [
# Test square
{
# The first name is the "official" name.
'name': ['Q1', 'q1', '1'],
# No explicit point value -- each test suite counts as 1 point
'suites': [
[
{
'type': 'code', # Code question.
'input': 'square(4)',
'output': ['16'], # List of outputs, even if only one
},
{
'type': 'concept', # Concept question.
'input': """
What type of input does the square function take?
""",
'output': [
# Denote multiple choice with a list, rather than
# a string.
[
'number', # Correct choice comes first.
'string',
'None',
]
],
},
{
# If type is omitted, default type is 'code'.
'input': """
x = -5
square(-5)
""",
# Last line in a multiline input is used as the prompt.
'output': ['25'],
# Additional statuses can be included here.
'status': {
'lock': False,
}
},
],
],
},
# Test double
{
'name': ['Q2', 'q2', '2'],
# Point value specified -- points are partitioned evenly across
# suites.
'points': 4,
'suites': [
[
{
'input': 'double(4)',
'output': ['8'],
}
],
[
{
# Cases with multiple outputs: lines with expected output
# are denoted by '$ '.
'input': """
x = double(4)
$ x
$ double(x)
""",
'output': ['8', '16']
},
{
'input': """
x = double(2)
$ x
$ square(x)
""",
'output': ['4', '16'],
},
],
],
},
],
}
|
Make proposed testing format with demo assignment
|
Make proposed testing format with demo assignment
|
Python
|
apache-2.0
|
jordonwii/ok,jackzhao-mj/ok,jackzhao-mj/ok,Cal-CS-61A-Staff/ok,jordonwii/ok,Cal-CS-61A-Staff/ok,jackzhao-mj/ok,Cal-CS-61A-Staff/ok,jordonwii/ok,Cal-CS-61A-Staff/ok,Cal-CS-61A-Staff/ok,jordonwii/ok,jackzhao-mj/ok
|
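The proposed format nests case dicts inside suites inside tests, so a consumer has to walk three levels and apply the documented defaults (first listed name is the official one; a missing `type` means `'code'`). A sketch of such a walker, assuming only what the format's own comments state; `iter_cases` is a hypothetical helper, not part of the ok client:

```python
def iter_cases(assignment):
    """Yield (official_name, case) pairs from the proposed test format."""
    for test in assignment['tests']:
        official_name = test['name'][0]  # first listed name is the official one
        for suite in test['suites']:
            for case in suite:
                # Missing 'type' defaults to 'code', per the format's comments.
                yield official_name, dict(case, type=case.get('type', 'code'))

for name, case in iter_cases(assignment):
    print(name, case['type'], case['output'])
```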
"""Tests for hw1 demo assignment."""
TEST_INFO = {
'assignment': 'hw1',
'imports': ['from hw1 import *'],
}
TESTS = [
# Test square
{
'name': ('Q1', 'q1', '1'),
'suites': [
[
['square(4)', '16'],
['square(-5)', '25'],
],
],
},
# Test double
{
'name': ('Q2', 'q2', '2'),
'suites': [
[
['double(4)', '8'],
['double(-5)', '-10'],
],
],
},
]
Make proposed testing format with demo assignment
|
"""Tests for hw1 demo assignment."""
assignment = {
'name': 'hw1',
'imports': ['from hw1 import *'],
'version': '1.0',
# Specify tests that should not be locked
'no_lock': {
},
'tests': [
# Test square
{
# The first name is the "official" name.
'name': ['Q1', 'q1', '1'],
# No explicit point value -- each test suite counts as 1 point
'suites': [
[
{
'type': 'code', # Code question.
'input': 'square(4)',
'output': ['16'], # List of outputs, even if only one
},
{
'type': 'concept', # Concept question.
'input': """
What type of input does the square function take?
""",
'output': [
# Denote multiple choice with a list, rather than
# a string.
[
'number', # Correct choice comes first.
'string',
'None',
]
],
},
{
# If type is omitted, default type is 'code'.
'input': """
x = -5
square(-5)
""",
# Last line in a multiline input is used as the prompt.
'output': ['25'],
# Additional statuses can be included here.
'status': {
'lock': False,
}
},
],
],
},
# Test double
{
'name': ['Q2', 'q2', '2'],
# Point value specified -- points are partitioned evenly across
# suites.
'points': 4,
'suites': [
[
{
'input': 'double(4)',
'output': ['8'],
}
],
[
{
# Cases with multiple outputs: lines with expected output
# are denoted by '$ '.
'input': """
x = double(4)
$ x
$ double(x)
""",
'output': ['8', '16']
},
{
'input': """
x = double(2)
$ x
$ square(x)
""",
'output': ['4', '16'],
},
],
],
},
],
}
|
<commit_before>"""Tests for hw1 demo assignment."""
TEST_INFO = {
'assignment': 'hw1',
'imports': ['from hw1 import *'],
}
TESTS = [
# Test square
{
'name': ('Q1', 'q1', '1'),
'suites': [
[
['square(4)', '16'],
['square(-5)', '25'],
],
],
},
# Test double
{
'name': ('Q2', 'q2', '2'),
'suites': [
[
['double(4)', '8'],
['double(-5)', '-10'],
],
],
},
]
<commit_msg>Make proposed testing format with demo assignment<commit_after>
|
"""Tests for hw1 demo assignment."""
assignment = {
'name': 'hw1',
'imports': ['from hw1 import *'],
'version': '1.0',
# Specify tests that should not be locked
'no_lock': {
},
'tests': [
# Test square
{
# The first name is the "official" name.
'name': ['Q1', 'q1', '1'],
# No explicit point value -- each test suite counts as 1 point
'suites': [
[
{
'type': 'code', # Code question.
'input': 'square(4)',
'output': ['16'], # List of outputs, even if only one
},
{
'type': 'concept', # Concept question.
'input': """
What type of input does the square function take?
""",
'output': [
# Denote multiple choice with a list, rather than
# a string.
[
'number', # Correct choice comes first.
'string',
'None',
]
],
},
{
# If type is omitted, default type is 'code'.
'input': """
x = -5
square(-5)
""",
# Last line in a multiline input is used as the prompt.
'output': ['25'],
# Additional statuses can be included here.
'status': {
'lock': False,
}
},
],
],
},
# Test double
{
'name': ['Q2', 'q2', '2'],
# Point value specified -- points are partitioned evenly across
# suites.
'points': 4,
'suites': [
[
{
'input': 'double(4)',
'output': ['8'],
}
],
[
{
# Cases with multiple outputs: lines with expected output
# are denoted by '$ '.
'input': """
x = double(4)
$ x
$ double(x)
""",
'output': ['8', '16']
},
{
'input': """
x = double(2)
$ x
$ square(x)
""",
'output': ['4', '16'],
},
],
],
},
],
}
|
"""Tests for hw1 demo assignment."""
TEST_INFO = {
'assignment': 'hw1',
'imports': ['from hw1 import *'],
}
TESTS = [
# Test square
{
'name': ('Q1', 'q1', '1'),
'suites': [
[
['square(4)', '16'],
['square(-5)', '25'],
],
],
},
# Test double
{
'name': ('Q2', 'q2', '2'),
'suites': [
[
['double(4)', '8'],
['double(-5)', '-10'],
],
],
},
]
Make proposed testing format with demo assignment"""Tests for hw1 demo assignment."""
assignment = {
'name': 'hw1',
'imports': ['from hw1 import *'],
'version': '1.0',
# Specify tests that should not be locked
'no_lock': {
},
'tests': [
# Test square
{
# The first name is the "official" name.
'name': ['Q1', 'q1', '1'],
# No explicit point value -- each test suite counts as 1 point
'suites': [
[
{
'type': 'code', # Code question.
'input': 'square(4)',
'output': ['16'], # List of outputs, even if only one
},
{
'type': 'concept', # Concept question.
'input': """
What type of input does the square function take?
""",
'output': [
# Denote multiple choice with a list, rather than
# a string.
[
'number', # Correct choice comes first.
'string',
'None',
]
],
},
{
# If type is omitted, default type is 'code'.
'input': """
x = -5
square(-5)
""",
# Last line in a multiline input is used as the prompt.
'output': ['25'],
# Additional statuses can be included here.
'status': {
'lock': False,
}
},
],
],
},
# Test double
{
'name': ['Q2', 'q2', '2'],
# Point value specified -- points are partitioned evenly across
# suites.
'points': 4,
'suites': [
[
{
'input': 'double(4)',
'output': ['8'],
}
],
[
{
# Cases with multiple outputs: lines with expected output
# are denoted by '$ '.
'input': """
x = double(4)
$ x
$ double(x)
""",
'output': ['8', '16']
},
{
'input': """
x = double(2)
$ x
$ square(x)
""",
'output': ['4', '16'],
},
],
],
},
],
}
|
<commit_before>"""Tests for hw1 demo assignment."""
TEST_INFO = {
'assignment': 'hw1',
'imports': ['from hw1 import *'],
}
TESTS = [
# Test square
{
'name': ('Q1', 'q1', '1'),
'suites': [
[
['square(4)', '16'],
['square(-5)', '25'],
],
],
},
# Test double
{
'name': ('Q2', 'q2', '2'),
'suites': [
[
['double(4)', '8'],
['double(-5)', '-10'],
],
],
},
]
<commit_msg>Make proposed testing format with demo assignment<commit_after>"""Tests for hw1 demo assignment."""
assignment = {
'name': 'hw1',
'imports': ['from hw1 import *'],
'version': '1.0',
# Specify tests that should not be locked
'no_lock': {
},
'tests': [
# Test square
{
# The first name is the "official" name.
'name': ['Q1', 'q1', '1'],
# No explicit point value -- each test suite counts as 1 point
'suites': [
[
{
'type': 'code', # Code question.
'input': 'square(4)',
'output': ['16'], # List of outputs, even if only one
},
{
'type': 'concept', # Concept question.
'input': """
What type of input does the square function take?
""",
'output': [
# Denote multiple choice with a list, rather than
# a string.
[
'number', # Correct choice comes first.
'string',
'None',
]
],
},
{
# If type is omitted, default type is 'code'.
'input': """
x = -5
square(-5)
""",
# Last line in a multiline input is used as the prompt.
'output': ['25'],
# Additional statuses can be included here.
'status': {
'lock': False,
}
},
],
],
},
# Test double
{
'name': ['Q2', 'q2', '2'],
# Point value specified -- points are partitioned evenly across
# suites.
'points': 4,
'suites': [
[
{
'input': 'double(4)',
'output': ['8'],
}
],
[
{
# Cases with multiple outputs: lines with expected output
# are denoted by '$ '.
'input': """
x = double(4)
$ x
$ double(x)
""",
'output': ['8', '16']
},
{
'input': """
x = double(2)
$ x
$ square(x)
""",
'output': ['4', '16'],
},
],
],
},
],
}
|
7d6c5ead9f754606d732db8566311c4d3e6fe54f
|
tests.py
|
tests.py
|
"""Tests runner for modoboa_admin."""
import unittest
from modoboa.lib.test_utils import TestRunnerMixin
class TestRunner(TestRunnerMixin, unittest.TestCase):
"""The tests runner."""
extension = "modoboa_admin_limits"
|
"""Tests runner for modoboa_admin."""
import unittest
from modoboa.lib.test_utils import TestRunnerMixin
class TestRunner(TestRunnerMixin, unittest.TestCase):
"""The tests runner."""
extension = "modoboa_admin_limits"
dependencies = [
"modoboa_admin"
]
|
Make sure to activate modoboa_admin.
|
Make sure to activate modoboa_admin.
|
Python
|
mit
|
disko/modoboa-admin-limits,disko/modoboa-admin-limits
|
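The added `dependencies` attribute only makes sense given what the mixin does with it: `TestRunnerMixin` reads class attributes to decide which extensions to enable before the suite runs. A plausible reading of that setup, assumed rather than taken from the modoboa source (`activate` is a hypothetical stand-in):

```python
# Assumed sketch of TestRunnerMixin's setup, for illustration only.
class TestRunnerMixin(object):
    dependencies = []  # extensions to activate alongside the one under test

    def setUp(self):
        for name in list(self.dependencies) + [self.extension]:
            activate(name)  # hypothetical: enable the named extension
```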
"""Tests runner for modoboa_admin."""
import unittest
from modoboa.lib.test_utils import TestRunnerMixin
class TestRunner(TestRunnerMixin, unittest.TestCase):
"""The tests runner."""
extension = "modoboa_admin_limits"
Make sure to activate modoboa_admin.
|
"""Tests runner for modoboa_admin."""
import unittest
from modoboa.lib.test_utils import TestRunnerMixin
class TestRunner(TestRunnerMixin, unittest.TestCase):
"""The tests runner."""
extension = "modoboa_admin_limits"
dependencies = [
"modoboa_admin"
]
|
<commit_before>"""Tests runner for modoboa_admin."""
import unittest
from modoboa.lib.test_utils import TestRunnerMixin
class TestRunner(TestRunnerMixin, unittest.TestCase):
"""The tests runner."""
extension = "modoboa_admin_limits"
<commit_msg>Make sure to activate modoboa_admin.<commit_after>
|
"""Tests runner for modoboa_admin."""
import unittest
from modoboa.lib.test_utils import TestRunnerMixin
class TestRunner(TestRunnerMixin, unittest.TestCase):
"""The tests runner."""
extension = "modoboa_admin_limits"
dependencies = [
"modoboa_admin"
]
|
"""Tests runner for modoboa_admin."""
import unittest
from modoboa.lib.test_utils import TestRunnerMixin
class TestRunner(TestRunnerMixin, unittest.TestCase):
"""The tests runner."""
extension = "modoboa_admin_limits"
Make sure to activate modoboa_admin."""Tests runner for modoboa_admin."""
import unittest
from modoboa.lib.test_utils import TestRunnerMixin
class TestRunner(TestRunnerMixin, unittest.TestCase):
"""The tests runner."""
extension = "modoboa_admin_limits"
dependencies = [
"modoboa_admin"
]
|
<commit_before>"""Tests runner for modoboa_admin."""
import unittest
from modoboa.lib.test_utils import TestRunnerMixin
class TestRunner(TestRunnerMixin, unittest.TestCase):
"""The tests runner."""
extension = "modoboa_admin_limits"
<commit_msg>Make sure to activate modoboa_admin.<commit_after>"""Tests runner for modoboa_admin."""
import unittest
from modoboa.lib.test_utils import TestRunnerMixin
class TestRunner(TestRunnerMixin, unittest.TestCase):
"""The tests runner."""
extension = "modoboa_admin_limits"
dependencies = [
"modoboa_admin"
]
|
f5f7eb086aff7cdc61bbfa850b638db5b7e0d211
|
tests/test_order.py
|
tests/test_order.py
|
# -*- coding: utf-8 -*-
"""Functional tests using WebTest.

See: http://webtest.readthedocs.org/

TESTS MUST START WITH "test"
"""
from flask import url_for


class TestBreakTheOrder:
    """
    Breaking the order
    """

    def test_order_is_not_not_found(self, testapp):
        """
        There actually is an order... Amazing.
        I know, right?
        """
        # !!! URL needs the / at the end.
        res = testapp.get('/orders/')
        assert res.status_code != 404

    def test_order_is_accessible(self, testapp):
        """
        Breaching the order?! Success!
        """
        # testapp made available from the tests module
        res = testapp.get('/orders/')
        assert res.status_code == 200

    def test_order_has_list_of_not_beer(self, testapp):
        """
        Range of beer is NOT available!
        Do I look like Robin?
        """
        res = testapp.get('/orders/orders')
        # i have discovered that "string" in res is case sensitive
        # in general to know more see:
        # http://webtest.readthedocs.io/en/latest/api.html#webtest-response-testresponse
        assert "List of NOT beer" in res

    def test_browse_list_returns_empty_list(self, order, testapp):
        res = testapp.get('/orders/ordersList')
        assert "data" in res
|
# -*- coding: utf-8 -*-
"""Functional tests using WebTest.

See: http://webtest.readthedocs.org/

TESTS MUST START WITH "test"
"""
from flask import url_for


class TestBreakTheOrder:
    """
    Breaking the order
    """

    def test_order_gives_401_without_login(self, testapp):
        """
        There actually is an order... Amazing.
        I know, right?
        """
        # !!! URL needs the / at the end.
        res = testapp.get('/orders/', expect_errors=True)
        print(res)
        print(res.status_code)
        assert res.status_code == 401

    def test_order_has_list_of_not_beer(self, testapp):
        """
        Range of beer is NOT available!
        Do I look like Robin?
        """
        res = testapp.get('/orders/orders')
        # i have discovered that "string" in res is case sensitive
        # in general to know more see:
        # http://webtest.readthedocs.io/en/latest/api.html#webtest-response-testresponse
        assert "List of NOT beer" in res

    def test_browse_list_returns_empty_list(self, order, testapp):
        res = testapp.get('/orders/ordersList')
        assert "data" in res
|
Update test order to check for 401.
|
Update test order to check for 401.
|
Python
|
bsd-3-clause
|
robin-lee/store,tankca/store,tankca/store,William93/store,boomcan90/store,tankca/store,William93/store,William93/store,robin-lee/store,boomcan90/store,robin-lee/store,boomcan90/store
|
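`expect_errors=True` matters here because WebTest raises an `AppError` for any 4xx/5xx response instead of returning it; with the flag set, the response comes back and the test can assert on its status. WebTest also accepts the expected status directly, which would make the manual assert (and the debug prints) unnecessary; a sketch of that variant:

```python
# Equivalent WebTest idiom: pass the expected status instead of
# expect_errors=True plus a manual assertion.
res = testapp.get('/orders/', status=401)  # raises AppError on any other status
```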
007d081dcf790c92dfa44328474d17fca9a6592c
|
apitestcase/testcase.py
|
apitestcase/testcase.py
|
import requests


class TestCase(object):
    """
    Add assetion methods for HTTP Requests to TestCase
    """

    def assertRequest(self, method="GET", url="", status_code=200,
                      contains=None, **kwargs):
        """
        Asserts requests on a given endpoint
        """
        if contains is None:
            cotains = []
        if method is "GET":
            request = requests.get
        elif method is "POST":
            request = requests.post
        elif method is "PUT":
            request = requests.put
        elif method is "DELETE":
            request = requests.delete
        response = request(url, **kwargs)
        self.assertEqual(response.status_code, status_code)
        if contains:
            for item in contains:
                self.assertIn(item, response.content)

    def assertGet(self, *args, **kwargs):
        """
        Asserts GET requests on a URL
        """
        self.assertRequest("GET", *args, **kwargs)

    def assertPost(self, *args, **kwargs):
        """
        Asserts POST requests on a URL
        """
        self.assertRequest("POST", *args, **kwargs)

    def assertPut(self, *args, **kwargs):
        """
        Asserts PUT requests on a URL
        """
        self.assertRequest("PUT", *args, **kwargs)

    def assertDelete(self, *args, **kwargs):
        """
        Asserts DELETE requests on a URL
        """
        self.assertRequest("DELETE", *args, **kwargs)
|
import requests


class TestCase(object):
    """
    Add assetion methods for HTTP Requests to TestCase
    """

    def assertRequest(self, method="GET", url="", status_code=200,
                      contains=None, **kwargs):
        """
        Asserts requests on a given endpoint
        """
        if contains is None:
            cotains = []
        if method is "GET":
            request = requests.get
        elif method is "POST":
            request = requests.post
        elif method is "PUT":
            request = requests.put
        elif method is "DELETE":
            request = requests.delete
        response = request(url, **kwargs)
        self.assertEqual(response.status_code, status_code)
        if contains:
            for item in contains:
                self.assertIn(item, response.text)

    def assertGet(self, *args, **kwargs):
        """
        Asserts GET requests on a URL
        """
        self.assertRequest("GET", *args, **kwargs)

    def assertPost(self, *args, **kwargs):
        """
        Asserts POST requests on a URL
        """
        self.assertRequest("POST", *args, **kwargs)

    def assertPut(self, *args, **kwargs):
        """
        Asserts PUT requests on a URL
        """
        self.assertRequest("PUT", *args, **kwargs)

    def assertDelete(self, *args, **kwargs):
        """
        Asserts DELETE requests on a URL
        """
        self.assertRequest("DELETE", *args, **kwargs)
|
Use requests' reponse.text per documentation
|
Use requests' reponse.text per documentation
|
Python
|
mit
|
bramwelt/apitestcase
|
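The substance of this change is the `requests` response API: `response.content` is the raw body as `bytes`, while `response.text` is the body decoded to `str` using the detected encoding, so `assertIn` with string needles only works reliably against `.text` on Python 3. For example:

```python
import requests

r = requests.get("https://example.com")
print(type(r.content))  # <class 'bytes'> -- raw body
print(type(r.text))     # <class 'str'>   -- decoded via r.encoding
# "Example" in r.content raises TypeError on Python 3; "Example" in r.text works.
```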
f914e1a58d0817ab35eb884a9280d2d4d9a0f579
|
dsub/_dsub_version.py
|
dsub/_dsub_version.py
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.7.dev0'
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.7'
|
Update dsub version to 0.3.7
|
Update dsub version to 0.3.7
PiperOrigin-RevId: 292945859
|
Python
|
apache-2.0
|
DataBiosphere/dsub,DataBiosphere/dsub
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.7.dev0'
Update dsub version to 0.3.7
PiperOrigin-RevId: 292945859
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.7'
|
<commit_before># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.7.dev0'
<commit_msg>Update dsub version to 0.3.7
PiperOrigin-RevId: 292945859<commit_after>
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.7'
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.7.dev0'
Update dsub version to 0.3.7
PiperOrigin-RevId: 292945859# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.7'
|
<commit_before># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.7.dev0'
<commit_msg>Update dsub version to 0.3.7
PiperOrigin-RevId: 292945859<commit_after># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.7'
|
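The record above describes a dev-to-release sequence governed by PEP-440. A minimal sketch of how such versions order, assuming the third-party packaging library (not part of the record itself):

from packaging.version import Version

# PEP-440 ordering: a .devN pre-release sorts before its final release,
# matching the sequence 0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 quoted above.
assert Version("0.3.7.dev0") < Version("0.3.7") < Version("0.3.8.dev0")

# setup.py-style extraction of the version from the file as plain text,
# which is why the docstring warns against changing the line's format.
line = "DSUB_VERSION = '0.3.7'"
print(line.split("=", 1)[1].strip().strip("'"))  # -> 0.3.7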
7a057ba74a5914f8d7f8db3646feb5cb06a74cef
|
ml/pytorch/image_classification/image_classifier.py
|
ml/pytorch/image_classification/image_classifier.py
|
# %load_ext autoreload
# %autoreload 2
import torch
from torch import nn
from torch.autograd import Variable
def accuracy(preds, labels):
return (preds==labels).mean()
def n_correct(preds, labels):
return (preds==labels).sum()
class ImageClassifier(object):
def __init__(self, net, n_classes):
"""
Args:
net: A pytorch network module that will compute a forward pass
n_classes: number of output classes.
"""
self.history = []
self.n_classes = n_classes
self.net = net
# LOSS FUNCTION
if n_classes <= 2:
# Binary classification
self.loss_func = torch.nn.BCEWithLogitsLoss(weight=None)
else:
# multiclass classification
self.loss_func = torch.nn.CrossEntropyLoss(weight=None) # on logits
#self.loss_func = torch.nn.NLLLoss(weight=None) #on LogSoftmax() outputs
# OPTIMIZER
self.optimizer = None
|
# %load_ext autoreload
# %autoreload 2
import torch
from torch import nn
from torch.autograd import Variable
def accuracy(preds, labels):
return (preds==labels).mean()
def n_correct(preds, labels):
return (preds==labels).sum()
class ImageClassifier(object):
def __init__(self, net, n_classes):
"""
Args:
net: A pytorch network module that will compute a forward pass
n_classes: number of output classes.
"""
self.history = []
self.n_classes = n_classes
self.net = net
# LOSS FUNCTION
if n_classes <= 2:
# Binary classification
self.loss_func = torch.nn.BCEWithLogitsLoss(weight=None)
else:
# multiclass classification
self.loss_func = torch.nn.CrossEntropyLoss(weight=None) # on logits
#self.loss_func = torch.nn.NLLLoss(weight=None) #on LogSoftmax() outputs
# OPTIMIZER
self.optimizer = None
def set_optimizer(self, opt_func=torch.optim.Adam, **kwargs):
"""
Args:
opt_func: (function class) the optimization function creator to use
**kwargs: The keyword arguments to pass to opt_func
eg: lr=1e-3, weight_decay=0
"""
self.opt_func = opt_func
self.opt_args = kwargs
self.optimizer = opt_func(self.net.parameters(), **kwargs)
|
Add set_optimizer method to pytorch ImageClassifier class
|
FEAT: Add set_optimizer method to pytorch ImageClassifier class
|
Python
|
apache-2.0
|
ronrest/convenience_py,ronrest/convenience_py
|
# %load_ext autoreload
# %autoreload 2
import torch
from torch import nn
from torch.autograd import Variable
def accuracy(preds, labels):
return (preds==labels).mean()
def n_correct(preds, labels):
return (preds==labels).sum()
class ImageClassifier(object):
def __init__(self, net, n_classes):
"""
Args:
net: A pytorch network module that will compute a forward pass
n_classes: number of output classes.
"""
self.history = []
self.n_classes = n_classes
self.net = net
# LOSS FUNCTION
if n_classes <= 2:
# Binary classification
self.loss_func = torch.nn.BCEWithLogitsLoss(weight=None)
else:
# multiclass classification
self.loss_func = torch.nn.CrossEntropyLoss(weight=None) # on logits
#self.loss_func = torch.nn.NLLLoss(weight=None) #on LogSoftmax() outputs
# OPTIMIZER
self.optimizer = None
FEAT: Add set_optimizer method to pytorch ImageClassifier class
|
# %load_ext autoreload
# %autoreload 2
import torch
from torch import nn
from torch.autograd import Variable
def accuracy(preds, labels):
return (preds==labels).mean()
def n_correct(preds, labels):
return (preds==labels).sum()
class ImageClassifier(object):
def __init__(self, net, n_classes):
"""
Args:
net: A pytorch network module that will compute a forward pass
n_classes: number of output classes.
"""
self.history = []
self.n_classes = n_classes
self.net = net
# LOSS FUNCTION
if n_classes <= 2:
# Binary classification
self.loss_func = torch.nn.BCEWithLogitsLoss(weight=None)
else:
# multiclass classification
self.loss_func = torch.nn.CrossEntropyLoss(weight=None) # on logits
#self.loss_func = torch.nn.NLLLoss(weight=None) #on LogSoftmax() outputs
# OPTIMIZER
self.optimizer = None
def set_optimizer(self, opt_func=torch.optim.Adam, **kwargs):
"""
Args:
opt_func: (function class) the optimization function creator to use
**kwargs: The keyword arguments to pass to opt_func
eg: lr=1e-3, weight_decay=0
"""
self.opt_func = opt_func
self.opt_args = kwargs
self.optimizer = opt_func(self.net.parameters(), **kwargs)
|
<commit_before># %load_ext autoreload
# %autoreload 2
import torch
from torch import nn
from torch.autograd import Variable
def accuracy(preds, labels):
return (preds==labels).mean()
def n_correct(preds, labels):
return (preds==labels).sum()
class ImageClassifier(object):
def __init__(self, net, n_classes):
"""
Args:
net: A pytorch network module that will compute a forward pass
n_classes: number of output classes.
"""
self.history = []
self.n_classes = n_classes
self.net = net
# LOSS FUNCTION
if n_classes <= 2:
# Binary classification
self.loss_func = torch.nn.BCEWithLogitsLoss(weight=None)
else:
# multiclass classification
self.loss_func = torch.nn.CrossEntropyLoss(weight=None) # on logits
#self.loss_func = torch.nn.NLLLoss(weight=None) #on LogSoftmax() outputs
# OPTIMIZER
self.optimizer = None
<commit_msg>FEAT: Add set_optimizer method to pytorch ImageClassifier class<commit_after>
|
# %load_ext autoreload
# %autoreload 2
import torch
from torch import nn
from torch.autograd import Variable
def accuracy(preds, labels):
return (preds==labels).mean()
def n_correct(preds, labels):
return (preds==labels).sum()
class ImageClassifier(object):
def __init__(self, net, n_classes):
"""
Args:
net: A pytorch network module that will compute a forward pass
n_classes: number of output classes.
"""
self.history = []
self.n_classes = n_classes
self.net = net
# LOSS FUNCTION
if n_classes <= 2:
# Binary classification
self.loss_func = torch.nn.BCEWithLogitsLoss(weight=None)
else:
# multiclass classification
self.loss_func = torch.nn.CrossEntropyLoss(weight=None) # on logits
#self.loss_func = torch.nn.NLLLoss(weight=None) #on LogSoftmax() outputs
# OPTIMIZER
self.optimizer = None
def set_optimizer(self, opt_func=torch.optim.Adam, **kwargs):
"""
Args:
opt_func: (function class) the optimization function creator to use
**kwargs: The keyword arguments to pass to opt_func
eg: lr=1e-3, weight_decay=0
"""
self.opt_func = opt_func
self.opt_args = kwargs
self.optimizer = opt_func(self.net.parameters(), **kwargs)
|
# %load_ext autoreload
# %autoreload 2
import torch
from torch import nn
from torch.autograd import Variable
def accuracy(preds, labels):
return (preds==labels).mean()
def n_correct(preds, labels):
return (preds==labels).sum()
class ImageClassifier(object):
def __init__(self, net, n_classes):
"""
Args:
net: A pytorch network module that will compute a forward pass
n_classes: number of output classes.
"""
self.history = []
self.n_classes = n_classes
self.net = net
# LOSS FUNCTION
if n_classes <= 2:
# Binary classification
self.loss_func = torch.nn.BCEWithLogitsLoss(weight=None)
else:
# multiclass classification
self.loss_func = torch.nn.CrossEntropyLoss(weight=None) # on logits
#self.loss_func = torch.nn.NLLLoss(weight=None) #on LogSoftmax() outputs
# OPTIMIZER
self.optimizer = None
FEAT: Add set_optimizer method to pytorch ImageClassifier class
# %load_ext autoreload
# %autoreload 2
import torch
from torch import nn
from torch.autograd import Variable
def accuracy(preds, labels):
return (preds==labels).mean()
def n_correct(preds, labels):
return (preds==labels).sum()
class ImageClassifier(object):
def __init__(self, net, n_classes):
"""
Args:
net: A pytorch network module that will computer a forward pass
n_classes: number of output classes.
"""
self.history = []
self.n_classes = n_classes
self.net = net
# LOSS FUNCTION
if n_classes <= 2:
# Binary classification
self.loss_func = torch.nn.BCEWithLogitsLoss(weight=None)
else:
# multiclass classification
self.loss_func = torch.nn.CrossEntropyLoss(weight=None) # on logits
#self.loss_func = torch.nn.NLLLoss(weight=None) #on LogSoftmax() outputs
# OPTIMIZER
self.optimizer = None
def set_optimizer(self, opt_func=torch.optim.Adam, **kwargs):
"""
Args:
opt_func: (function class) the optimization function creator to use
**kwargs: The keyword arguments to pass to opt_func
eg: lr=1e-3, weight_decay=0
"""
self.opt_func = opt_func
self.opt_args = kwargs
self.optimizer = opt_func(self.net.parameters(), **kwargs)
|
<commit_before># %load_ext autoreload
# %autoreload 2
import torch
from torch import nn
from torch.autograd import Variable
def accuracy(preds, labels):
return (preds==labels).mean()
def n_correct(preds, labels):
return (preds==labels).sum()
class ImageClassifier(object):
def __init__(self, net, n_classes):
"""
Args:
net: A pytorch network module that will compute a forward pass
n_classes: number of output classes.
"""
self.history = []
self.n_classes = n_classes
self.net = net
# LOSS FUNCTION
if n_classes <= 2:
# Binary classification
self.loss_func = torch.nn.BCEWithLogitsLoss(weight=None)
else:
# multiclass classification
self.loss_func = torch.nn.CrossEntropyLoss(weight=None) # on logits
#self.loss_func = torch.nn.NLLLoss(weight=None) #on LogSoftmax() outputs
# OPTIMIZER
self.optimizer = None
<commit_msg>FEAT: Add set_optimizer method to pytorch ImageClassifier class<commit_after># %load_ext autoreload
# %autoreload 2
import torch
from torch import nn
from torch.autograd import Variable
def accuracy(preds, labels):
return (preds==labels).mean()
def n_correct(preds, labels):
return (preds==labels).sum()
class ImageClassifier(object):
def __init__(self, net, n_classes):
"""
Args:
net: A pytorch network module that will compute a forward pass
n_classes: number of output classes.
"""
self.history = []
self.n_classes = n_classes
self.net = net
# LOSS FUNCTION
if n_classes <= 2:
# Binary classification
self.loss_func = torch.nn.BCEWithLogitsLoss(weight=None)
else:
# multiclass classification
self.loss_func = torch.nn.CrossEntropyLoss(weight=None) # on logits
#self.loss_func = torch.nn.NLLLoss(weight=None) #on LogSoftmax() outputs
# OPTIMIZER
self.optimizer = None
def set_optimizer(self, opt_func=torch.optim.Adam, **kwargs):
"""
Args:
opt_func: (function class) the optimization function creator to use
**kwargs: The keyword arguments to pass to opt_func
eg: lr=1e-3, weight_decay=0
"""
self.opt_func = opt_func
self.opt_args = kwargs
self.optimizer = opt_func(self.net.parameters(), **kwargs)
|
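A hypothetical usage sketch for the set_optimizer method added in this record; the two-layer network and hyperparameters are illustrative and assume the ImageClassifier class above is importable:

import torch
from torch import nn

# Any nn.Module works; this tiny MLP stands in for a real image network.
net = nn.Sequential(nn.Flatten(), nn.Linear(28 * 28, 10))
clf = ImageClassifier(net, n_classes=10)

# Default opt_func is torch.optim.Adam; extra kwargs flow through unchanged.
clf.set_optimizer(lr=1e-3, weight_decay=1e-4)

# Swapping optimizers is just a different opt_func argument.
clf.set_optimizer(torch.optim.SGD, lr=0.01, momentum=0.9)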
d163644d3c2f0a9f5d08da753e0b97506f6ff6b3
|
rollbar/test/async_helper.py
|
rollbar/test/async_helper.py
|
import asyncio
import inspect
import sys
from rollbar.contrib.asgi import ASGIApp
def run(coro):
if sys.version_info >= (3, 7):
return asyncio.run(coro)
assert inspect.iscoroutine(coro)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
return loop.run_until_complete(coro)
finally:
loop.close()
asyncio.set_event_loop(None)
@ASGIApp
class FailingTestASGIApp:
def __call__(self, scope, receive, send):
run(self._asgi_app(scope, receive, send))
async def app(self, scope, receive, send):
raise RuntimeError("Invoked only for testing")
|
import asyncio
import inspect
import sys
from rollbar.contrib.asgi import ASGIApp
def run(coro):
if sys.version_info >= (3, 7):
return asyncio.run(coro)
assert inspect.iscoroutine(coro)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
return loop.run_until_complete(coro)
finally:
loop.close()
asyncio.set_event_loop(None)
def async_receive(message):
async def receive():
return message
assert message["type"] == "http.request"
return receive
@ASGIApp
class FailingTestASGIApp:
def __call__(self, scope, receive, send):
run(self._asgi_app(scope, receive, send))
async def app(self, scope, receive, send):
raise RuntimeError("Invoked only for testing")
|
Add wrapper for async receive event handler
|
Add wrapper for async receive event handler
|
Python
|
mit
|
rollbar/pyrollbar
|
import asyncio
import inspect
import sys
from rollbar.contrib.asgi import ASGIApp
def run(coro):
if sys.version_info >= (3, 7):
return asyncio.run(coro)
assert inspect.iscoroutine(coro)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
return loop.run_until_complete(coro)
finally:
loop.close()
asyncio.set_event_loop(None)
@ASGIApp
class FailingTestASGIApp:
def __call__(self, scope, receive, send):
run(self._asgi_app(scope, receive, send))
async def app(self, scope, receive, send):
raise RuntimeError("Invoked only for testing")
Add wrapper for async receive event handler
|
import asyncio
import inspect
import sys
from rollbar.contrib.asgi import ASGIApp
def run(coro):
if sys.version_info >= (3, 7):
return asyncio.run(coro)
assert inspect.iscoroutine(coro)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
return loop.run_until_complete(coro)
finally:
loop.close()
asyncio.set_event_loop(None)
def async_receive(message):
async def receive():
return message
assert message["type"] == "http.request"
return receive
@ASGIApp
class FailingTestASGIApp:
def __call__(self, scope, receive, send):
run(self._asgi_app(scope, receive, send))
async def app(self, scope, receive, send):
raise RuntimeError("Invoked only for testing")
|
<commit_before>import asyncio
import inspect
import sys
from rollbar.contrib.asgi import ASGIApp
def run(coro):
if sys.version_info >= (3, 7):
return asyncio.run(coro)
assert inspect.iscoroutine(coro)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
return loop.run_until_complete(coro)
finally:
loop.close()
asyncio.set_event_loop(None)
@ASGIApp
class FailingTestASGIApp:
def __call__(self, scope, receive, send):
run(self._asgi_app(scope, receive, send))
async def app(self, scope, receive, send):
raise RuntimeError("Invoked only for testing")
<commit_msg>Add wrapper for async receive event handler<commit_after>
|
import asyncio
import inspect
import sys
from rollbar.contrib.asgi import ASGIApp
def run(coro):
if sys.version_info >= (3, 7):
return asyncio.run(coro)
assert inspect.iscoroutine(coro)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
return loop.run_until_complete(coro)
finally:
loop.close()
asyncio.set_event_loop(None)
def async_receive(message):
async def receive():
return message
assert message["type"] == "http.request"
return receive
@ASGIApp
class FailingTestASGIApp:
def __call__(self, scope, receive, send):
run(self._asgi_app(scope, receive, send))
async def app(self, scope, receive, send):
raise RuntimeError("Invoked only for testing")
|
import asyncio
import inspect
import sys
from rollbar.contrib.asgi import ASGIApp
def run(coro):
if sys.version_info >= (3, 7):
return asyncio.run(coro)
assert inspect.iscoroutine(coro)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
return loop.run_until_complete(coro)
finally:
loop.close()
asyncio.set_event_loop(None)
@ASGIApp
class FailingTestASGIApp:
def __call__(self, scope, receive, send):
run(self._asgi_app(scope, receive, send))
async def app(self, scope, receive, send):
raise RuntimeError("Invoked only for testing")
Add wrapper for async receive event handler
import asyncio
import inspect
import sys
from rollbar.contrib.asgi import ASGIApp
def run(coro):
if sys.version_info >= (3, 7):
return asyncio.run(coro)
assert inspect.iscoroutine(coro)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
return loop.run_until_complete(coro)
finally:
loop.close()
asyncio.set_event_loop(None)
def async_receive(message):
async def receive():
return message
assert message["type"] == "http.request"
return receive
@ASGIApp
class FailingTestASGIApp:
def __call__(self, scope, receive, send):
run(self._asgi_app(scope, receive, send))
async def app(self, scope, receive, send):
raise RuntimeError("Invoked only for testing")
|
<commit_before>import asyncio
import inspect
import sys
from rollbar.contrib.asgi import ASGIApp
def run(coro):
if sys.version_info >= (3, 7):
return asyncio.run(coro)
assert inspect.iscoroutine(coro)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
return loop.run_until_complete(coro)
finally:
loop.close()
asyncio.set_event_loop(None)
@ASGIApp
class FailingTestASGIApp:
def __call__(self, scope, receive, send):
run(self._asgi_app(scope, receive, send))
async def app(self, scope, receive, send):
raise RuntimeError("Invoked only for testing")
<commit_msg>Add wrapper for async receive event handler<commit_after>import asyncio
import inspect
import sys
from rollbar.contrib.asgi import ASGIApp
def run(coro):
if sys.version_info >= (3, 7):
return asyncio.run(coro)
assert inspect.iscoroutine(coro)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
return loop.run_until_complete(coro)
finally:
loop.close()
asyncio.set_event_loop(None)
def async_receive(message):
async def receive():
return message
assert message["type"] == "http.request"
return receive
@ASGIApp
class FailingTestASGIApp:
def __call__(self, scope, receive, send):
run(self._asgi_app(scope, receive, send))
async def app(self, scope, receive, send):
raise RuntimeError("Invoked only for testing")
|
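A usage sketch for the async_receive wrapper, assuming the run helper defined in the same module; the request event shown is illustrative:

# Build a receive callable that always resolves to one http.request event.
receive = async_receive({"type": "http.request", "body": b"", "more_body": False})

# Each invocation returns a fresh coroutine over the same message, which is
# enough to drive a single-request ASGI app in a synchronous test.
event = run(receive())
assert event["type"] == "http.request"
assert event["body"] == b""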
4d19cc36e866c8e21a526cd228f170ffd177292b
|
run_ctest.py
|
run_ctest.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import platform
import os
import subprocess
import sys
if platform.system() == "Windows":
import distutils.msvc9compiler as msvc
if __name__ == "__main__":
CITOOLS_PATH = os.path.join(os.getcwd(), "ci-tools")
CMAKE_PATH = os.path.join(CITOOLS_PATH, "cmake")
if platform.system() == "Linux":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "bin")+":"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
elif platform.system() == "Windows":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "bin")+";"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
if len(sys.argv) > 1:
msvc.find_vcvarsall = lambda _: sys.argv[1]
envs = msvc.query_vcvarsall(sys.argv[2])
for k,v in envs.items():
k = k.upper()
v = ":".join(subprocess.check_output(["cygpath","-u",p]).rstrip() for p in v.split(";"))
v = v.replace("'\''",r"'\'\\\'\''")
print "export %(k)s='\''%(v)s'\''" % locals()
elif platform.system() == "Darwin":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "CMake.app", "Contents", "bin")+":"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
if subprocess.call("ctest -VV -S ci-tools/run_ctest.cmake", shell=True) != 0:
raise Exception("CTest returned an error.")
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import platform
import os
import subprocess
if __name__ == "__main__":
CITOOLS_PATH = os.path.join(os.getcwd(), "ci-tools")
CMAKE_PATH = os.path.join(CITOOLS_PATH, "cmake")
if platform.system() == "Linux":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "bin")+":"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
elif platform.system() == "Windows":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "bin")+";"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
elif platform.system() == "Darwin":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "CMake.app", "Contents", "bin")+":"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
if subprocess.call("ctest -VV -S ci-tools/run_ctest.cmake", shell=True) != 0:
raise Exception("CTest returned an error.")
|
Test travisci windows build workaround. (3)
|
Test travisci windows build workaround. (3)
|
Python
|
unlicense
|
h-s-c/ci-tools
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import platform
import os
import subprocess
import sys
if platform.system() == "Windows":
import distutils.msvc9compiler as msvc
if __name__ == "__main__":
CITOOLS_PATH = os.path.join(os.getcwd(), "ci-tools")
CMAKE_PATH = os.path.join(CITOOLS_PATH, "cmake")
if platform.system() == "Linux":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "bin")+":"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
elif platform.system() == "Windows":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "bin")+";"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
if len(sys.argv) > 1:
msvc.find_vcvarsall = lambda _: sys.argv[1]
envs = msvc.query_vcvarsall(sys.argv[2])
for k,v in envs.items():
k = k.upper()
v = ":".join(subprocess.check_output(["cygpath","-u",p]).rstrip() for p in v.split(";"))
v = v.replace("'\''",r"'\'\\\'\''")
print "export %(k)s='\''%(v)s'\''" % locals()
elif platform.system() == "Darwin":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "CMake.app", "Contents", "bin")+":"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
if subprocess.call("ctest -VV -S ci-tools/run_ctest.cmake", shell=True) != 0:
raise Exception("CTest returned an error.")
Test travisci windows build workaround. (3)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import platform
import os
import subprocess
if __name__ == "__main__":
CITOOLS_PATH = os.path.join(os.getcwd(), "ci-tools")
CMAKE_PATH = os.path.join(CITOOLS_PATH, "cmake")
if platform.system() == "Linux":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "bin")+":"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
elif platform.system() == "Windows":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "bin")+";"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
elif platform.system() == "Darwin":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "CMake.app", "Contents", "bin")+":"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
if subprocess.call("ctest -VV -S ci-tools/run_ctest.cmake", shell=True) != 0:
raise Exception("CTest returned an error.")
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
import platform
import os
import subprocess
import sys
if platform.system() == "Windows":
import distutils.msvc9compiler as msvc
if __name__ == "__main__":
CITOOLS_PATH = os.path.join(os.getcwd(), "ci-tools")
CMAKE_PATH = os.path.join(CITOOLS_PATH, "cmake")
if platform.system() == "Linux":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "bin")+":"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
elif platform.system() == "Windows":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "bin")+";"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
if len(sys.argv) > 1:
msvc.find_vcvarsall = lambda _: sys.argv[1]
envs = msvc.query_vcvarsall(sys.argv[2])
for k,v in envs.items():
k = k.upper()
v = ":".join(subprocess.check_output(["cygpath","-u",p]).rstrip() for p in v.split(";"))
v = v.replace("'\''",r"'\'\\\'\''")
print "export %(k)s='\''%(v)s'\''" % locals()
elif platform.system() == "Darwin":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "CMake.app", "Contents", "bin")+":"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
if subprocess.call("ctest -VV -S ci-tools/run_ctest.cmake", shell=True) != 0:
raise Exception("CTest returned an error.")
<commit_msg>Test travisci windows build workaround. (3)<commit_after>
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import platform
import os
import subprocess
if __name__ == "__main__":
CITOOLS_PATH = os.path.join(os.getcwd(), "ci-tools")
CMAKE_PATH = os.path.join(CITOOLS_PATH, "cmake")
if platform.system() == "Linux":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "bin")+":"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
elif platform.system() == "Windows":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "bin")+";"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
elif platform.system() == "Darwin":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "CMake.app", "Contents", "bin")+":"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
if subprocess.call("ctest -VV -S ci-tools/run_ctest.cmake", shell=True) != 0:
raise Exception("CTest returned an error.")
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import platform
import os
import subprocess
import sys
if platform.system() == "Windows":
import distutils.msvc9compiler as msvc
if __name__ == "__main__":
CITOOLS_PATH = os.path.join(os.getcwd(), "ci-tools")
CMAKE_PATH = os.path.join(CITOOLS_PATH, "cmake")
if platform.system() == "Linux":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "bin")+":"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
elif platform.system() == "Windows":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "bin")+";"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
if len(sys.argv) > 1:
msvc.find_vcvarsall = lambda _: sys.argv[1]
envs = msvc.query_vcvarsall(sys.argv[2])
for k,v in envs.items():
k = k.upper()
v = ":".join(subprocess.check_output(["cygpath","-u",p]).rstrip() for p in v.split(";"))
v = v.replace("'\''",r"'\'\\\'\''")
print "export %(k)s='\''%(v)s'\''" % locals()
elif platform.system() == "Darwin":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "CMake.app", "Contents", "bin")+":"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
if subprocess.call("ctest -VV -S ci-tools/run_ctest.cmake", shell=True) != 0:
raise Exception("CTest returned an error.")
Test travisci windows build workaround. (3)
#!/usr/bin/python
# -*- coding: utf-8 -*-
import platform
import os
import subprocess
if __name__ == "__main__":
CITOOLS_PATH = os.path.join(os.getcwd(), "ci-tools")
CMAKE_PATH = os.path.join(CITOOLS_PATH, "cmake")
if platform.system() == "Linux":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "bin")+":"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
elif platform.system() == "Windows":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "bin")+";"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
elif platform.system() == "Darwin":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "CMake.app", "Contents", "bin")+":"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
if subprocess.call("ctest -VV -S ci-tools/run_ctest.cmake", shell=True) != 0:
raise Exception("CTest returned an error.")
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
import platform
import os
import subprocess
import sys
if platform.system() == "Windows":
import distutils.msvc9compiler as msvc
if __name__ == "__main__":
CITOOLS_PATH = os.path.join(os.getcwd(), "ci-tools")
CMAKE_PATH = os.path.join(CITOOLS_PATH, "cmake")
if platform.system() == "Linux":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "bin")+":"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
elif platform.system() == "Windows":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "bin")+";"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
if len(sys.argv) > 1:
msvc.find_vcvarsall = lambda _: sys.argv[1]
envs = msvc.query_vcvarsall(sys.argv[2])
for k,v in envs.items():
k = k.upper()
v = ":".join(subprocess.check_output(["cygpath","-u",p]).rstrip() for p in v.split(";"))
v = v.replace("'\''",r"'\'\\\'\''")
print "export %(k)s='\''%(v)s'\''" % locals()
elif platform.system() == "Darwin":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "CMake.app", "Contents", "bin")+":"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
if subprocess.call("ctest -VV -S ci-tools/run_ctest.cmake", shell=True) != 0:
raise Exception("CTest returned an error.")
<commit_msg>Test travisci windows build workaround. (3)<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
import platform
import os
import subprocess
if __name__ == "__main__":
CITOOLS_PATH = os.path.join(os.getcwd(), "ci-tools")
CMAKE_PATH = os.path.join(CITOOLS_PATH, "cmake")
if platform.system() == "Linux":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "bin")+":"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
elif platform.system() == "Windows":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "bin")+";"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
elif platform.system() == "Darwin":
os.environ["PATH"] = os.path.join(CMAKE_PATH, "CMake.app", "Contents", "bin")+":"+os.environ.get("PATH", os.path.join(CMAKE_PATH, "bin"))
if subprocess.call("ctest -VV -S ci-tools/run_ctest.cmake", shell=True) != 0:
raise Exception("CTest returned an error.")
|
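The three platform branches above differ only in the path separator and the CMake layout; an alternative sketch (not the repo's code) folding them through os.pathsep:

import os
import platform

def prepend_to_path(directory):
    # os.pathsep is ';' on Windows and ':' elsewhere, so one expression
    # covers all of the per-OS string concatenations above.
    os.environ["PATH"] = directory + os.pathsep + os.environ.get("PATH", directory)

cmake_path = os.path.join(os.getcwd(), "ci-tools", "cmake")
if platform.system() == "Darwin":
    prepend_to_path(os.path.join(cmake_path, "CMake.app", "Contents", "bin"))
else:
    prepend_to_path(os.path.join(cmake_path, "bin"))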
ed7308df6fc324581482d7508394a34a35cbf65c
|
xorshift/__init__.py
|
xorshift/__init__.py
|
from operator import mul
import xorshift.xorgen
def _compute_n_elements(shape):
if shape is None:
shape = (1,)
try:
nelts = reduce(mul, shape)
except TypeError:
nelts = int(shape)
shape = (shape, )
return nelts, shape
class _Generator(object):
def uniform(self, low=0.0, high=1.0, size=None):
nelts, size = _compute_n_elements(size)
rvs = self.rng.uniform(nelts)
if (high - low) != 1.0:
rvs *= (high - low)
if low != 0.0:
rvs += low
return rvs.reshape(size)
def binomial(self, N, p, size=None):
nelts, size = _compute_n_elements(size)
return self.rng.binomial(N, p, nelts).reshape(size)
class Xoroshiro(_Generator):
def __init__(self, seed=None):
self.rng = xorgen.Xoroshiro(seed)
class Xorshift128plus(_Generator):
def __init__(self, seed=None):
self.rng = xorgen.Xorshift128plus(seed)
__all__ = [Xoroshiro, Xorshift128plus]
|
from operator import mul
import xorshift.xorgen
import numpy as np
def _compute_n_elements(shape):
if shape is None:
shape = (1,)
try:
nelts = reduce(mul, shape)
except TypeError:
nelts = int(shape)
shape = (shape, )
return nelts, shape
class _Generator(object):
def uniform(self, low=0.0, high=1.0, size=None):
nelts, size = _compute_n_elements(size)
rvs = self.rng.uniform(nelts)
if (high - low) != 1.0:
rvs *= (high - low)
if low != 0.0:
rvs += low
rvs = rvs.reshape(size)
if self.copy:
return np.copy(rvs)
else:
return rvs
def binomial(self, N, p, size=None, copy=False):
nelts, size = _compute_n_elements(size)
rvs = self.rng.binomial(N, p, nelts).reshape(size)
if self.copy:
return np.copy(rvs)
else:
return rvs
class Xoroshiro(_Generator):
def __init__(self, seed=None, copy=True):
self.copy = copy
self.rng = xorgen.Xoroshiro(seed)
class Xorshift128plus(_Generator):
def __init__(self, seed=None, copy=True):
self.copy = copy
self.rng = xorgen.Xorshift128plus(seed)
__all__ = [Xoroshiro, Xorshift128plus]
|
Add optional copy on return
|
Add optional copy on return
|
Python
|
mit
|
ihaque/xorshift,ihaque/xorshift
|
from operator import mul
import xorshift.xorgen
def _compute_n_elements(shape):
if shape is None:
shape = (1,)
try:
nelts = reduce(mul, shape)
except TypeError:
nelts = int(shape)
shape = (shape, )
return nelts, shape
class _Generator(object):
def uniform(self, low=0.0, high=1.0, size=None):
nelts, size = _compute_n_elements(size)
rvs = self.rng.uniform(nelts)
if (high - low) != 1.0:
rvs *= (high - low)
if low != 0.0:
rvs += low
return rvs.reshape(size)
def binomial(self, N, p, size=None):
nelts, size = _compute_n_elements(size)
return self.rng.binomial(N, p, nelts).reshape(size)
class Xoroshiro(_Generator):
def __init__(self, seed=None):
self.rng = xorgen.Xoroshiro(seed)
class Xorshift128plus(_Generator):
def __init__(self, seed=None):
self.rng = xorgen.Xorshift128plus(seed)
__all__ = [Xoroshiro, Xorshift128plus]
Add optional copy on return
|
from operator import mul
import xorshift.xorgen
import numpy as np
def _compute_n_elements(shape):
if shape is None:
shape = (1,)
try:
nelts = reduce(mul, shape)
except TypeError:
nelts = int(shape)
shape = (shape, )
return nelts, shape
class _Generator(object):
def uniform(self, low=0.0, high=1.0, size=None):
nelts, size = _compute_n_elements(size)
rvs = self.rng.uniform(nelts)
if (high - low) != 1.0:
rvs *= (high - low)
if low != 0.0:
rvs += low
rvs = rvs.reshape(size)
if self.copy:
return np.copy(rvs)
else:
return rvs
def binomial(self, N, p, size=None, copy=False):
nelts, size = _compute_n_elements(size)
rvs = self.rng.binomial(N, p, nelts).reshape(size)
if self.copy:
return np.copy(rvs)
else:
return rvs
class Xoroshiro(_Generator):
def __init__(self, seed=None, copy=True):
self.copy = copy
self.rng = xorgen.Xoroshiro(seed)
class Xorshift128plus(_Generator):
def __init__(self, seed=None, copy=True):
self.copy = copy
self.rng = xorgen.Xorshift128plus(seed)
__all__ = [Xoroshiro, Xorshift128plus]
|
<commit_before>from operator import mul
import xorshift.xorgen
def _compute_n_elements(shape):
if shape is None:
shape = (1,)
try:
nelts = reduce(mul, shape)
except TypeError:
nelts = int(shape)
shape = (shape, )
return nelts, shape
class _Generator(object):
def uniform(self, low=0.0, high=1.0, size=None):
nelts, size = _compute_n_elements(size)
rvs = self.rng.uniform(nelts)
if (high - low) != 1.0:
rvs *= (high - low)
if low != 0.0:
rvs += low
return rvs.reshape(size)
def binomial(self, N, p, size=None):
nelts, size = _compute_n_elements(size)
return self.rng.binomial(N, p, nelts).reshape(size)
class Xoroshiro(_Generator):
def __init__(self, seed=None):
self.rng = xorgen.Xoroshiro(seed)
class Xorshift128plus(_Generator):
def __init__(self, seed=None):
self.rng = xorgen.Xorshift128plus(seed)
__all__ = [Xoroshiro, Xorshift128plus]
<commit_msg>Add optional copy on return<commit_after>
|
from operator import mul
import xorshift.xorgen
import numpy as np
def _compute_n_elements(shape):
if shape is None:
shape = (1,)
try:
nelts = reduce(mul, shape)
except TypeError:
nelts = int(shape)
shape = (shape, )
return nelts, shape
class _Generator(object):
def uniform(self, low=0.0, high=1.0, size=None):
nelts, size = _compute_n_elements(size)
rvs = self.rng.uniform(nelts)
if (high - low) != 1.0:
rvs *= (high - low)
if low != 0.0:
rvs += low
rvs = rvs.reshape(size)
if self.copy:
return np.copy(rvs)
else:
return rvs
def binomial(self, N, p, size=None, copy=False):
nelts, size = _compute_n_elements(size)
rvs = self.rng.binomial(N, p, nelts).reshape(size)
if self.copy:
return np.copy(rvs)
else:
return rvs
class Xoroshiro(_Generator):
def __init__(self, seed=None, copy=True):
self.copy = copy
self.rng = xorgen.Xoroshiro(seed)
class Xorshift128plus(_Generator):
def __init__(self, seed=None, copy=True):
self.copy = copy
self.rng = xorgen.Xorshift128plus(seed)
__all__ = [Xoroshiro, Xorshift128plus]
|
from operator import mul
import xorshift.xorgen
def _compute_n_elements(shape):
if shape is None:
shape = (1,)
try:
nelts = reduce(mul, shape)
except TypeError:
nelts = int(shape)
shape = (shape, )
return nelts, shape
class _Generator(object):
def uniform(self, low=0.0, high=1.0, size=None):
nelts, size = _compute_n_elements(size)
rvs = self.rng.uniform(nelts)
if (high - low) != 1.0:
rvs *= (high - low)
if low != 0.0:
rvs += low
return rvs.reshape(size)
def binomial(self, N, p, size=None):
nelts, size = _compute_n_elements(size)
return self.rng.binomial(N, p, nelts).reshape(size)
class Xoroshiro(_Generator):
def __init__(self, seed=None):
self.rng = xorgen.Xoroshiro(seed)
class Xorshift128plus(_Generator):
def __init__(self, seed=None):
self.rng = xorgen.Xorshift128plus(seed)
__all__ = [Xoroshiro, Xorshift128plus]
Add optional copy on return
from operator import mul
import xorshift.xorgen
import numpy as np
def _compute_n_elements(shape):
if shape is None:
shape = (1,)
try:
nelts = reduce(mul, shape)
except TypeError:
nelts = int(shape)
shape = (shape, )
return nelts, shape
class _Generator(object):
def uniform(self, low=0.0, high=1.0, size=None):
nelts, size = _compute_n_elements(size)
rvs = self.rng.uniform(nelts)
if (high - low) != 1.0:
rvs *= (high - low)
if low != 0.0:
rvs += low
rvs = rvs.reshape(size)
if self.copy:
return np.copy(rvs)
else:
return rvs
def binomial(self, N, p, size=None, copy=False):
nelts, size = _compute_n_elements(size)
rvs = self.rng.binomial(N, p, nelts).reshape(size)
if self.copy:
return np.copy(rvs)
else:
return rvs
class Xoroshiro(_Generator):
def __init__(self, seed=None, copy=True):
self.copy = copy
self.rng = xorgen.Xoroshiro(seed)
class Xorshift128plus(_Generator):
def __init__(self, seed=None, copy=True):
self.copy = copy
self.rng = xorgen.Xorshift128plus(seed)
__all__ = [Xoroshiro, Xorshift128plus]
|
<commit_before>from operator import mul
import xorshift.xorgen
def _compute_n_elements(shape):
if shape is None:
shape = (1,)
try:
nelts = reduce(mul, shape)
except TypeError:
nelts = int(shape)
shape = (shape, )
return nelts, shape
class _Generator(object):
def uniform(self, low=0.0, high=1.0, size=None):
nelts, size = _compute_n_elements(size)
rvs = self.rng.uniform(nelts)
if (high - low) != 1.0:
rvs *= (high - low)
if low != 0.0:
rvs += low
return rvs.reshape(size)
def binomial(self, N, p, size=None):
nelts, size = _compute_n_elements(size)
return self.rng.binomial(N, p, nelts).reshape(size)
class Xoroshiro(_Generator):
def __init__(self, seed=None):
self.rng = xorgen.Xoroshiro(seed)
class Xorshift128plus(_Generator):
def __init__(self, seed=None):
self.rng = xorgen.Xorshift128plus(seed)
__all__ = [Xoroshiro, Xorshift128plus]
<commit_msg>Add optional copy on return<commit_after>from operator import mul
import xorshift.xorgen
import numpy as np
def _compute_n_elements(shape):
if shape is None:
shape = (1,)
try:
nelts = reduce(mul, shape)
except TypeError:
nelts = int(shape)
shape = (shape, )
return nelts, shape
class _Generator(object):
def uniform(self, low=0.0, high=1.0, size=None):
nelts, size = _compute_n_elements(size)
rvs = self.rng.uniform(nelts)
if (high - low) != 1.0:
rvs *= (high - low)
if low != 0.0:
rvs += low
rvs = rvs.reshape(size)
if self.copy:
return np.copy(rvs)
else:
return rvs
def binomial(self, N, p, size=None, copy=False):
nelts, size = _compute_n_elements(size)
rvs = self.rng.binomial(N, p, nelts).reshape(size)
if self.copy:
return np.copy(rvs)
else:
return rvs
class Xoroshiro(_Generator):
def __init__(self, seed=None, copy=True):
self.copy = copy
self.rng = xorgen.Xoroshiro(seed)
class Xorshift128plus(_Generator):
def __init__(self, seed=None, copy=True):
self.copy = copy
self.rng = xorgen.Xorshift128plus(seed)
__all__ = [Xoroshiro, Xorshift128plus]
|
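A hypothetical usage sketch for the copy flag added above; that the underlying xorgen generator may reuse its output buffer between draws is an assumption here, and it is exactly what copy=True guards against:

rng = Xoroshiro(seed=42)              # copy=True by default
a = rng.uniform(size=(2, 3))          # safe to hold onto: a fresh np.copy
b = rng.uniform(size=(2, 3))          # does not disturb a

fast = Xoroshiro(seed=42, copy=False)
c = fast.uniform(size=(2, 3))         # may alias generator-internal storage;
                                      # consume (or copy) it before drawing again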
580dfb9de6d03ca7663e9d2708cd69e2cce7b2a6
|
falmer/content/serializers.py
|
falmer/content/serializers.py
|
from django.conf import settings
from django.urls import reverse
from rest_framework import serializers
from falmer.content.models import StaffMemberSnippet
from falmer.matte.models import MatteImage
def generate_image_url(image, filter_spec):
from wagtail.wagtailimages.views.serve import generate_signature
signature = generate_signature(image.id, filter_spec)
url = reverse('wagtailimages_serve', args=(signature, image.id, filter_spec))
# Append image's original filename to the URL (optional)
# url += image.file.name[len('original_images/'):]
return settings.PUBLIC_HOST + url
class WagtailImageSerializer(serializers.ModelSerializer):
wagtail_image = serializers.SerializerMethodField()
resource = serializers.SerializerMethodField()
class Meta:
model = MatteImage
fields = ('id', 'wagtail_image', 'resource')
def get_wagtail_image(self, image):
return generate_image_url(image, 'fill-400x400')
def get_resource(self, image):
return image.file.path
class SnippetSerializer(serializers.ModelSerializer):
photo = WagtailImageSerializer()
class Meta:
model = StaffMemberSnippet
fields = ('name', 'job_title', 'email', 'office_phone_number', 'mobile_phone_number', 'job_description', 'office_location', 'photo')
|
from django.conf import settings
from django.urls import reverse
from rest_framework import serializers
from falmer.content.models import StaffMemberSnippet
from falmer.matte.models import MatteImage
def generate_image_url(image, filter_spec):
from wagtail.wagtailimages.views.serve import generate_signature
signature = generate_signature(image.id, filter_spec)
url = reverse('wagtailimages_serve', args=(signature, image.id, filter_spec))
# Append image's original filename to the URL (optional)
# url += image.file.name[len('original_images/'):]
return settings.PUBLIC_HOST + url
class WagtailImageSerializer(serializers.ModelSerializer):
wagtail_image = serializers.SerializerMethodField()
resource = serializers.SerializerMethodField()
class Meta:
model = MatteImage
fields = ('id', 'wagtail_image', 'resource')
def get_wagtail_image(self, image):
return generate_image_url(image, 'fill-400x400')
def get_resource(self, image):
return image.file.name
class SnippetSerializer(serializers.ModelSerializer):
photo = WagtailImageSerializer()
class Meta:
model = StaffMemberSnippet
fields = ('name', 'job_title', 'email', 'office_phone_number', 'mobile_phone_number', 'job_description', 'office_location', 'photo')
|
Use name over path for s3 backend support
|
Use name over path for s3 backend support
|
Python
|
mit
|
sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer
|
from django.conf import settings
from django.urls import reverse
from rest_framework import serializers
from falmer.content.models import StaffMemberSnippet
from falmer.matte.models import MatteImage
def generate_image_url(image, filter_spec):
from wagtail.wagtailimages.views.serve import generate_signature
signature = generate_signature(image.id, filter_spec)
url = reverse('wagtailimages_serve', args=(signature, image.id, filter_spec))
# Append image's original filename to the URL (optional)
# url += image.file.name[len('original_images/'):]
return settings.PUBLIC_HOST + url
class WagtailImageSerializer(serializers.ModelSerializer):
wagtail_image = serializers.SerializerMethodField()
resource = serializers.SerializerMethodField()
class Meta:
model = MatteImage
fields = ('id', 'wagtail_image', 'resource')
def get_wagtail_image(self, image):
return generate_image_url(image, 'fill-400x400')
def get_resource(self, image):
return image.file.path
class SnippetSerializer(serializers.ModelSerializer):
photo = WagtailImageSerializer()
class Meta:
model = StaffMemberSnippet
fields = ('name', 'job_title', 'email', 'office_phone_number', 'mobile_phone_number', 'job_description', 'office_location', 'photo')
Use name over path for s3 backend support
|
from django.conf import settings
from django.urls import reverse
from rest_framework import serializers
from falmer.content.models import StaffMemberSnippet
from falmer.matte.models import MatteImage
def generate_image_url(image, filter_spec):
from wagtail.wagtailimages.views.serve import generate_signature
signature = generate_signature(image.id, filter_spec)
url = reverse('wagtailimages_serve', args=(signature, image.id, filter_spec))
# Append image's original filename to the URL (optional)
# url += image.file.name[len('original_images/'):]
return settings.PUBLIC_HOST + url
class WagtailImageSerializer(serializers.ModelSerializer):
wagtail_image = serializers.SerializerMethodField()
resource = serializers.SerializerMethodField()
class Meta:
model = MatteImage
fields = ('id', 'wagtail_image', 'resource')
def get_wagtail_image(self, image):
return generate_image_url(image, 'fill-400x400')
def get_resource(self, image):
return image.file.name
class SnippetSerializer(serializers.ModelSerializer):
photo = WagtailImageSerializer()
class Meta:
model = StaffMemberSnippet
fields = ('name', 'job_title', 'email', 'office_phone_number', 'mobile_phone_number', 'job_description', 'office_location', 'photo')
|
<commit_before>from django.conf import settings
from django.urls import reverse
from rest_framework import serializers
from falmer.content.models import StaffMemberSnippet
from falmer.matte.models import MatteImage
def generate_image_url(image, filter_spec):
from wagtail.wagtailimages.views.serve import generate_signature
signature = generate_signature(image.id, filter_spec)
url = reverse('wagtailimages_serve', args=(signature, image.id, filter_spec))
# Append image's original filename to the URL (optional)
# url += image.file.name[len('original_images/'):]
return settings.PUBLIC_HOST + url
class WagtailImageSerializer(serializers.ModelSerializer):
wagtail_image = serializers.SerializerMethodField()
resource = serializers.SerializerMethodField()
class Meta:
model = MatteImage
fields = ('id', 'wagtail_image', 'resource')
def get_wagtail_image(self, image):
return generate_image_url(image, 'fill-400x400')
def get_resource(self, image):
return image.file.path
class SnippetSerializer(serializers.ModelSerializer):
photo = WagtailImageSerializer()
class Meta:
model = StaffMemberSnippet
fields = ('name', 'job_title', 'email', 'office_phone_number', 'mobile_phone_number', 'job_description', 'office_location', 'photo')
<commit_msg>Use name over path for s3 backend support<commit_after>
|
from django.conf import settings
from django.urls import reverse
from rest_framework import serializers
from falmer.content.models import StaffMemberSnippet
from falmer.matte.models import MatteImage
def generate_image_url(image, filter_spec):
from wagtail.wagtailimages.views.serve import generate_signature
signature = generate_signature(image.id, filter_spec)
url = reverse('wagtailimages_serve', args=(signature, image.id, filter_spec))
# Append image's original filename to the URL (optional)
# url += image.file.name[len('original_images/'):]
return settings.PUBLIC_HOST + url
class WagtailImageSerializer(serializers.ModelSerializer):
wagtail_image = serializers.SerializerMethodField()
resource = serializers.SerializerMethodField()
class Meta:
model = MatteImage
fields = ('id', 'wagtail_image', 'resource')
def get_wagtail_image(self, image):
return generate_image_url(image, 'fill-400x400')
def get_resource(self, image):
return image.file.name
class SnippetSerializer(serializers.ModelSerializer):
photo = WagtailImageSerializer()
class Meta:
model = StaffMemberSnippet
fields = ('name', 'job_title', 'email', 'office_phone_number', 'mobile_phone_number', 'job_description', 'office_location', 'photo')
|
from django.conf import settings
from django.urls import reverse
from rest_framework import serializers
from falmer.content.models import StaffMemberSnippet
from falmer.matte.models import MatteImage
def generate_image_url(image, filter_spec):
from wagtail.wagtailimages.views.serve import generate_signature
signature = generate_signature(image.id, filter_spec)
url = reverse('wagtailimages_serve', args=(signature, image.id, filter_spec))
# Append image's original filename to the URL (optional)
# url += image.file.name[len('original_images/'):]
return settings.PUBLIC_HOST + url
class WagtailImageSerializer(serializers.ModelSerializer):
wagtail_image = serializers.SerializerMethodField()
resource = serializers.SerializerMethodField()
class Meta:
model = MatteImage
fields = ('id', 'wagtail_image', 'resource')
def get_wagtail_image(self, image):
return generate_image_url(image, 'fill-400x400')
def get_resource(self, image):
return image.file.path
class SnippetSerializer(serializers.ModelSerializer):
photo = WagtailImageSerializer()
class Meta:
model = StaffMemberSnippet
fields = ('name', 'job_title', 'email', 'office_phone_number', 'mobile_phone_number', 'job_description', 'office_location', 'photo')
Use name over path for s3 backend support
from django.conf import settings
from django.urls import reverse
from rest_framework import serializers
from falmer.content.models import StaffMemberSnippet
from falmer.matte.models import MatteImage
def generate_image_url(image, filter_spec):
from wagtail.wagtailimages.views.serve import generate_signature
signature = generate_signature(image.id, filter_spec)
url = reverse('wagtailimages_serve', args=(signature, image.id, filter_spec))
# Append image's original filename to the URL (optional)
# url += image.file.name[len('original_images/'):]
return settings.PUBLIC_HOST + url
class WagtailImageSerializer(serializers.ModelSerializer):
wagtail_image = serializers.SerializerMethodField()
resource = serializers.SerializerMethodField()
class Meta:
model = MatteImage
fields = ('id', 'wagtail_image', 'resource')
def get_wagtail_image(self, image):
return generate_image_url(image, 'fill-400x400')
def get_resource(self, image):
return image.file.name
class SnippetSerializer(serializers.ModelSerializer):
photo = WagtailImageSerializer()
class Meta:
model = StaffMemberSnippet
fields = ('name', 'job_title', 'email', 'office_phone_number', 'mobile_phone_number', 'job_description', 'office_location', 'photo')
|
<commit_before>from django.conf import settings
from django.urls import reverse
from rest_framework import serializers
from falmer.content.models import StaffMemberSnippet
from falmer.matte.models import MatteImage
def generate_image_url(image, filter_spec):
from wagtail.wagtailimages.views.serve import generate_signature
signature = generate_signature(image.id, filter_spec)
url = reverse('wagtailimages_serve', args=(signature, image.id, filter_spec))
# Append image's original filename to the URL (optional)
# url += image.file.name[len('original_images/'):]
return settings.PUBLIC_HOST + url
class WagtailImageSerializer(serializers.ModelSerializer):
wagtail_image = serializers.SerializerMethodField()
resource = serializers.SerializerMethodField()
class Meta:
model = MatteImage
fields = ('id', 'wagtail_image', 'resource')
def get_wagtail_image(self, image):
return generate_image_url(image, 'fill-400x400')
def get_resource(self, image):
return image.file.path
class SnippetSerializer(serializers.ModelSerializer):
photo = WagtailImageSerializer()
class Meta:
model = StaffMemberSnippet
fields = ('name', 'job_title', 'email', 'office_phone_number', 'mobile_phone_number', 'job_description', 'office_location', 'photo')
<commit_msg>Use name over path for s3 backend support<commit_after>from django.conf import settings
from django.urls import reverse
from rest_framework import serializers
from falmer.content.models import StaffMemberSnippet
from falmer.matte.models import MatteImage
def generate_image_url(image, filter_spec):
from wagtail.wagtailimages.views.serve import generate_signature
signature = generate_signature(image.id, filter_spec)
url = reverse('wagtailimages_serve', args=(signature, image.id, filter_spec))
# Append image's original filename to the URL (optional)
# url += image.file.name[len('original_images/'):]
return settings.PUBLIC_HOST + url
class WagtailImageSerializer(serializers.ModelSerializer):
wagtail_image = serializers.SerializerMethodField()
resource = serializers.SerializerMethodField()
class Meta:
model = MatteImage
fields = ('id', 'wagtail_image', 'resource')
def get_wagtail_image(self, image):
return generate_image_url(image, 'fill-400x400')
def get_resource(self, image):
return image.file.name
class SnippetSerializer(serializers.ModelSerializer):
photo = WagtailImageSerializer()
class Meta:
model = StaffMemberSnippet
fields = ('name', 'job_title', 'email', 'office_phone_number', 'mobile_phone_number', 'job_description', 'office_location', 'photo')
|
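An illustrative contrast behind the change above (not from the commit): Django's FieldFile.path requires a local filesystem and raises NotImplementedError on remote storages such as S3, while FieldFile.name is the storage-relative key every backend provides:

def safe_resource(field_file):
    # Prefer the local path when the storage can supply one, and fall back
    # to the backend-agnostic name otherwise (e.g. S3 via django-storages).
    try:
        return field_file.path
    except NotImplementedError:
        return field_file.name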
f3974375e2c71c9c9bfba6fde356014a07e0b704
|
ee_plugin/ee_auth.py
|
ee_plugin/ee_auth.py
|
# -*- coding: utf-8 -*-
"""
Init and user authentication in Earth Engine
"""
import webbrowser
from qgis.PyQt.QtWidgets import QInputDialog
import ee
def init():
try:
ee.Initialize()
except ee.ee_exception.EEException:
authenticate()
ee.Initialize() # retry initialization once the user logs in
def authenticate():
auth_url = ee.oauth.get_authorization_url()
webbrowser.open_new(auth_url)
print('\nEarth Engine Authentication:\n'
'If the web browser does not start automatically, '
'please manually browse the URL below:\n"{}"'.format(auth_url))
token, ok = QInputDialog.getText(None, 'Earth Engine Authentication',
'To authorize access needed by Earth Engine, follow the\n'
'instructions and paste the token here:\n\n'
'(If the web browser does not start automatically\n'
'see the python shell).')
if ok and token:
ee.oauth._obtain_and_write_token(token.strip())
|
# -*- coding: utf-8 -*-
"""
Init and user authentication in Earth Engine
"""
import webbrowser
from qgis.PyQt.QtWidgets import QInputDialog
import ee
import logging
# fix the warnings/errors messages from 'file_cache is unavailable when using oauth2client'
# https://github.com/googleapis/google-api-python-client/issues/299
logging.getLogger('googleapiclient.discovery_cache').setLevel(logging.ERROR)
def init():
try:
ee.Initialize()
except ee.ee_exception.EEException:
authenticate()
ee.Initialize() # retry initialization once the user logs in
def authenticate():
auth_url = ee.oauth.get_authorization_url()
webbrowser.open_new(auth_url)
print('\nEarth Engine Authentication:\n'
'If the web browser does not start automatically, '
'please manually browse the URL below:\n"{}"'.format(auth_url))
token, ok = QInputDialog.getText(None, 'Earth Engine Authentication',
'To authorize access needed by Earth Engine, follow the\n'
'instructions and paste the token here:\n\n'
'(If the web browser does not start automatically\n'
'see the python shell).')
if ok and token:
ee.oauth._obtain_and_write_token(token.strip())
|
Fix the warnings/errors messages from 'file_cache is unavailable when using oauth2client'
|
Fix the warnings/errors messages from 'file_cache is unavailable when using oauth2client'
|
Python
|
mit
|
gena/qgis-earthengine-plugin,gena/qgis-earthengine-plugin
|
# -*- coding: utf-8 -*-
"""
Init and user authentication in Earth Engine
"""
import webbrowser
from qgis.PyQt.QtWidgets import QInputDialog
import ee
def init():
try:
ee.Initialize()
except ee.ee_exception.EEException:
authenticate()
ee.Initialize() # retry initialization once the user logs in
def authenticate():
auth_url = ee.oauth.get_authorization_url()
webbrowser.open_new(auth_url)
print('\nEarth Engine Authentication:\n'
'If the web browser does not start automatically, '
'please manually browse the URL below:\n"{}"'.format(auth_url))
token, ok = QInputDialog.getText(None, 'Earth Engine Authentication',
'To authorize access needed by Earth Engine, follow the\n'
'instructions and paste the token here:\n\n'
'(If the web browser does not start automatically\n'
'see the python shell).')
if ok and token:
ee.oauth._obtain_and_write_token(token.strip())
Fix the warnings/errors messages from 'file_cache is unavailable when using oauth2client'
|
# -*- coding: utf-8 -*-
"""
Init and user authentication in Earth Engine
"""
import webbrowser
from qgis.PyQt.QtWidgets import QInputDialog
import ee
import logging
# fix the warnings/errors messages from 'file_cache is unavailable when using oauth2client'
# https://github.com/googleapis/google-api-python-client/issues/299
logging.getLogger('googleapiclient.discovery_cache').setLevel(logging.ERROR)
def init():
try:
ee.Initialize()
except ee.ee_exception.EEException:
authenticate()
ee.Initialize() # retry initialization once the user logs in
def authenticate():
auth_url = ee.oauth.get_authorization_url()
webbrowser.open_new(auth_url)
print('\nEarth Engine Authentication:\n'
'If the web browser does not start automatically, '
'please manually browse the URL below:\n"{}"'.format(auth_url))
token, ok = QInputDialog.getText(None, 'Earth Engine Authentication',
'To authorize access needed by Earth Engine, follow the\n'
'instructions and paste the token here:\n\n'
'(If the web browser does not start automatically\n'
'see the python shell).')
if ok and token:
ee.oauth._obtain_and_write_token(token.strip())
|
<commit_before># -*- coding: utf-8 -*-
"""
Init and user authentication in Earth Engine
"""
import webbrowser
from qgis.PyQt.QtWidgets import QInputDialog
import ee
def init():
try:
ee.Initialize()
except ee.ee_exception.EEException:
authenticate()
ee.Initialize() # retry initialization once the user logs in
def authenticate():
auth_url = ee.oauth.get_authorization_url()
webbrowser.open_new(auth_url)
print('\nEarth Engine Authentication:\n'
'If the web browser does not start automatically, '
'please manually browse the URL below:\n"{}"'.format(auth_url))
token, ok = QInputDialog.getText(None, 'Earth Engine Authentication',
'To authorize access needed by Earth Engine, follow the\n'
'instructions and paste the token here:\n\n'
'(If the web browser does not start automatically\n'
'see the python shell).')
if ok and token:
ee.oauth._obtain_and_write_token(token.strip())
<commit_msg>Fix the warning/error messages from 'file_cache is unavailable when using oauth2client'<commit_after>
|
# -*- coding: utf-8 -*-
"""
Init and user authentication in Earth Engine
"""
import webbrowser
from qgis.PyQt.QtWidgets import QInputDialog
import ee
import logging
# fix the warning/error messages from 'file_cache is unavailable when using oauth2client'
# https://github.com/googleapis/google-api-python-client/issues/299
logging.getLogger('googleapiclient.discovery_cache').setLevel(logging.ERROR)
def init():
try:
ee.Initialize()
except ee.ee_exception.EEException:
authenticate()
ee.Initialize() # retry initialization once the user logs in
def authenticate():
auth_url = ee.oauth.get_authorization_url()
webbrowser.open_new(auth_url)
print('\nEarth Engine Authentication:\n'
'If the web browser does not start automatically, '
'please manually browse the URL below:\n"{}"'.format(auth_url))
token, ok = QInputDialog.getText(None, 'Earth Engine Authentication',
'To authorize access needed by Earth Engine, follow the\n'
'instructions and paste the token here:\n\n'
'(If the web browser does not start automatically\n'
'see the python shell).')
if ok and token:
ee.oauth._obtain_and_write_token(token.strip())
|
# -*- coding: utf-8 -*-
"""
Init and user authentication in Earth Engine
"""
import webbrowser
from qgis.PyQt.QtWidgets import QInputDialog
import ee
def init():
try:
ee.Initialize()
except ee.ee_exception.EEException:
authenticate()
ee.Initialize() # retry initialization once the user logs in
def authenticate():
auth_url = ee.oauth.get_authorization_url()
webbrowser.open_new(auth_url)
print('\nEarth Engine Authentication:\n'
'If the web browser does not start automatically, '
'please manually browse the URL below:\n"{}"'.format(auth_url))
token, ok = QInputDialog.getText(None, 'Earth Engine Authentication',
'To authorize access needed by Earth Engine, follow the\n'
'instructions and paste the token here:\n\n'
'(If the web browser does not start automatically\n'
'see the python shell).')
if ok and token:
ee.oauth._obtain_and_write_token(token.strip())
Fix the warning/error messages from 'file_cache is unavailable when using oauth2client'# -*- coding: utf-8 -*-
"""
Init and user authentication in Earth Engine
"""
import webbrowser
from qgis.PyQt.QtWidgets import QInputDialog
import ee
import logging
# fix the warning/error messages from 'file_cache is unavailable when using oauth2client'
# https://github.com/googleapis/google-api-python-client/issues/299
logging.getLogger('googleapiclient.discovery_cache').setLevel(logging.ERROR)
def init():
try:
ee.Initialize()
except ee.ee_exception.EEException:
authenticate()
ee.Initialize() # retry initialization once the user logs in
def authenticate():
auth_url = ee.oauth.get_authorization_url()
webbrowser.open_new(auth_url)
print('\nEarth Engine Authentication:\n'
'If the web browser does not start automatically, '
'please manually browse the URL below:\n"{}"'.format(auth_url))
token, ok = QInputDialog.getText(None, 'Earth Engine Authentication',
'To authorize access needed by Earth Engine, follow the\n'
'instructions and paste the token here:\n\n'
'(If the web browser does not start automatically\n'
'see the python shell).')
if ok and token:
ee.oauth._obtain_and_write_token(token.strip())
|
<commit_before># -*- coding: utf-8 -*-
"""
Init and user authentication in Earth Engine
"""
import webbrowser
from qgis.PyQt.QtWidgets import QInputDialog
import ee
def init():
try:
ee.Initialize()
except ee.ee_exception.EEException:
authenticate()
ee.Initialize() # retry initialization once the user logs in
def authenticate():
auth_url = ee.oauth.get_authorization_url()
webbrowser.open_new(auth_url)
print('\nEarth Engine Authentication:\n'
'If the web browser does not start automatically, '
'please manually browse the URL below:\n"{}"'.format(auth_url))
token, ok = QInputDialog.getText(None, 'Earth Engine Authentication',
'To authorize access needed by Earth Engine, follow the\n'
'instructions and paste the token here:\n\n'
'(If the web browser does not start automatically\n'
'see the python shell).')
if ok and token:
ee.oauth._obtain_and_write_token(token.strip())
<commit_msg>Fix the warning/error messages from 'file_cache is unavailable when using oauth2client'<commit_after># -*- coding: utf-8 -*-
"""
Init and user authentication in Earth Engine
"""
import webbrowser
from qgis.PyQt.QtWidgets import QInputDialog
import ee
import logging
# fix the warning/error messages from 'file_cache is unavailable when using oauth2client'
# https://github.com/googleapis/google-api-python-client/issues/299
logging.getLogger('googleapiclient.discovery_cache').setLevel(logging.ERROR)
def init():
try:
ee.Initialize()
except ee.ee_exception.EEException:
authenticate()
ee.Initialize() # retry initialization once the user logs in
def authenticate():
auth_url = ee.oauth.get_authorization_url()
webbrowser.open_new(auth_url)
print('\nEarth Engine Authentication:\n'
'If the web browser does not start automatically, '
'please manually browse the URL below:\n"{}"'.format(auth_url))
token, ok = QInputDialog.getText(None, 'Earth Engine Authentication',
'To authorize access needed by Earth Engine, follow the\n'
'instructions and paste the token here:\n\n'
'(If the web browser does not start automatically\n'
'see the python shell).')
if ok and token:
ee.oauth._obtain_and_write_token(token.strip())
|
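For context on the fix above: raising a noisy third-party logger's threshold is a general pattern, not specific to QGIS. A minimal self-contained sketch -- the 'googleapiclient.discovery_cache' logger name is the real one behind this warning, while the urllib3 line is only a further illustration:

import logging

# googleapiclient emits "file_cache is unavailable when using oauth2client"
# through this logger; raising its level to ERROR hides the message without
# touching the root logger or any other logging configuration.
logging.getLogger('googleapiclient.discovery_cache').setLevel(logging.ERROR)

# Any other chatty dependency can be quieted the same way, for example:
logging.getLogger('urllib3.connectionpool').setLevel(logging.ERROR)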
34641c012d740d38e7ce8ef9619722177665da3f
|
fireplace/cards/tgt/shaman.py
|
fireplace/cards/tgt/shaman.py
|
from ..utils import *
##
# Hero Powers
# Lightning Jolt
class AT_050t:
play = Hit(TARGET, 2)
##
# Minions
# Tuskarr Totemic
class AT_046:
play = Summon(CONTROLLER, RandomTotem())
# Draenei Totemcarver
class AT_047:
play = Buff(SELF, "AT_047e") * Count(FRIENDLY_MINIONS + TOTEM)
# Thunder Bluff Valiant
class AT_049:
inspire = Buff(FRIENDLY_MINIONS + TOTEM, "AT_049e")
# The Mistcaller
class AT_054:
# The Enchantment ID is correct
play = Buff(FRIENDLY + (IN_DECK | IN_HAND), "AT_045e")
##
# Spells
# Healing Wave
class AT_048:
play = JOUST & Heal(TARGET, 7) | Heal(TARGET, 14)
# Elemental Destruction
class AT_051:
play = Hit(ALL_MINIONS, RandomNumber(4, 5))
# Ancestral Knowledge
class AT_053:
play = Draw(CONTROLLER) * 2
##
# Weapons
# Charged Hammer
class AT_050:
deathrattle = Summon(CONTROLLER, "AT_050t")
|
from ..utils import *
##
# Minions
# Tuskarr Totemic
class AT_046:
play = Summon(CONTROLLER, RandomTotem())
# Draenei Totemcarver
class AT_047:
play = Buff(SELF, "AT_047e") * Count(FRIENDLY_MINIONS + TOTEM)
# Thunder Bluff Valiant
class AT_049:
inspire = Buff(FRIENDLY_MINIONS + TOTEM, "AT_049e")
# The Mistcaller
class AT_054:
# The Enchantment ID is correct
play = Buff(FRIENDLY + (IN_DECK | IN_HAND), "AT_045e")
##
# Spells
# Healing Wave
class AT_048:
play = JOUST & Heal(TARGET, 7) | Heal(TARGET, 14)
# Elemental Destruction
class AT_051:
play = Hit(ALL_MINIONS, RandomNumber(4, 5))
# Ancestral Knowledge
class AT_053:
play = Draw(CONTROLLER) * 2
##
# Weapons
# Charged Hammer
class AT_050:
deathrattle = Summon(CONTROLLER, "AT_050t")
class AT_050t:
activate = Hit(TARGET, 2)
|
Fix Charged Hammer / Lightning Jolt
|
Fix Charged Hammer / Lightning Jolt
|
Python
|
agpl-3.0
|
liujimj/fireplace,Ragowit/fireplace,jleclanche/fireplace,NightKev/fireplace,Ragowit/fireplace,Meerkov/fireplace,oftc-ftw/fireplace,Meerkov/fireplace,smallnamespace/fireplace,amw2104/fireplace,beheh/fireplace,oftc-ftw/fireplace,liujimj/fireplace,smallnamespace/fireplace,amw2104/fireplace
|
from ..utils import *
##
# Hero Powers
# Lightning Jolt
class AT_050t:
play = Hit(TARGET, 2)
##
# Minions
# Tuskarr Totemic
class AT_046:
play = Summon(CONTROLLER, RandomTotem())
# Draenei Totemcarver
class AT_047:
play = Buff(SELF, "AT_047e") * Count(FRIENDLY_MINIONS + TOTEM)
# Thunder Bluff Valiant
class AT_049:
inspire = Buff(FRIENDLY_MINIONS + TOTEM, "AT_049e")
# The Mistcaller
class AT_054:
# The Enchantment ID is correct
play = Buff(FRIENDLY + (IN_DECK | IN_HAND), "AT_045e")
##
# Spells
# Healing Wave
class AT_048:
play = JOUST & Heal(TARGET, 7) | Heal(TARGET, 14)
# Elemental Destruction
class AT_051:
play = Hit(ALL_MINIONS, RandomNumber(4, 5))
# Ancestral Knowledge
class AT_053:
play = Draw(CONTROLLER) * 2
##
# Weapons
# Charged Hammer
class AT_050:
deathrattle = Summon(CONTROLLER, "AT_050t")
Fix Charged Hammer / Lightning Jolt
|
from ..utils import *
##
# Minions
# Tuskarr Totemic
class AT_046:
play = Summon(CONTROLLER, RandomTotem())
# Draenei Totemcarver
class AT_047:
play = Buff(SELF, "AT_047e") * Count(FRIENDLY_MINIONS + TOTEM)
# Thunder Bluff Valiant
class AT_049:
inspire = Buff(FRIENDLY_MINIONS + TOTEM, "AT_049e")
# The Mistcaller
class AT_054:
# The Enchantment ID is correct
play = Buff(FRIENDLY + (IN_DECK | IN_HAND), "AT_045e")
##
# Spells
# Healing Wave
class AT_048:
play = JOUST & Heal(TARGET, 7) | Heal(TARGET, 14)
# Elemental Destruction
class AT_051:
play = Hit(ALL_MINIONS, RandomNumber(4, 5))
# Ancestral Knowledge
class AT_053:
play = Draw(CONTROLLER) * 2
##
# Weapons
# Charged Hammer
class AT_050:
deathrattle = Summon(CONTROLLER, "AT_050t")
class AT_050t:
activate = Hit(TARGET, 2)
|
<commit_before>from ..utils import *
##
# Hero Powers
# Lightning Jolt
class AT_050t:
play = Hit(TARGET, 2)
##
# Minions
# Tuskarr Totemic
class AT_046:
play = Summon(CONTROLLER, RandomTotem())
# Draenei Totemcarver
class AT_047:
play = Buff(SELF, "AT_047e") * Count(FRIENDLY_MINIONS + TOTEM)
# Thunder Bluff Valiant
class AT_049:
inspire = Buff(FRIENDLY_MINIONS + TOTEM, "AT_049e")
# The Mistcaller
class AT_054:
# The Enchantment ID is correct
play = Buff(FRIENDLY + (IN_DECK | IN_HAND), "AT_045e")
##
# Spells
# Healing Wave
class AT_048:
play = JOUST & Heal(TARGET, 7) | Heal(TARGET, 14)
# Elemental Destruction
class AT_051:
play = Hit(ALL_MINIONS, RandomNumber(4, 5))
# Ancestral Knowledge
class AT_053:
play = Draw(CONTROLLER) * 2
##
# Weapons
# Charged Hammer
class AT_050:
deathrattle = Summon(CONTROLLER, "AT_050t")
<commit_msg>Fix Charged Hammer / Lightning Jolt<commit_after>
|
from ..utils import *
##
# Minions
# Tuskarr Totemic
class AT_046:
play = Summon(CONTROLLER, RandomTotem())
# Draenei Totemcarver
class AT_047:
play = Buff(SELF, "AT_047e") * Count(FRIENDLY_MINIONS + TOTEM)
# Thunder Bluff Valiant
class AT_049:
inspire = Buff(FRIENDLY_MINIONS + TOTEM, "AT_049e")
# The Mistcaller
class AT_054:
# The Enchantment ID is correct
play = Buff(FRIENDLY + (IN_DECK | IN_HAND), "AT_045e")
##
# Spells
# Healing Wave
class AT_048:
play = JOUST & Heal(TARGET, 7) | Heal(TARGET, 14)
# Elemental Destruction
class AT_051:
play = Hit(ALL_MINIONS, RandomNumber(4, 5))
# Ancestral Knowledge
class AT_053:
play = Draw(CONTROLLER) * 2
##
# Weapons
# Charged Hammer
class AT_050:
deathrattle = Summon(CONTROLLER, "AT_050t")
class AT_050t:
activate = Hit(TARGET, 2)
|
from ..utils import *
##
# Hero Powers
# Lightning Jolt
class AT_050t:
play = Hit(TARGET, 2)
##
# Minions
# Tuskarr Totemic
class AT_046:
play = Summon(CONTROLLER, RandomTotem())
# Draenei Totemcarver
class AT_047:
play = Buff(SELF, "AT_047e") * Count(FRIENDLY_MINIONS + TOTEM)
# Thunder Bluff Valiant
class AT_049:
inspire = Buff(FRIENDLY_MINIONS + TOTEM, "AT_049e")
# The Mistcaller
class AT_054:
# The Enchantment ID is correct
play = Buff(FRIENDLY + (IN_DECK | IN_HAND), "AT_045e")
##
# Spells
# Healing Wave
class AT_048:
play = JOUST & Heal(TARGET, 7) | Heal(TARGET, 14)
# Elemental Destruction
class AT_051:
play = Hit(ALL_MINIONS, RandomNumber(4, 5))
# Ancestral Knowledge
class AT_053:
play = Draw(CONTROLLER) * 2
##
# Weapons
# Charged Hammer
class AT_050:
deathrattle = Summon(CONTROLLER, "AT_050t")
Fix Charged Hammer / Lightning Joltfrom ..utils import *
##
# Minions
# Tuskarr Totemic
class AT_046:
play = Summon(CONTROLLER, RandomTotem())
# Draenei Totemcarver
class AT_047:
play = Buff(SELF, "AT_047e") * Count(FRIENDLY_MINIONS + TOTEM)
# Thunder Bluff Valiant
class AT_049:
inspire = Buff(FRIENDLY_MINIONS + TOTEM, "AT_049e")
# The Mistcaller
class AT_054:
# The Enchantment ID is correct
play = Buff(FRIENDLY + (IN_DECK | IN_HAND), "AT_045e")
##
# Spells
# Healing Wave
class AT_048:
play = JOUST & Heal(TARGET, 7) | Heal(TARGET, 14)
# Elemental Destruction
class AT_051:
play = Hit(ALL_MINIONS, RandomNumber(4, 5))
# Ancestral Knowledge
class AT_053:
play = Draw(CONTROLLER) * 2
##
# Weapons
# Charged Hammer
class AT_050:
deathrattle = Summon(CONTROLLER, "AT_050t")
class AT_050t:
activate = Hit(TARGET, 2)
|
<commit_before>from ..utils import *
##
# Hero Powers
# Lightning Jolt
class AT_050t:
play = Hit(TARGET, 2)
##
# Minions
# Tuskarr Totemic
class AT_046:
play = Summon(CONTROLLER, RandomTotem())
# Draenei Totemcarver
class AT_047:
play = Buff(SELF, "AT_047e") * Count(FRIENDLY_MINIONS + TOTEM)
# Thunder Bluff Valiant
class AT_049:
inspire = Buff(FRIENDLY_MINIONS + TOTEM, "AT_049e")
# The Mistcaller
class AT_054:
# The Enchantment ID is correct
play = Buff(FRIENDLY + (IN_DECK | IN_HAND), "AT_045e")
##
# Spells
# Healing Wave
class AT_048:
play = JOUST & Heal(TARGET, 7) | Heal(TARGET, 14)
# Elemental Destruction
class AT_051:
play = Hit(ALL_MINIONS, RandomNumber(4, 5))
# Ancestral Knowledge
class AT_053:
play = Draw(CONTROLLER) * 2
##
# Weapons
# Charged Hammer
class AT_050:
deathrattle = Summon(CONTROLLER, "AT_050t")
<commit_msg>Fix Charged Hammer / Lightning Jolt<commit_after>from ..utils import *
##
# Minions
# Tuskarr Totemic
class AT_046:
play = Summon(CONTROLLER, RandomTotem())
# Draenei Totemcarver
class AT_047:
play = Buff(SELF, "AT_047e") * Count(FRIENDLY_MINIONS + TOTEM)
# Thunder Bluff Valiant
class AT_049:
inspire = Buff(FRIENDLY_MINIONS + TOTEM, "AT_049e")
# The Mistcaller
class AT_054:
# The Enchantment ID is correct
play = Buff(FRIENDLY + (IN_DECK | IN_HAND), "AT_045e")
##
# Spells
# Healing Wave
class AT_048:
play = JOUST & Heal(TARGET, 7) | Heal(TARGET, 14)
# Elemental Destruction
class AT_051:
play = Hit(ALL_MINIONS, RandomNumber(4, 5))
# Ancestral Knowledge
class AT_053:
play = Draw(CONTROLLER) * 2
##
# Weapons
# Charged Hammer
class AT_050:
deathrattle = Summon(CONTROLLER, "AT_050t")
class AT_050t:
activate = Hit(TARGET, 2)
|
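The fix above moves Lightning Jolt's damage from a `play` trigger (resolved when a card is played) to an `activate` trigger (resolved when a hero power is used). Outside the fireplace DSL the distinction is simply which event a script is bound to; a stand-alone sketch under that reading -- none of these names are fireplace API:

class CardScript:
    # Effects keyed by trigger name, e.g. {'play': fn} or {'activate': fn}.
    def __init__(self, **effects):
        self.effects = effects

    def fire(self, trigger, target):
        effect = self.effects.get(trigger)
        if effect is not None:
            effect(target)

def hit_two(target):
    target['health'] -= 2

# Bound to 'activate': firing 'play' does nothing, as for a hero power.
lightning_jolt = CardScript(activate=hit_two)

enemy = {'health': 30}
lightning_jolt.fire('play', enemy)      # no effect -- wrong trigger
lightning_jolt.fire('activate', enemy)  # 30 -> 28
assert enemy['health'] == 28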
f915000cf88a80beadc725ab10e48d2b14d1be23
|
enlighten/counter.py
|
enlighten/counter.py
|
# -*- coding: utf-8 -*-
# Copyright 2017 - 2019 Avram Lubkin, All Rights Reserved
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
**Enlighten counter submodule**
Provides Counter class
"""
import sys
from enlighten._counter import Counter as _Counter
from enlighten._counter import SubCounter # pylint: disable=unused-import # noqa: F401
from enlighten._manager import Manager
# Counter is defined here to avoid circular dependencies
class Counter(_Counter): # pylint: disable=missing-docstring
__doc__ = _Counter.__doc__
def __init__(self, **kwargs):
manager = kwargs.get('manager', None)
if manager is None:
manager = Manager(stream=kwargs.get('stream', sys.stdout),
counter_class=self.__class__, set_scroll=False)
manager.counters[self] = 1
kwargs['manager'] = manager
super(Counter, self).__init__(**kwargs)
|
# -*- coding: utf-8 -*-
# Copyright 2017 - 2019 Avram Lubkin, All Rights Reserved
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
**Enlighten counter submodule**
Provides Counter class
"""
import sys
from enlighten._counter import Counter as _Counter
from enlighten._counter import SubCounter # pylint: disable=unused-import # noqa: F401
from enlighten._manager import get_manager
# Counter is defined here to avoid circular dependencies
class Counter(_Counter): # pylint: disable=missing-docstring
__doc__ = _Counter.__doc__
def __init__(self, **kwargs):
manager = kwargs.get('manager', None)
if manager is None:
manager = get_manager(stream=kwargs.get('stream', sys.stdout),
counter_class=self.__class__, set_scroll=False)
manager.counters[self] = 1
kwargs['manager'] = manager
super(Counter, self).__init__(**kwargs)
|
Use get_manager() for direct Counter use
|
Use get_manager() for direct Counter use
|
Python
|
mpl-2.0
|
Rockhopper-Technologies/enlighten
|
# -*- coding: utf-8 -*-
# Copyright 2017 - 2019 Avram Lubkin, All Rights Reserved
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
**Enlighten counter submodule**
Provides Counter class
"""
import sys
from enlighten._counter import Counter as _Counter
from enlighten._counter import SubCounter # pylint: disable=unused-import # noqa: F401
from enlighten._manager import Manager
# Counter is defined here to avoid circular dependencies
class Counter(_Counter): # pylint: disable=missing-docstring
__doc__ = _Counter.__doc__
def __init__(self, **kwargs):
manager = kwargs.get('manager', None)
if manager is None:
manager = Manager(stream=kwargs.get('stream', sys.stdout),
counter_class=self.__class__, set_scroll=False)
manager.counters[self] = 1
kwargs['manager'] = manager
super(Counter, self).__init__(**kwargs)
Use get_manager() for direct Counter use
|
# -*- coding: utf-8 -*-
# Copyright 2017 - 2019 Avram Lubkin, All Rights Reserved
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
**Enlighten counter submodule**
Provides Counter class
"""
import sys
from enlighten._counter import Counter as _Counter
from enlighten._counter import SubCounter # pylint: disable=unused-import # noqa: F401
from enlighten._manager import get_manager
# Counter is defined here to avoid circular dependencies
class Counter(_Counter): # pylint: disable=missing-docstring
__doc__ = _Counter.__doc__
def __init__(self, **kwargs):
manager = kwargs.get('manager', None)
if manager is None:
manager = get_manager(stream=kwargs.get('stream', sys.stdout),
counter_class=self.__class__, set_scroll=False)
manager.counters[self] = 1
kwargs['manager'] = manager
super(Counter, self).__init__(**kwargs)
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2017 - 2019 Avram Lubkin, All Rights Reserved
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
**Enlighten counter submodule**
Provides Counter class
"""
import sys
from enlighten._counter import Counter as _Counter
from enlighten._counter import SubCounter # pylint: disable=unused-import # noqa: F401
from enlighten._manager import Manager
# Counter is defined here to avoid circular dependencies
class Counter(_Counter): # pylint: disable=missing-docstring
__doc__ = _Counter.__doc__
def __init__(self, **kwargs):
manager = kwargs.get('manager', None)
if manager is None:
manager = Manager(stream=kwargs.get('stream', sys.stdout),
counter_class=self.__class__, set_scroll=False)
manager.counters[self] = 1
kwargs['manager'] = manager
super(Counter, self).__init__(**kwargs)
<commit_msg>Use get_manager() for direct Counter use<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright 2017 - 2019 Avram Lubkin, All Rights Reserved
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
**Enlighten counter submodule**
Provides Counter class
"""
import sys
from enlighten._counter import Counter as _Counter
from enlighten._counter import SubCounter # pylint: disable=unused-import # noqa: F401
from enlighten._manager import get_manager
# Counter is defined here to avoid circular dependencies
class Counter(_Counter): # pylint: disable=missing-docstring
__doc__ = _Counter.__doc__
def __init__(self, **kwargs):
manager = kwargs.get('manager', None)
if manager is None:
manager = get_manager(stream=kwargs.get('stream', sys.stdout),
counter_class=self.__class__, set_scroll=False)
manager.counters[self] = 1
kwargs['manager'] = manager
super(Counter, self).__init__(**kwargs)
|
# -*- coding: utf-8 -*-
# Copyright 2017 - 2019 Avram Lubkin, All Rights Reserved
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
**Enlighten counter submodule**
Provides Counter class
"""
import sys
from enlighten._counter import Counter as _Counter
from enlighten._counter import SubCounter # pylint: disable=unused-import # noqa: F401
from enlighten._manager import Manager
# Counter is defined here to avoid circular dependencies
class Counter(_Counter): # pylint: disable=missing-docstring
__doc__ = _Counter.__doc__
def __init__(self, **kwargs):
manager = kwargs.get('manager', None)
if manager is None:
manager = Manager(stream=kwargs.get('stream', sys.stdout),
counter_class=self.__class__, set_scroll=False)
manager.counters[self] = 1
kwargs['manager'] = manager
super(Counter, self).__init__(**kwargs)
Use get_manager() for direct Counter use# -*- coding: utf-8 -*-
# Copyright 2017 - 2019 Avram Lubkin, All Rights Reserved
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
**Enlighten counter submodule**
Provides Counter class
"""
import sys
from enlighten._counter import Counter as _Counter
from enlighten._counter import SubCounter # pylint: disable=unused-import # noqa: F401
from enlighten._manager import get_manager
# Counter is defined here to avoid circular dependencies
class Counter(_Counter): # pylint: disable=missing-docstring
__doc__ = _Counter.__doc__
def __init__(self, **kwargs):
manager = kwargs.get('manager', None)
if manager is None:
manager = get_manager(stream=kwargs.get('stream', sys.stdout),
counter_class=self.__class__, set_scroll=False)
manager.counters[self] = 1
kwargs['manager'] = manager
super(Counter, self).__init__(**kwargs)
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2017 - 2019 Avram Lubkin, All Rights Reserved
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
**Enlighten counter submodule**
Provides Counter class
"""
import sys
from enlighten._counter import Counter as _Counter
from enlighten._counter import SubCounter # pylint: disable=unused-import # noqa: F401
from enlighten._manager import Manager
# Counter is defined here to avoid circular dependencies
class Counter(_Counter): # pylint: disable=missing-docstring
__doc__ = _Counter.__doc__
def __init__(self, **kwargs):
manager = kwargs.get('manager', None)
if manager is None:
manager = Manager(stream=kwargs.get('stream', sys.stdout),
counter_class=self.__class__, set_scroll=False)
manager.counters[self] = 1
kwargs['manager'] = manager
super(Counter, self).__init__(**kwargs)
<commit_msg>Use get_manager() for direct Counter use<commit_after># -*- coding: utf-8 -*-
# Copyright 2017 - 2019 Avram Lubkin, All Rights Reserved
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
**Enlighten counter submodule**
Provides Counter class
"""
import sys
from enlighten._counter import Counter as _Counter
from enlighten._counter import SubCounter # pylint: disable=unused-import # noqa: F401
from enlighten._manager import get_manager
# Counter is defined here to avoid circular dependencies
class Counter(_Counter): # pylint: disable=missing-docstring
__doc__ = _Counter.__doc__
def __init__(self, **kwargs):
manager = kwargs.get('manager', None)
if manager is None:
manager = get_manager(stream=kwargs.get('stream', sys.stdout),
counter_class=self.__class__, set_scroll=False)
manager.counters[self] = 1
kwargs['manager'] = manager
super(Counter, self).__init__(**kwargs)
|
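Downstream, the change means a bare Counter behaves like one obtained through the library's documented entry point. A short usage sketch of enlighten's public API; treat the exact keyword names as an assumption:

import enlighten

manager = enlighten.get_manager()        # defaults to sys.stdout
pbar = manager.counter(total=100, desc='work', unit='items')

for _ in range(100):
    pbar.update()                        # advance by one item

manager.stop()                           # restore terminal state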
da40ff6b02d158612883ac7e61faf48da85c7d90
|
saleor/core/models.py
|
saleor/core/models.py
|
from django.db import models
from django.utils.translation import pgettext_lazy
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Setting(models.Model):
INTEGER = 'i'
STRING = 's'
BOOLEAN = 'b'
VALUE_TYPE_CHOICES = (
(INTEGER, pgettext_lazy('Settings', 'Integer')),
(STRING, pgettext_lazy('Settings', 'String')),
(BOOLEAN, pgettext_lazy('Settings', 'Boolean')),
)
name = models.CharField(
pgettext_lazy('Settings field', 'name'), max_length=128)
value_type = models.CharField(pgettext_lazy('Settings field', 'value type'),
max_length=1, choices=VALUE_TYPE_CHOICES)
value = models.CharField(
pgettext_lazy('Settings field', 'value'), max_length=256)
def convert_value(self):
if self.value_type == self.INTEGER:
return int(self.value)
elif self.value_type == self.BOOLEAN:
return self._to_bool()
elif self.value_type == self.STRING:
return self.value
else:
raise ValueError('Incorrect value')
def _to_bool(self):
values_dict = {'true': True, 'false': False}
try:
return values_dict[self.value.lower()]
except KeyError:
raise ValueError('Cannot convert to boolean')
def __str__(self):
return '%s: %s' % (self.name, self.value)
|
from django.db import models
from django.utils.translation import pgettext_lazy
from django.utils.encoding import python_2_unicode_compatible
INTEGER = 'i'
STRING = 's'
BOOLEAN = 'b'
@python_2_unicode_compatible
class Setting(models.Model):
VALUE_TYPE_CHOICES = (
(INTEGER, pgettext_lazy('Settings', 'Integer')),
(STRING, pgettext_lazy('Settings', 'String')),
(BOOLEAN, pgettext_lazy('Settings', 'Boolean')),
)
name = models.CharField(
pgettext_lazy('Settings field', 'name'), max_length=128)
value_type = models.CharField(pgettext_lazy('Settings field', 'value type'),
max_length=1, choices=VALUE_TYPE_CHOICES)
value = models.CharField(
pgettext_lazy('Settings field', 'value'), max_length=256)
def convert_value(self):
if self.value_type == self.INTEGER:
return int(self.value)
elif self.value_type == self.BOOLEAN:
return self._to_bool()
elif self.value_type == self.STRING:
return self.value
else:
raise ValueError('Incorrect value')
def _to_bool(self):
values_dict = {'true': True, 'false': False}
try:
return values_dict[self.value.lower()]
except KeyError:
raise ValueError('Cannot convert to boolean')
def __str__(self):
return '%s: %s' % (self.name, self.value)
|
Move choices outside of model class
|
Move choices outside of model class
|
Python
|
bsd-3-clause
|
mociepka/saleor,HyperManTT/ECommerceSaleor,tfroehlich82/saleor,itbabu/saleor,HyperManTT/ECommerceSaleor,tfroehlich82/saleor,jreigel/saleor,itbabu/saleor,itbabu/saleor,UITools/saleor,UITools/saleor,car3oon/saleor,tfroehlich82/saleor,maferelo/saleor,HyperManTT/ECommerceSaleor,UITools/saleor,car3oon/saleor,UITools/saleor,maferelo/saleor,mociepka/saleor,car3oon/saleor,maferelo/saleor,jreigel/saleor,KenMutemi/saleor,UITools/saleor,KenMutemi/saleor,jreigel/saleor,mociepka/saleor,KenMutemi/saleor
|
from django.db import models
from django.utils.translation import pgettext_lazy
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Setting(models.Model):
INTEGER = 'i'
STRING = 's'
BOOLEAN = 'b'
VALUE_TYPE_CHOICES = (
(INTEGER, pgettext_lazy('Settings', 'Integer')),
(STRING, pgettext_lazy('Settings', 'String')),
(BOOLEAN, pgettext_lazy('Settings', 'Boolean')),
)
name = models.CharField(
pgettext_lazy('Settings field', 'name'), max_length=128)
value_type = models.CharField(pgettext_lazy('Settings field', 'value type'),
max_length=1, choices=VALUE_TYPE_CHOICES)
value = models.CharField(
pgettext_lazy('Settings field', 'value'), max_length=256)
def convert_value(self):
if self.value_type == self.INTEGER:
return int(self.value)
elif self.value_type == self.BOOLEAN:
return self._to_bool()
elif self.value_type == self.STRING:
return self.value
else:
raise ValueError('Incorrect value')
def _to_bool(self):
values_dict = {'true': True, 'false': False}
try:
return values_dict[self.value.lower()]
except KeyError:
raise ValueError('Cannot convert to boolean')
def __str__(self):
return '%s: %s' % (self.name, self.value)
Move choices outside of model class
|
from django.db import models
from django.utils.translation import pgettext_lazy
from django.utils.encoding import python_2_unicode_compatible
INTEGER = 'i'
STRING = 's'
BOOLEAN = 'b'
@python_2_unicode_compatible
class Setting(models.Model):
VALUE_TYPE_CHOICES = (
(INTEGER, pgettext_lazy('Settings', 'Integer')),
(STRING, pgettext_lazy('Settings', 'String')),
(BOOLEAN, pgettext_lazy('Settings', 'Boolean')),
)
name = models.CharField(
pgettext_lazy('Settings field', 'name'), max_length=128)
value_type = models.CharField(pgettext_lazy('Settings field', 'value type'),
max_length=1, choices=VALUE_TYPE_CHOICES)
value = models.CharField(
pgettext_lazy('Settings field', 'value'), max_length=256)
def convert_value(self):
if self.value_type == self.INTEGER:
return int(self.value)
elif self.value_type == self.BOOLEAN:
return self._to_bool()
elif self.value_type == self.STRING:
return self.value
else:
raise ValueError('Incorrect value')
def _to_bool(self):
values_dict = {'true': True, 'false': False}
try:
return values_dict[self.value.lower()]
except KeyError:
raise ValueError('Cannot convert to boolean')
def __str__(self):
return '%s: %s' % (self.name, self.value)
|
<commit_before>from django.db import models
from django.utils.translation import pgettext_lazy
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Setting(models.Model):
INTEGER = 'i'
STRING = 's'
BOOLEAN = 'b'
VALUE_TYPE_CHOICES = (
(INTEGER, pgettext_lazy('Settings', 'Integer')),
(STRING, pgettext_lazy('Settings', 'String')),
(BOOLEAN, pgettext_lazy('Settings', 'Boolean')),
)
name = models.CharField(
pgettext_lazy('Settings field', 'name'), max_length=128)
value_type = models.CharField(pgettext_lazy('Settings field', 'value type'),
max_length=1, choices=VALUE_TYPE_CHOICES)
value = models.CharField(
pgettext_lazy('Settings field', 'value'), max_length=256)
def convert_value(self):
if self.value_type == self.INTEGER:
return int(self.value)
elif self.value_type == self.BOOLEAN:
return self._to_bool()
elif self.value_type == self.STRING:
return self.value
else:
raise ValueError('Incorrect value')
def _to_bool(self):
values_dict = {'true': True, 'false': False}
try:
return values_dict[self.value.lower()]
except KeyError:
raise ValueError('Cannot convert to boolean')
def __str__(self):
return '%s: %s' % (self.name, self.value)
<commit_msg>Move choices outside of model class<commit_after>
|
from django.db import models
from django.utils.translation import pgettext_lazy
from django.utils.encoding import python_2_unicode_compatible
INTEGER = 'i'
STRING = 's'
BOOLEAN = 'b'
@python_2_unicode_compatible
class Setting(models.Model):
VALUE_TYPE_CHOICES = (
(INTEGER, pgettext_lazy('Settings', 'Integer')),
(STRING, pgettext_lazy('Settings', 'String')),
(BOOLEAN, pgettext_lazy('Settings', 'Boolean')),
)
name = models.CharField(
pgettext_lazy('Settings field', 'name'), max_length=128)
value_type = models.CharField(pgettext_lazy('Settings field', 'value type'),
max_length=1, choices=VALUE_TYPE_CHOICES)
value = models.CharField(
pgettext_lazy('Settings field', 'value'), max_length=256)
def convert_value(self):
if self.value_type == self.INTEGER:
return int(self.value)
elif self.value_type == self.BOOLEAN:
return self._to_bool()
elif self.value_type == self.STRING:
return self.value
else:
raise ValueError('Incorrect value')
def _to_bool(self):
values_dict = {'true': True, 'false': False}
try:
return values_dict[self.value.lower()]
except KeyError:
raise ValueError('Cannot convert to boolean')
def __str__(self):
return '%s: %s' % (self.name, self.value)
|
from django.db import models
from django.utils.translation import pgettext_lazy
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Setting(models.Model):
INTEGER = 'i'
STRING = 's'
BOOLEAN = 'b'
VALUE_TYPE_CHOICES = (
(INTEGER, pgettext_lazy('Settings', 'Integer')),
(STRING, pgettext_lazy('Settings', 'String')),
(BOOLEAN, pgettext_lazy('Settings', 'Boolean')),
)
name = models.CharField(
pgettext_lazy('Settings field', 'name'), max_length=128)
value_type = models.CharField(pgettext_lazy('Settings field', 'value type'),
max_length=1, choices=VALUE_TYPE_CHOICES)
value = models.CharField(
pgettext_lazy('Settings field', 'value'), max_length=256)
def convert_value(self):
if self.value_type == self.INTEGER:
return int(self.value)
elif self.value_type == self.BOOLEAN:
return self._to_bool()
elif self.value_type == self.STRING:
return self.value
else:
raise ValueError('Incorrect value')
def _to_bool(self):
values_dict = {'true': True, 'false': False}
try:
return values_dict[self.value.lower()]
except KeyError:
raise ValueError('Cannot convert to boolean')
def __str__(self):
return '%s: %s' % (self.name, self.value)
Move choices outside of model classfrom django.db import models
from django.utils.translation import pgettext_lazy
from django.utils.encoding import python_2_unicode_compatible
INTEGER = 'i'
STRING = 's'
BOOLEAN = 'b'
@python_2_unicode_compatible
class Setting(models.Model):
VALUE_TYPE_CHOICES = (
(INTEGER, pgettext_lazy('Settings', 'Integer')),
(STRING, pgettext_lazy('Settings', 'String')),
(BOOLEAN, pgettext_lazy('Settings', 'Boolean')),
)
name = models.CharField(
pgettext_lazy('Settings field', 'name'), max_length=128)
value_type = models.CharField(pgettext_lazy('Settings field', 'value type'),
max_length=1, choices=VALUE_TYPE_CHOICES)
value = models.CharField(
pgettext_lazy('Settings field', 'value'), max_length=256)
def convert_value(self):
if self.value_type == self.INTEGER:
return int(self.value)
elif self.value_type == self.BOOLEAN:
return self._to_bool()
elif self.value_type == self.STRING:
return self.value
else:
raise ValueError('Incorrect value')
def _to_bool(self):
values_dict = {'true': True, 'false': False}
try:
return values_dict[self.value.lower()]
except KeyError:
raise ValueError('Cannot convert to boolean')
def __str__(self):
return '%s: %s' % (self.name, self.value)
|
<commit_before>from django.db import models
from django.utils.translation import pgettext_lazy
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Setting(models.Model):
INTEGER = 'i'
STRING = 's'
BOOLEAN = 'b'
VALUE_TYPE_CHOICES = (
(INTEGER, pgettext_lazy('Settings', 'Integer')),
(STRING, pgettext_lazy('Settings', 'String')),
(BOOLEAN, pgettext_lazy('Settings', 'Boolean')),
)
name = models.CharField(
pgettext_lazy('Settings field', 'name'), max_length=128)
value_type = models.CharField(pgettext_lazy('Settings field', 'value type'),
max_length=1, choices=VALUE_TYPE_CHOICES)
value = models.CharField(
pgettext_lazy('Settings field', 'value'), max_length=256)
def convert_value(self):
if self.value_type == self.INTEGER:
return int(self.value)
elif self.value_type == self.BOOLEAN:
return self._to_bool()
elif self.value_type == self.STRING:
return self.value
else:
raise ValueError('Incorrect value')
def _to_bool(self):
values_dict = {'true': True, 'false': False}
try:
return values_dict[self.value.lower()]
except KeyError:
raise ValueError('Cannot convert to boolean')
def __str__(self):
return '%s: %s' % (self.name, self.value)
<commit_msg>Move choices outside of model class<commit_after>from django.db import models
from django.utils.translation import pgettext_lazy
from django.utils.encoding import python_2_unicode_compatible
INTEGER = 'i'
STRING = 's'
BOOLEAN = 'b'
@python_2_unicode_compatible
class Setting(models.Model):
VALUE_TYPE_CHOICES = (
(INTEGER, pgettext_lazy('Settings', 'Integer')),
(STRING, pgettext_lazy('Settings', 'String')),
(BOOLEAN, pgettext_lazy('Settings', 'Boolean')),
)
name = models.CharField(
pgettext_lazy('Settings field', 'name'), max_length=128)
value_type = models.CharField(pgettext_lazy('Settings field', 'value type'),
max_length=1, choices=VALUE_TYPE_CHOICES)
value = models.CharField(
pgettext_lazy('Settings field', 'value'), max_length=256)
def convert_value(self):
if self.value_type == self.INTEGER:
return int(self.value)
elif self.value_type == self.BOOLEAN:
return self._to_bool()
elif self.value_type == self.STRING:
return self.value
else:
raise ValueError('Incorrect value')
def _to_bool(self):
values_dict = {'true': True, 'false': False}
try:
return values_dict[self.value.lower()]
except KeyError:
raise ValueError('Cannot convert to boolean')
def __str__(self):
return '%s: %s' % (self.name, self.value)
|
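One payoff of hoisting INTEGER/STRING/BOOLEAN to module level is that other modules can reference the type codes without touching the model class. The same conversion logic can also be expressed as a dict dispatch instead of the if/elif chain -- a plain-Python sketch, no Django involved, purely illustrative:

INTEGER = 'i'
STRING = 's'
BOOLEAN = 'b'

def _to_bool(value):
    try:
        return {'true': True, 'false': False}[value.lower()]
    except KeyError:
        raise ValueError('Cannot convert to boolean')

# One converter per type code; unknown codes fail loudly below.
CONVERTERS = {
    INTEGER: int,
    BOOLEAN: _to_bool,
    STRING: lambda value: value,
}

def convert_value(value_type, value):
    try:
        converter = CONVERTERS[value_type]
    except KeyError:
        raise ValueError('Incorrect value type: %r' % value_type)
    return converter(value)

assert convert_value(INTEGER, '42') == 42
assert convert_value(BOOLEAN, 'True') is True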
e02f31032a2e3b3ff76432ae814e6e3fbeb7ae29
|
scripts/master/factory/dart/channels.py
|
scripts/master/factory/dart/channels.py
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 3),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/0.8', 2, '-stable', 1),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 3),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.0', 2, '-stable', 1),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
|
Use 1.0 branch for stable channel builders
|
Use 1.0 branch for stable channel builders
TBR=ricow
Review URL: https://codereview.chromium.org/69023002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@234222 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
eunchong/build,eunchong/build,eunchong/build,eunchong/build
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 3),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/0.8', 2, '-stable', 1),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
Use 1.0 branch for stable channel builders
TBR=ricow
Review URL: https://codereview.chromium.org/69023002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@234222 0039d316-1c4b-4281-b951-d872f2087c98
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 3),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.0', 2, '-stable', 1),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
|
<commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 3),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/0.8', 2, '-stable', 1),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
<commit_msg>Use 1.0 branch for stable channel builders
TBR=ricow
Review URL: https://codereview.chromium.org/69023002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@234222 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 3),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.0', 2, '-stable', 1),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 3),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/0.8', 2, '-stable', 1),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
Use 1.0 branch for stable channel builders
TBR=ricow
Review URL: https://codereview.chromium.org/69023002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@234222 0039d316-1c4b-4281-b951-d872f2087c98# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 3),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.0', 2, '-stable', 1),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
|
<commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 3),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/0.8', 2, '-stable', 1),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
<commit_msg>Use 1.0 branch for stable channel builders
TBR=ricow
Review URL: https://codereview.chromium.org/69023002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@234222 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 3),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.0', 2, '-stable', 1),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
|
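The CHANNELS_BY_NAME index built at the bottom of the module is what configuration code keys into; after this commit a lookup for the stable channel resolves to the 1.0 branch paths. A usage sketch -- the import path is assumed, not taken from the repo:

from master.factory.dart import channels  # hypothetical import path

stable = channels.CHANNELS_BY_NAME['stable']
print(stable.branch)              # 'branches/1.0' after this commit
print(stable.builder_postfix)     # '-stable'
print(stable.dartium_deps_path)   # '/branches/1.0/deps/dartium.deps'

# Channels are laid out by position and scheduled by priority.
ordered = sorted(channels.CHANNELS, key=lambda c: c.position)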
0b4b124061b31a58582f4dc79d917fac1303ed1a
|
breadcrumbs/__init__.py
|
breadcrumbs/__init__.py
|
# -*- coding: utf-8 -*-
__version__ = '1.1.3-p1'
from .breadcrumbs import Breadcrumb
|
# -*- coding: utf-8 -*-
__version__ = '1.1.4'
from .breadcrumbs import Breadcrumb
|
Fix version in breadcrumbs module.
|
Fix version in breadcrumbs module.
|
Python
|
bsd-3-clause
|
chronossc/django-breadcrumbs,chronossc/django-breadcrumbs,chronossc/django-breadcrumbs
|
# -*- coding: utf-8 -*-
__version__ = '1.1.3-p1'
from .breadcrumbs import Breadcrumb
Fix version in breadcrumbs module.
|
# -*- coding: utf-8 -*-
__version__ = '1.1.4'
from .breadcrumbs import Breadcrumb
|
<commit_before># -*- coding: utf-8 -*-
__version__ = '1.1.3-p1'
from .breadcrumbs import Breadcrumb
<commit_msg>Fix version in breadcrumbs module.<commit_after>
|
# -*- coding: utf-8 -*-
__version__ = '1.1.4'
from .breadcrumbs import Breadcrumb
|
# -*- coding: utf-8 -*-
__version__ = '1.1.3-p1'
from .breadcrumbs import Breadcrumb
Fix version in breadcrumbs module.# -*- coding: utf-8 -*-
__version__ = '1.1.4'
from .breadcrumbs import Breadcrumb
|
<commit_before># -*- coding: utf-8 -*-
__version__ = '1.1.3-p1'
from .breadcrumbs import Breadcrumb
<commit_msg>Fix version in breadcrumbs module.<commit_after># -*- coding: utf-8 -*-
__version__ = '1.1.4'
from .breadcrumbs import Breadcrumb
|
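Version strings like this one drift easily when they are duplicated in setup.py. A common guard is to single-source the string by having setup.py read it out of the package; a minimal sketch, with the file path taken from this repo and everything else illustrative:

import re

def read_version(path='breadcrumbs/__init__.py'):
    # Pull the version out of the module without importing it,
    # so setup.py works before dependencies are installed.
    with open(path) as f:
        match = re.search(r"__version__\s*=\s*'([^']+)'", f.read())
    if match is None:
        raise RuntimeError('version string not found')
    return match.group(1)

# setup(name='django-breadcrumbs', version=read_version(), ...)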
5cf4603efb1d0fc8bd8ec44bf3aa19a292403cdf
|
beaver/redis_transport.py
|
beaver/redis_transport.py
|
import datetime
import redis
import time
import urlparse
import beaver.transport
class RedisTransport(beaver.transport.Transport):
def __init__(self, beaver_config, file_config, logger=None):
super(RedisTransport, self).__init__(beaver_config, file_config, logger=logger)
redis_url = beaver_config.get('redis_url')
redis_password = beaver_config.get('redis_password')
_url = urlparse.urlparse(redis_url, scheme="redis")
_, _, _db = _url.path.rpartition("/")
self._redis = redis.StrictRedis(host=_url.hostname, port=_url.port, password=redis_password, db=int(_db), socket_timeout=10)
self._redis_namespace = beaver_config.get('redis_namespace')
wait = 0
while 1:
if wait == 20:
break
time.sleep(0.1)
wait += 1
try:
self._redis.ping()
break
except redis.exceptions.ConnectionError:
pass
def callback(self, filename, lines):
timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
for line in lines:
self._redis.rpush(
self._redis_namespace,
self.format(filename, timestamp, line)
)
|
import datetime
import redis
import time
import urlparse
import beaver.transport
class RedisTransport(beaver.transport.Transport):
def __init__(self, beaver_config, file_config, logger=None):
super(RedisTransport, self).__init__(beaver_config, file_config, logger=logger)
redis_url = beaver_config.get('redis_url')
redis_password = beaver_config.get('redis_password')
_url = urlparse.urlparse(redis_url, scheme="redis")
_, _, _db = _url.path.rpartition("/")
self._redis = redis.StrictRedis(host=_url.hostname, port=_url.port, password=redis_password, db=int(_db), socket_timeout=10)
self._redis_namespace = beaver_config.get('redis_namespace')
wait = 0
while 1:
if wait == 20:
break
time.sleep(0.1)
wait += 1
try:
self._redis.ping()
break
except redis.exceptions.ConnectionError:
pass
self._pipeline = self._redis.pipeline(transaction=False)
def callback(self, filename, lines):
timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
for line in lines:
self._pipeline.rpush(
self._redis_namespace,
self.format(filename, timestamp, line)
)
self._pipeline.execute()
|
Use redis pipelining when sending events
|
Use redis pipelining when sending events
|
Python
|
mit
|
python-beaver/python-beaver,rajmarndi/python-beaver,jlambert121/beaver,davidmoravek/python-beaver,Appdynamics/beaver,rajmarndi/python-beaver,Appdynamics/beaver,timstoop/python-beaver,davidmoravek/python-beaver,imacube/python-beaver,PierreF/beaver,timstoop/python-beaver,zuazo-forks/beaver,doghrim/python-beaver,Open-Party/python-beaver,zuazo-forks/beaver,josegonzalez/python-beaver,doghrim/python-beaver,PierreF/beaver,josegonzalez/python-beaver,thomasalrin/beaver,Open-Party/python-beaver,jlambert121/beaver,thomasalrin/beaver,python-beaver/python-beaver,imacube/python-beaver
|
import datetime
import redis
import time
import urlparse
import beaver.transport
class RedisTransport(beaver.transport.Transport):
def __init__(self, beaver_config, file_config, logger=None):
super(RedisTransport, self).__init__(beaver_config, file_config, logger=logger)
redis_url = beaver_config.get('redis_url')
redis_password = beaver_config.get('redis_password')
_url = urlparse.urlparse(redis_url, scheme="redis")
_, _, _db = _url.path.rpartition("/")
self._redis = redis.StrictRedis(host=_url.hostname, port=_url.port, password=redis_password, db=int(_db), socket_timeout=10)
self._redis_namespace = beaver_config.get('redis_namespace')
wait = 0
while 1:
if wait == 20:
break
time.sleep(0.1)
wait += 1
try:
self._redis.ping()
break
except redis.exceptions.ConnectionError:
pass
def callback(self, filename, lines):
timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
for line in lines:
self._redis.rpush(
self._redis_namespace,
self.format(filename, timestamp, line)
)
Use redis pipelining when sending events
|
import datetime
import redis
import time
import urlparse
import beaver.transport
class RedisTransport(beaver.transport.Transport):
def __init__(self, beaver_config, file_config, logger=None):
super(RedisTransport, self).__init__(beaver_config, file_config, logger=logger)
redis_url = beaver_config.get('redis_url')
redis_password = beaver_config.get('redis_password')
_url = urlparse.urlparse(redis_url, scheme="redis")
_, _, _db = _url.path.rpartition("/")
self._redis = redis.StrictRedis(host=_url.hostname, port=_url.port, password=redis_password, db=int(_db), socket_timeout=10)
self._redis_namespace = beaver_config.get('redis_namespace')
wait = 0
while 1:
if wait == 20:
break
time.sleep(0.1)
wait += 1
try:
self._redis.ping()
break
except redis.exceptions.ConnectionError:
pass
self._pipeline = self._redis.pipeline(transaction=False)
def callback(self, filename, lines):
timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
for line in lines:
self._pipeline.rpush(
self._redis_namespace,
self.format(filename, timestamp, line)
)
self._pipeline.execute()
|
<commit_before>import datetime
import redis
import time
import urlparse
import beaver.transport
class RedisTransport(beaver.transport.Transport):
def __init__(self, beaver_config, file_config, logger=None):
super(RedisTransport, self).__init__(beaver_config, file_config, logger=logger)
redis_url = beaver_config.get('redis_url')
redis_password = beaver_config.get('redis_password')
_url = urlparse.urlparse(redis_url, scheme="redis")
_, _, _db = _url.path.rpartition("/")
self._redis = redis.StrictRedis(host=_url.hostname, port=_url.port, password=redis_password, db=int(_db), socket_timeout=10)
self._redis_namespace = beaver_config.get('redis_namespace')
wait = 0
while 1:
if wait == 20:
break
time.sleep(0.1)
wait += 1
try:
self._redis.ping()
break
except redis.exceptions.ConnectionError:
pass
def callback(self, filename, lines):
timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
for line in lines:
self._redis.rpush(
self._redis_namespace,
self.format(filename, timestamp, line)
)
<commit_msg>Use redis pipelining when sending events<commit_after>
|
import datetime
import redis
import time
import urlparse
import beaver.transport
class RedisTransport(beaver.transport.Transport):
def __init__(self, beaver_config, file_config, logger=None):
super(RedisTransport, self).__init__(beaver_config, file_config, logger=logger)
redis_url = beaver_config.get('redis_url')
redis_password = beaver_config.get('redis_password')
_url = urlparse.urlparse(redis_url, scheme="redis")
_, _, _db = _url.path.rpartition("/")
self._redis = redis.StrictRedis(host=_url.hostname, port=_url.port, password=redis_password, db=int(_db), socket_timeout=10)
self._redis_namespace = beaver_config.get('redis_namespace')
wait = 0
while 1:
if wait == 20:
break
time.sleep(0.1)
wait += 1
try:
self._redis.ping()
break
except redis.exceptions.ConnectionError:
pass
self._pipeline = self._redis.pipeline(transaction=False)
def callback(self, filename, lines):
timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
for line in lines:
self._pipeline.rpush(
self._redis_namespace,
self.format(filename, timestamp, line)
)
self._pipeline.execute()
|
import datetime
import redis
import time
import urlparse
import beaver.transport
class RedisTransport(beaver.transport.Transport):
def __init__(self, beaver_config, file_config, logger=None):
super(RedisTransport, self).__init__(beaver_config, file_config, logger=logger)
redis_url = beaver_config.get('redis_url')
redis_password = beaver_config.get('redis_password')
_url = urlparse.urlparse(redis_url, scheme="redis")
_, _, _db = _url.path.rpartition("/")
self._redis = redis.StrictRedis(host=_url.hostname, port=_url.port, password=redis_password, db=int(_db), socket_timeout=10)
self._redis_namespace = beaver_config.get('redis_namespace')
wait = 0
while 1:
if wait == 20:
break
time.sleep(0.1)
wait += 1
try:
self._redis.ping()
break
except redis.exceptions.ConnectionError:
pass
def callback(self, filename, lines):
timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
for line in lines:
self._redis.rpush(
self._redis_namespace,
self.format(filename, timestamp, line)
)
Use redis pipelining when sending eventsimport datetime
import redis
import time
import urlparse
import beaver.transport
class RedisTransport(beaver.transport.Transport):
def __init__(self, beaver_config, file_config, logger=None):
super(RedisTransport, self).__init__(beaver_config, file_config, logger=logger)
redis_url = beaver_config.get('redis_url')
redis_password = beaver_config.get('redis_password')
_url = urlparse.urlparse(redis_url, scheme="redis")
_, _, _db = _url.path.rpartition("/")
self._redis = redis.StrictRedis(host=_url.hostname, port=_url.port, password=redis_password, db=int(_db), socket_timeout=10)
self._redis_namespace = beaver_config.get('redis_namespace')
wait = 0
while 1:
if wait == 20:
break
time.sleep(0.1)
wait += 1
try:
self._redis.ping()
break
except redis.exceptions.ConnectionError:
pass
self._pipeline = self._redis.pipeline(transaction=False)
def callback(self, filename, lines):
timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
for line in lines:
self._pipeline.rpush(
self._redis_namespace,
self.format(filename, timestamp, line)
)
self._pipeline.execute()
|
<commit_before>import datetime
import redis
import time
import urlparse
import beaver.transport
class RedisTransport(beaver.transport.Transport):
def __init__(self, beaver_config, file_config, logger=None):
super(RedisTransport, self).__init__(beaver_config, file_config, logger=logger)
redis_url = beaver_config.get('redis_url')
redis_password = beaver_config.get('redis_password')
_url = urlparse.urlparse(redis_url, scheme="redis")
_, _, _db = _url.path.rpartition("/")
self._redis = redis.StrictRedis(host=_url.hostname, port=_url.port, password=redis_password, db=int(_db), socket_timeout=10)
self._redis_namespace = beaver_config.get('redis_namespace')
wait = 0
while 1:
if wait == 20:
break
time.sleep(0.1)
wait += 1
try:
self._redis.ping()
break
except redis.exceptions.ConnectionError:
pass
def callback(self, filename, lines):
timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
for line in lines:
self._redis.rpush(
self._redis_namespace,
self.format(filename, timestamp, line)
)
<commit_msg>Use redis pipelining when sending events<commit_after>import datetime
import redis
import time
import urlparse
import beaver.transport
class RedisTransport(beaver.transport.Transport):
def __init__(self, beaver_config, file_config, logger=None):
super(RedisTransport, self).__init__(beaver_config, file_config, logger=logger)
redis_url = beaver_config.get('redis_url')
redis_password = beaver_config.get('redis_password')
_url = urlparse.urlparse(redis_url, scheme="redis")
_, _, _db = _url.path.rpartition("/")
self._redis = redis.StrictRedis(host=_url.hostname, port=_url.port, password=redis_password, db=int(_db), socket_timeout=10)
self._redis_namespace = beaver_config.get('redis_namespace')
wait = 0
while 1:
if wait == 20:
break
time.sleep(0.1)
wait += 1
try:
self._redis.ping()
break
except redis.exceptions.ConnectionError:
pass
self._pipeline = self._redis.pipeline(transaction=False)
def callback(self, filename, lines):
timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
for line in lines:
self._pipeline.rpush(
self._redis_namespace,
self.format(filename, timestamp, line)
)
self._pipeline.execute()
|
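The diff above replaces one network round trip per rpush with a single batched send. A minimal standalone sketch of the same redis-py pipelining pattern, assuming a local Redis server and an illustrative key name:

import redis

# Connect to a local Redis instance (host, port and db are assumptions here).
client = redis.StrictRedis(host='localhost', port=6379, db=0)

# A pipeline with transaction=False buffers commands client-side and sends
# them to the server in one batch instead of one round trip per command.
pipeline = client.pipeline(transaction=False)
for line in ['event 1', 'event 2', 'event 3']:
    pipeline.rpush('logstash', line)  # queued locally, not yet sent
pipeline.execute()  # flush every queued rpush in a single round trip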
8cf8e1b5aa824d691850e0cb431e56744f699a92
|
bin/task_usage_analyze.py
|
bin/task_usage_analyze.py
|
#!/usr/bin/env python3
import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))
import glob, json
import matplotlib.pyplot as pp
import support
def main(index_path, min_length=0, max_length=100, report_each=1000000):
support.figure()
print('Loading the index from "{}"...'.format(index_path))
with open(index_path, 'r') as file:
index = json.load(file)['index']
total = len(index)
print('Processing {} traces...'.format(total))
data = []
processed, selected = 0, 0
for record in index:
processed += 1
length = record['length']
if length >= min_length and length <= max_length:
selected += 1
data.append(length)
if processed % report_each == 0 or processed == total:
pp.clf()
pp.title("Processed {} ({:.2f}%), selected {} ({:.2f}%)".format(
processed, 100 * processed / total, selected,
100 * selected / processed))
pp.hist(data, bins=200)
pp.pause(1e-3)
pp.show()
if __name__ == '__main__':
assert(len(sys.argv) == 2)
main(sys.argv[1])
|
#!/usr/bin/env python3
import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))
import glob, json
import matplotlib.pyplot as pp
import support
def main(index_path, min_length=0, max_length=100, report_each=1000000):
support.figure()
print('Loading the index from "{}"...'.format(index_path))
with open(index_path, 'r') as file:
index = json.load(file)['index']
total = len(index)
print('Processing {} traces...'.format(total))
data = []
processed, selected = 0, 0
for record in index:
processed += 1
length = record['length']
if length >= min_length and length <= max_length:
selected += 1
data.append(length)
if processed % report_each == 0 or processed == total:
pp.clf()
pp.title("Processed {} ({:.2f}%), selected {} ({:.2f}%)".format(
processed, 100 * processed / total, selected,
100 * selected / processed))
pp.hist(data, bins=(max_length - min_length))
pp.pause(1e-3)
pp.show()
if __name__ == '__main__':
assert(len(sys.argv) == 2)
main(sys.argv[1])
|
Adjust the number of bins
|
Adjust the number of bins
|
Python
|
mit
|
learning-on-chip/google-cluster-prediction
|
#!/usr/bin/env python3
import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))
import glob, json
import matplotlib.pyplot as pp
import support
def main(index_path, min_length=0, max_length=100, report_each=1000000):
support.figure()
print('Loading the index from "{}"...'.format(index_path))
with open(index_path, 'r') as file:
index = json.load(file)['index']
total = len(index)
print('Processing {} traces...'.format(total))
data = []
processed, selected = 0, 0
for record in index:
processed += 1
length = record['length']
if length >= min_length and length <= max_length:
selected += 1
data.append(length)
if processed % report_each == 0 or processed == total:
pp.clf()
pp.title("Processed {} ({:.2f}%), selected {} ({:.2f}%)".format(
processed, 100 * processed / total, selected,
100 * selected / processed))
pp.hist(data, bins=200)
pp.pause(1e-3)
pp.show()
if __name__ == '__main__':
assert(len(sys.argv) == 2)
main(sys.argv[1])
Adjust the number of bins
|
#!/usr/bin/env python3
import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))
import glob, json
import matplotlib.pyplot as pp
import support
def main(index_path, min_length=0, max_length=100, report_each=1000000):
support.figure()
print('Loading the index from "{}"...'.format(index_path))
with open(index_path, 'r') as file:
index = json.load(file)['index']
total = len(index)
print('Processing {} traces...'.format(total))
data = []
processed, selected = 0, 0
for record in index:
processed += 1
length = record['length']
if length >= min_length and length <= max_length:
selected += 1
data.append(length)
if processed % report_each == 0 or processed == total:
pp.clf()
pp.title("Processed {} ({:.2f}%), selected {} ({:.2f}%)".format(
processed, 100 * processed / total, selected,
100 * selected / processed))
pp.hist(data, bins=(max_length - min_length))
pp.pause(1e-3)
pp.show()
if __name__ == '__main__':
assert(len(sys.argv) == 2)
main(sys.argv[1])
|
<commit_before>#!/usr/bin/env python3
import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))
import glob, json
import matplotlib.pyplot as pp
import support
def main(index_path, min_length=0, max_length=100, report_each=1000000):
support.figure()
print('Loading the index from "{}"...'.format(index_path))
with open(index_path, 'r') as file:
index = json.load(file)['index']
total = len(index)
print('Processing {} traces...'.format(total))
data = []
processed, selected = 0, 0
for record in index:
processed += 1
length = record['length']
if length >= min_length and length <= max_length:
selected += 1
data.append(length)
if processed % report_each == 0 or processed == total:
pp.clf()
pp.title("Processed {} ({:.2f}%), selected {} ({:.2f}%)".format(
processed, 100 * processed / total, selected,
100 * selected / processed))
pp.hist(data, bins=200)
pp.pause(1e-3)
pp.show()
if __name__ == '__main__':
assert(len(sys.argv) == 2)
main(sys.argv[1])
<commit_msg>Adjust the number of bins<commit_after>
|
#!/usr/bin/env python3
import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))
import glob, json
import matplotlib.pyplot as pp
import support
def main(index_path, min_length=0, max_length=100, report_each=1000000):
support.figure()
print('Loading the index from "{}"...'.format(index_path))
with open(index_path, 'r') as file:
index = json.load(file)['index']
total = len(index)
print('Processing {} traces...'.format(total))
data = []
processed, selected = 0, 0
for record in index:
processed += 1
length = record['length']
if length >= min_length and length <= max_length:
selected += 1
data.append(length)
if processed % report_each == 0 or processed == total:
pp.clf()
pp.title("Processed {} ({:.2f}%), selected {} ({:.2f}%)".format(
processed, 100 * processed / total, selected,
100 * selected / processed))
pp.hist(data, bins=(max_length - min_length))
pp.pause(1e-3)
pp.show()
if __name__ == '__main__':
assert(len(sys.argv) == 2)
main(sys.argv[1])
|
#!/usr/bin/env python3
import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))
import glob, json
import matplotlib.pyplot as pp
import support
def main(index_path, min_length=0, max_length=100, report_each=1000000):
support.figure()
print('Loading the index from "{}"...'.format(index_path))
with open(index_path, 'r') as file:
index = json.load(file)['index']
total = len(index)
print('Processing {} traces...'.format(total))
data = []
processed, selected = 0, 0
for record in index:
processed += 1
length = record['length']
if length >= min_length and length <= max_length:
selected += 1
data.append(length)
if processed % report_each == 0 or processed == total:
pp.clf()
pp.title("Processed {} ({:.2f}%), selected {} ({:.2f}%)".format(
processed, 100 * processed / total, selected,
100 * selected / processed))
pp.hist(data, bins=200)
pp.pause(1e-3)
pp.show()
if __name__ == '__main__':
assert(len(sys.argv) == 2)
main(sys.argv[1])
Adjust the number of bins#!/usr/bin/env python3
import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))
import glob, json
import matplotlib.pyplot as pp
import support
def main(index_path, min_length=0, max_length=100, report_each=1000000):
support.figure()
print('Loading the index from "{}"...'.format(index_path))
with open(index_path, 'r') as file:
index = json.load(file)['index']
total = len(index)
print('Processing {} traces...'.format(total))
data = []
processed, selected = 0, 0
for record in index:
processed += 1
length = record['length']
if length >= min_length and length <= max_length:
selected += 1
data.append(length)
if processed % report_each == 0 or processed == total:
pp.clf()
pp.title("Processed {} ({:.2f}%), selected {} ({:.2f}%)".format(
processed, 100 * processed / total, selected,
100 * selected / processed))
pp.hist(data, bins=(max_length - min_length))
pp.pause(1e-3)
pp.show()
if __name__ == '__main__':
assert(len(sys.argv) == 2)
main(sys.argv[1])
|
<commit_before>#!/usr/bin/env python3
import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))
import glob, json
import matplotlib.pyplot as pp
import support
def main(index_path, min_length=0, max_length=100, report_each=1000000):
support.figure()
print('Loading the index from "{}"...'.format(index_path))
with open(index_path, 'r') as file:
index = json.load(file)['index']
total = len(index)
print('Processing {} traces...'.format(total))
data = []
processed, selected = 0, 0
for record in index:
processed += 1
length = record['length']
if length >= min_length and length <= max_length:
selected += 1
data.append(length)
if processed % report_each == 0 or processed == total:
pp.clf()
pp.title("Processed {} ({:.2f}%), selected {} ({:.2f}%)".format(
processed, 100 * processed / total, selected,
100 * selected / processed))
pp.hist(data, bins=200)
pp.pause(1e-3)
pp.show()
if __name__ == '__main__':
assert(len(sys.argv) == 2)
main(sys.argv[1])
<commit_msg>Adjust the number of bins<commit_after>#!/usr/bin/env python3
import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))
import glob, json
import matplotlib.pyplot as pp
import support
def main(index_path, min_length=0, max_length=100, report_each=1000000):
support.figure()
print('Loading the index from "{}"...'.format(index_path))
with open(index_path, 'r') as file:
index = json.load(file)['index']
total = len(index)
print('Processing {} traces...'.format(total))
data = []
processed, selected = 0, 0
for record in index:
processed += 1
length = record['length']
if length >= min_length and length <= max_length:
selected += 1
data.append(length)
if processed % report_each == 0 or processed == total:
pp.clf()
pp.title("Processed {} ({:.2f}%), selected {} ({:.2f}%)".format(
processed, 100 * processed / total, selected,
100 * selected / processed))
pp.hist(data, bins=(max_length - min_length))
pp.pause(1e-3)
pp.show()
if __name__ == '__main__':
assert(len(sys.argv) == 2)
main(sys.argv[1])
|
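For context on the one-line diff above: the trace lengths are integers clipped to [min_length, max_length], so a fixed bins=200 either leaves empty bins or merges adjacent integer values, while bins=(max_length - min_length) yields roughly one bin per integer. A small sketch of the idea, with made-up lengths:

import matplotlib.pyplot as pp

min_length, max_length = 0, 100
data = [3, 3, 5, 7, 7, 7, 42, 99]  # hypothetical integer trace lengths

# One bin per integer in the selected range keeps each length in its own bin.
pp.hist(data, bins=(max_length - min_length), range=(min_length, max_length))
pp.show()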
72ec3088f6eafd20dce15d742dc9d93b4087cc50
|
build/extract_from_cab.py
|
build/extract_from_cab.py
|
#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Extracts a single file from a CAB archive."""
import os
import subprocess
import sys
def main():
if len(sys.argv) != 4:
print 'Usage: extract_from_cab.py cab_path archived_file output_dir'
return 1
[cab_path, archived_file, output_dir] = sys.argv[1:]
# Invoke the Windows expand utility to extract the file.
level = subprocess.call(
['expand', cab_path, '-F:' + archived_file, output_dir])
if level != 0:
return level
# The expand utility preserves the modification date and time of the archived
# file. Touch the extracted file. This helps build systems that compare the
# modification times of input and output files to determine whether to do an
# action.
os.utime(os.path.join(output_dir, archived_file), None)
return 0
if __name__ == '__main__':
sys.exit(main())
|
#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Extracts a single file from a CAB archive."""
import os
import subprocess
import sys
def main():
if len(sys.argv) != 4:
print 'Usage: extract_from_cab.py cab_path archived_file output_dir'
return 1
[cab_path, archived_file, output_dir] = sys.argv[1:]
# Invoke the Windows expand utility to extract the file.
level = subprocess.call(
['expand', cab_path, '-F:' + archived_file, output_dir])
if level != 0:
print 'Cab extraction(%s, %s, %s) failed.' % (
cab_path, archived_file, output_dir)
print 'Trying a second time.'
level = subprocess.call(
['expand', cab_path, '-F:' + archived_file, output_dir])
if level != 0:
return level
# The expand utility preserves the modification date and time of the archived
# file. Touch the extracted file. This helps build systems that compare the
# modification times of input and output files to determine whether to do an
# action.
os.utime(os.path.join(output_dir, archived_file), None)
return 0
if __name__ == '__main__':
sys.exit(main())
|
Add automatic retry of cab extraction.
|
Add automatic retry of cab extraction.
It fails occasionally with:
One or more files could not be expanded. Delta Package Expander Returned 0x80070002
Expanding File ..\third_party\directxsdk\files\redist\jun2010_d3dx9_43_x86.cab Incomplete, Error Code=0x80070002
Error Description: The system cannot find the file specified.
NOTRY=true
TBR=sky@chromium.org
BUG=
TEST=
Review URL: http://codereview.chromium.org/8883029
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@113653 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
axinging/chromium-crosswalk,anirudhSK/chromium,hgl888/chromium-crosswalk-efl,markYoungH/chromium.src,junmin-zhu/chromium-rivertrail,dushu1203/chromium.src,axinging/chromium-crosswalk,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,mogoweb/chromium-crosswalk,Fireblend/chromium-crosswalk,dushu1203/chromium.src,fujunwei/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,patrickm/chromium.src,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk,markYoungH/chromium.src,nacl-webkit/chrome_deps,bright-sparks/chromium-spacewalk,Just-D/chromium-1,hgl888/chromium-crosswalk,markYoungH/chromium.src,nacl-webkit/chrome_deps,littlstar/chromium.src,chuan9/chromium-crosswalk,keishi/chromium,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,junmin-zhu/chromium-rivertrail,Jonekee/chromium.src,ChromiumWebApps/chromium,hgl888/chromium-crosswalk-efl,patrickm/chromium.src,hgl888/chromium-crosswalk,robclark/chromium,dushu1203/chromium.src,dushu1203/chromium.src,hgl888/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,keishi/chromium,nacl-webkit/chrome_deps,ChromiumWebApps/chromium,markYoungH/chromium.src,Chilledheart/chromium,nacl-webkit/chrome_deps,anirudhSK/chromium,mohamed--abdel-maksoud/chromium.src,littlstar/chromium.src,hgl888/chromium-crosswalk,jaruba/chromium.src,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,ltilve/chromium,markYoungH/chromium.src,rogerwang/chromium,mohamed--abdel-maksoud/chromium.src,ondra-novak/chromium.src,ChromiumWebApps/chromium,hujiajie/pa-chromium,TheTypoMaster/chromium-crosswalk,mogoweb/chromium-crosswalk,timopulkkinen/BubbleFish,Jonekee/chromium.src,junmin-zhu/chromium-rivertrail,junmin-zhu/chromium-rivertrail,axinging/chromium-crosswalk,M4sse/chromium.src,mogoweb/chromium-crosswalk,zcbenz/cefode-chromium,chuan9/chromium-crosswalk,nacl-webkit/chrome_deps,jaruba/chromium.src,Jonekee/chromium.src,junmin-zhu/chromium-rivertrail,TheTypoMaster/chromium-crosswalk,patrickm/chromium.src,mogoweb/chromium-crosswalk,rogerwang/chromium,M4sse/chromium.src,littlstar/chromium.src,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,Jonekee/chromium.src,pozdnyakov/chromium-crosswalk,dednal/chromium.src,dushu1203/chromium.src,jaruba/chromium.src,hujiajie/pa-chromium,M4sse/chromium.src,ondra-novak/chromium.src,bright-sparks/chromium-spacewalk,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,M4sse/chromium.src,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,timopulkkinen/BubbleFish,jaruba/chromium.src,ondra-novak/chromium.src,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,M4sse/chromium.src,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,rogerwang/chromium,keishi/chromium,junmin-zhu/chromium-rivertrail,pozdnyakov/chromium-crosswalk,rogerwang/chromium,krieger-od/nwjs_chromium.src,junmin-zhu/chromium-rivertrail,Pluto-tv/chromium-crosswalk,keishi/chromium,littlstar/chromium.src,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,rogerwang/chromium,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,ltilve/chromium,dednal/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,keishi/chromium,fujunwei/chromium-crosswalk,bright-sparks/chromium-spacewalk,rogerwang/chromium,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,Jonekee/chromium.src,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,Fireblend/chromium-crosswalk,ChromiumWebApps/chromium,pozdnyakov/chromium-crosswalk,Chilledheart/chromium,anirudhSK/chromium,keishi/chromium,rogerwang/chromium,hujiajie/pa-chromium,keishi/chromium,M4sse/chromium.src,littlstar/chromium.src,robclark/chromium,ondra-novak/chromium.src,markYoungH/chromium.src,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,robclark/chromium,TheTypoMaster/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,crosswalk-project/chromium-crosswalk-efl,zcbenz/cefode-chromium,jaruba/chromium.src,mogoweb/chromium-crosswalk,timopulkkinen/BubbleFish,anirudhSK/chromium,ChromiumWebApps/chromium,robclark/chromium,timopulkkinen/BubbleFish,littlstar/chromium.src,rogerwang/chromium,M4sse/chromium.src,rogerwang/chromium,mogoweb/chromium-crosswalk,pozdnyakov/chromium-crosswalk,ChromiumWebApps/chromium,Chilledheart/chromium,crosswalk-project/chromium-crosswalk-efl,patrickm/chromium.src,Pluto-tv/chromium-crosswalk,timopulkkinen/BubbleFish,dushu1203/chromium.src,littlstar/chromium.src,nacl-webkit/chrome_deps,nacl-webkit/chrome_deps,pozdnyakov/chromium-crosswalk,Pluto-tv/chromium-crosswalk,M4sse/chromium.src,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,zcbenz/cefode-chromium,TheTypoMaster/chromium-crosswalk,patrickm/chromium.src,anirudhSK/chromium,nacl-webkit/chrome_deps,ltilve/chromium,mogoweb/chromium-crosswalk,patrickm/chromium.src,keishi/chromium,TheTypoMaster/chromium-crosswalk,jaruba/chromium.src,hujiajie/pa-chromium,hujiajie/pa-chromium,hujiajie/pa-chromium,bright-sparks/chromium-spacewalk,keishi/chromium,nacl-webkit/chrome_deps,krieger-od/nwjs_chromium.src,M4sse/chromium.src,hgl888/chromium-crosswalk-efl,ltilve/chromium,nacl-webkit/chrome_deps,zcbenz/cefode-chromium,pozdnyakov/chromium-crosswalk,hujiajie/pa-chromium,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,timopulkkinen/BubbleFish,bright-sparks/chromium-spacewalk,zcbenz/cefode-chromium,timopulkkinen/BubbleFish,zcbenz/cefode-chromium,robclark/chromium,hgl888/chromium-crosswalk-efl,Jonekee/chromium.src,Chilledheart/chromium,pozdnyakov/chromium-crosswalk,timopulkkinen/BubbleFish,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,ChromiumWebApps/chromium,krieger-od/nwjs_chromium.src,dednal/chromium.src,markYoungH/chromium.src,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,junmin-zhu/chromium-rivertrail,patrickm/chromium.src,markYoungH/chromium.src,Pluto-tv/chromium-crosswalk,pozdnyakov/chromium-crosswalk,timopulkkinen/BubbleFish,junmin-zhu/chromium-rivertrail,patrickm/chromium.src,pozdnyakov/chromium-crosswalk,chuan9/chromium-crosswalk,bright-sparks/chromium-spacewalk,anirudhSK/chromium,hgl888/chromium-crosswalk-efl,hujiajie/pa-chromium,keishi/chromium,dushu1203/chromium.src,mogoweb/chromium-crosswalk,ondra-novak/chromium.src,Just-D/chromium-1,Just-D/chromium-1,ChromiumWebApps/chromium,junmin-zhu/chromium-rivertrail,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,jaruba/chromium.src,nacl-webkit/chrome_deps,ltilve/chromium,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,ChromiumWebApps/chromium,pozdnyakov/chromium-crosswalk,zcbenz/cefode-chromium,zcbenz/cefode-chromium,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,Just-D/chromium-1,fujunwei/chromium-crosswalk,fujunwei/chromium-crosswalk,anirudhSK/chromium,crosswalk-project/chromium-crosswalk-efl,M4sse/chromium.src,krieger-od/nwjs_chromium.src,ondra-novak/chromium.src,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,Just-D/chromium-1,axinging/chromium-crosswalk,dednal/chromium.src,ondra-novak/chromium.src,mogoweb/chromium-crosswalk,rogerwang/chromium,markYoungH/chromium.src,Fireblend/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Jonekee/chromium.src,axinging/chromium-crosswalk,robclark/chromium,axinging/chromium-crosswalk,robclark/chromium,hujiajie/pa-chromium,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,anirudhSK/chromium,robclark/chromium,keishi/chromium,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,junmin-zhu/chromium-rivertrail,patrickm/chromium.src,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,timopulkkinen/BubbleFish,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,hgl888/chromium-crosswalk,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,littlstar/chromium.src,Jonekee/chromium.src,ltilve/chromium,zcbenz/cefode-chromium,ondra-novak/chromium.src,robclark/chromium,Jonekee/chromium.src,M4sse/chromium.src,pozdnyakov/chromium-crosswalk,dednal/chromium.src,jaruba/chromium.src,anirudhSK/chromium,PeterWangIntel/chromium-crosswalk,anirudhSK/chromium,timopulkkinen/BubbleFish,zcbenz/cefode-chromium,dednal/chromium.src,dednal/chromium.src,hujiajie/pa-chromium
|
#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Extracts a single file from a CAB archive."""
import os
import subprocess
import sys
def main():
if len(sys.argv) != 4:
print 'Usage: extract_from_cab.py cab_path archived_file output_dir'
return 1
[cab_path, archived_file, output_dir] = sys.argv[1:]
# Invoke the Windows expand utility to extract the file.
level = subprocess.call(
['expand', cab_path, '-F:' + archived_file, output_dir])
if level != 0:
return level
# The expand utility preserves the modification date and time of the archived
# file. Touch the extracted file. This helps build systems that compare the
# modification times of input and output files to determine whether to do an
# action.
os.utime(os.path.join(output_dir, archived_file), None)
return 0
if __name__ == '__main__':
sys.exit(main())
Add automatic retry of cab extraction.
It fails occasionally with:
One or more files could not be expanded. Delta Package Expander Returned 0x80070002
Expanding File ..\third_party\directxsdk\files\redist\jun2010_d3dx9_43_x86.cab Incomplete, Error Code=0x80070002
Error Description: The system cannot find the file specified.
NOTRY=true
TBR=sky@chromium.org
BUG=
TEST=
Review URL: http://codereview.chromium.org/8883029
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@113653 0039d316-1c4b-4281-b951-d872f2087c98
|
#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Extracts a single file from a CAB archive."""
import os
import subprocess
import sys
def main():
if len(sys.argv) != 4:
print 'Usage: extract_from_cab.py cab_path archived_file output_dir'
return 1
[cab_path, archived_file, output_dir] = sys.argv[1:]
# Invoke the Windows expand utility to extract the file.
level = subprocess.call(
['expand', cab_path, '-F:' + archived_file, output_dir])
if level != 0:
print 'Cab extraction(%s, %s, %s) failed.' % (
cab_path, archived_file, output_dir)
print 'Trying a second time.'
level = subprocess.call(
['expand', cab_path, '-F:' + archived_file, output_dir])
if level != 0:
return level
# The expand utility preserves the modification date and time of the archived
# file. Touch the extracted file. This helps build systems that compare the
# modification times of input and output files to determine whether to do an
# action.
os.utime(os.path.join(output_dir, archived_file), None)
return 0
if __name__ == '__main__':
sys.exit(main())
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Extracts a single file from a CAB archive."""
import os
import subprocess
import sys
def main():
if len(sys.argv) != 4:
print 'Usage: extract_from_cab.py cab_path archived_file output_dir'
return 1
[cab_path, archived_file, output_dir] = sys.argv[1:]
# Invoke the Windows expand utility to extract the file.
level = subprocess.call(
['expand', cab_path, '-F:' + archived_file, output_dir])
if level != 0:
return level
# The expand utility preserves the modification date and time of the archived
# file. Touch the extracted file. This helps build systems that compare the
# modification times of input and output files to determine whether to do an
# action.
os.utime(os.path.join(output_dir, archived_file), None)
return 0
if __name__ == '__main__':
sys.exit(main())
<commit_msg>Add automatic retry of cab extraction.
It fails occasionally with:
One or more files could not be expanded. Delta Package Expander Returned 0x80070002
Expanding File ..\third_party\directxsdk\files\redist\jun2010_d3dx9_43_x86.cab Incomplete, Error Code=0x80070002
Error Description: The system cannot find the file specified.
NOTRY=true
TBR=sky@chromium.org
BUG=
TEST=
Review URL: http://codereview.chromium.org/8883029
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@113653 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Extracts a single file from a CAB archive."""
import os
import subprocess
import sys
def main():
if len(sys.argv) != 4:
print 'Usage: extract_from_cab.py cab_path archived_file output_dir'
return 1
[cab_path, archived_file, output_dir] = sys.argv[1:]
# Invoke the Windows expand utility to extract the file.
level = subprocess.call(
['expand', cab_path, '-F:' + archived_file, output_dir])
if level != 0:
print 'Cab extraction(%s, %s, %s) failed.' % (
cab_path, archived_file, output_dir)
print 'Trying a second time.'
level = subprocess.call(
['expand', cab_path, '-F:' + archived_file, output_dir])
if level != 0:
return level
# The expand utility preserves the modification date and time of the archived
# file. Touch the extracted file. This helps build systems that compare the
# modification times of input and output files to determine whether to do an
# action.
os.utime(os.path.join(output_dir, archived_file), None)
return 0
if __name__ == '__main__':
sys.exit(main())
|
#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Extracts a single file from a CAB archive."""
import os
import subprocess
import sys
def main():
if len(sys.argv) != 4:
print 'Usage: extract_from_cab.py cab_path archived_file output_dir'
return 1
[cab_path, archived_file, output_dir] = sys.argv[1:]
# Invoke the Windows expand utility to extract the file.
level = subprocess.call(
['expand', cab_path, '-F:' + archived_file, output_dir])
if level != 0:
return level
# The expand utility preserves the modification date and time of the archived
# file. Touch the extracted file. This helps build systems that compare the
# modification times of input and output files to determine whether to do an
# action.
os.utime(os.path.join(output_dir, archived_file), None)
return 0
if __name__ == '__main__':
sys.exit(main())
Add automatic retry of cab extraction.
It fails occasionally with:
One or more files could not be expanded. Delta Package Expander Returned 0x80070002
Expanding File ..\third_party\directxsdk\files\redist\jun2010_d3dx9_43_x86.cab Incomplete, Error Code=0x80070002
Error Description: The system cannot find the file specified.
NOTRY=true
TBR=sky@chromium.org
BUG=
TEST=
Review URL: http://codereview.chromium.org/8883029
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@113653 0039d316-1c4b-4281-b951-d872f2087c98#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Extracts a single file from a CAB archive."""
import os
import subprocess
import sys
def main():
if len(sys.argv) != 4:
print 'Usage: extract_from_cab.py cab_path archived_file output_dir'
return 1
[cab_path, archived_file, output_dir] = sys.argv[1:]
# Invoke the Windows expand utility to extract the file.
level = subprocess.call(
['expand', cab_path, '-F:' + archived_file, output_dir])
if level != 0:
print 'Cab extraction(%s, %s, %s) failed.' % (
cab_path, archived_file, output_dir)
print 'Trying a second time.'
level = subprocess.call(
['expand', cab_path, '-F:' + archived_file, output_dir])
if level != 0:
return level
# The expand utility preserves the modification date and time of the archived
# file. Touch the extracted file. This helps build systems that compare the
# modification times of input and output files to determine whether to do an
# action.
os.utime(os.path.join(output_dir, archived_file), None)
return 0
if __name__ == '__main__':
sys.exit(main())
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Extracts a single file from a CAB archive."""
import os
import subprocess
import sys
def main():
if len(sys.argv) != 4:
print 'Usage: extract_from_cab.py cab_path archived_file output_dir'
return 1
[cab_path, archived_file, output_dir] = sys.argv[1:]
# Invoke the Windows expand utility to extract the file.
level = subprocess.call(
['expand', cab_path, '-F:' + archived_file, output_dir])
if level != 0:
return level
# The expand utility preserves the modification date and time of the archived
# file. Touch the extracted file. This helps build systems that compare the
# modification times of input and output files to determine whether to do an
# action.
os.utime(os.path.join(output_dir, archived_file), None)
return 0
if __name__ == '__main__':
sys.exit(main())
<commit_msg>Add automatic retry of cab extraction.
It fails occasionally with:
One or more files could not be expanded. Delta Package Expander Returned 0x80070002
Expanding File ..\third_party\directxsdk\files\redist\jun2010_d3dx9_43_x86.cab Incomplete, Error Code=0x80070002
Error Description: The system cannot find the file specified.
NOTRY=true
TBR=sky@chromium.org
BUG=
TEST=
Review URL: http://codereview.chromium.org/8883029
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@113653 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Extracts a single file from a CAB archive."""
import os
import subprocess
import sys
def main():
if len(sys.argv) != 4:
print 'Usage: extract_from_cab.py cab_path archived_file output_dir'
return 1
[cab_path, archived_file, output_dir] = sys.argv[1:]
# Invoke the Windows expand utility to extract the file.
level = subprocess.call(
['expand', cab_path, '-F:' + archived_file, output_dir])
if level != 0:
print 'Cab extraction(%s, %s, %s) failed.' % (
cab_path, archived_file, output_dir)
print 'Trying a second time.'
level = subprocess.call(
['expand', cab_path, '-F:' + archived_file, output_dir])
if level != 0:
return level
# The expand utility preserves the modification date and time of the archived
# file. Touch the extracted file. This helps build systems that compare the
# modification times of input and output files to determine whether to do an
# action.
os.utime(os.path.join(output_dir, archived_file), None)
return 0
if __name__ == '__main__':
sys.exit(main())
|
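The patch above hard-codes a single retry of the expand call. The same idea generalizes to a bounded retry loop around subprocess.call; the helper below is only a sketch (its name and the attempts parameter are invented), written in the record's Python 2 style:

import subprocess

def call_with_retries(cmd, attempts=2):
    """Run cmd up to attempts times; return the last exit code."""
    level = 1
    for attempt in range(attempts):
        level = subprocess.call(cmd)
        if level == 0:
            return 0
        print 'Attempt %d of %r failed with exit code %d.' % (attempt + 1, cmd, level)
    return level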
62b90eb97c9e32280f7f1a9c1127099f20440c11
|
byceps/config_defaults.py
|
byceps/config_defaults.py
|
"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
from pytz import timezone
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
# user accounts
USER_REGISTRATION_ENABLED = True
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# news item pagination
NEWS_ITEMS_PER_PAGE = 4
# message board pagination
BOARD_TOPICS_PER_PAGE = 10
BOARD_POSTINGS_PER_PAGE = 10
# shop
SHOP_ORDER_EXPORT_TIMEZONE = timezone('Europe/Berlin')
# ticketing
TICKET_MANAGEMENT_ENABLED = True
# seating
SEAT_MANAGEMENT_ENABLED = True
|
"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
from pytz import timezone
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# user accounts
USER_REGISTRATION_ENABLED = True
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# news item pagination
NEWS_ITEMS_PER_PAGE = 4
# message board pagination
BOARD_TOPICS_PER_PAGE = 10
BOARD_POSTINGS_PER_PAGE = 10
# shop
SHOP_ORDER_EXPORT_TIMEZONE = timezone('Europe/Berlin')
# ticketing
TICKET_MANAGEMENT_ENABLED = True
# seating
SEAT_MANAGEMENT_ENABLED = True
|
Set required background color for RQ dashboard
|
Set required background color for RQ dashboard
BYCEPS doesn't use RQ dashboard's default settings, so they need to be set explicitly as necessary.
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps
|
"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
from pytz import timezone
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
# user accounts
USER_REGISTRATION_ENABLED = True
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# news item pagination
NEWS_ITEMS_PER_PAGE = 4
# message board pagination
BOARD_TOPICS_PER_PAGE = 10
BOARD_POSTINGS_PER_PAGE = 10
# shop
SHOP_ORDER_EXPORT_TIMEZONE = timezone('Europe/Berlin')
# ticketing
TICKET_MANAGEMENT_ENABLED = True
# seating
SEAT_MANAGEMENT_ENABLED = True
Set required background color for RQ dashboard
BYCEPS doesn't use RQ dashboard's default settings, so they need to be set explicitly as necessary.
|
"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
from pytz import timezone
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# user accounts
USER_REGISTRATION_ENABLED = True
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# news item pagination
NEWS_ITEMS_PER_PAGE = 4
# message board pagination
BOARD_TOPICS_PER_PAGE = 10
BOARD_POSTINGS_PER_PAGE = 10
# shop
SHOP_ORDER_EXPORT_TIMEZONE = timezone('Europe/Berlin')
# ticketing
TICKET_MANAGEMENT_ENABLED = True
# seating
SEAT_MANAGEMENT_ENABLED = True
|
<commit_before>"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
from pytz import timezone
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
# user accounts
USER_REGISTRATION_ENABLED = True
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# news item pagination
NEWS_ITEMS_PER_PAGE = 4
# message board pagination
BOARD_TOPICS_PER_PAGE = 10
BOARD_POSTINGS_PER_PAGE = 10
# shop
SHOP_ORDER_EXPORT_TIMEZONE = timezone('Europe/Berlin')
# ticketing
TICKET_MANAGEMENT_ENABLED = True
# seating
SEAT_MANAGEMENT_ENABLED = True
<commit_msg>Set required background color for RQ dashboard
BYCEPS doesn't use RQ dashboard's default settings, so they need to be set explicitly as necessary.<commit_after>
|
"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
from pytz import timezone
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# user accounts
USER_REGISTRATION_ENABLED = True
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# news item pagination
NEWS_ITEMS_PER_PAGE = 4
# message board pagination
BOARD_TOPICS_PER_PAGE = 10
BOARD_POSTINGS_PER_PAGE = 10
# shop
SHOP_ORDER_EXPORT_TIMEZONE = timezone('Europe/Berlin')
# ticketing
TICKET_MANAGEMENT_ENABLED = True
# seating
SEAT_MANAGEMENT_ENABLED = True
|
"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
from pytz import timezone
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
# user accounts
USER_REGISTRATION_ENABLED = True
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# news item pagination
NEWS_ITEMS_PER_PAGE = 4
# message board pagination
BOARD_TOPICS_PER_PAGE = 10
BOARD_POSTINGS_PER_PAGE = 10
# shop
SHOP_ORDER_EXPORT_TIMEZONE = timezone('Europe/Berlin')
# ticketing
TICKET_MANAGEMENT_ENABLED = True
# seating
SEAT_MANAGEMENT_ENABLED = True
Set required background color for RQ dashboard
BYCEPS doesn't use RQ dashboard's default settings, so they need to be set explicitly as necessary."""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
from pytz import timezone
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# user accounts
USER_REGISTRATION_ENABLED = True
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# news item pagination
NEWS_ITEMS_PER_PAGE = 4
# message board pagination
BOARD_TOPICS_PER_PAGE = 10
BOARD_POSTINGS_PER_PAGE = 10
# shop
SHOP_ORDER_EXPORT_TIMEZONE = timezone('Europe/Berlin')
# ticketing
TICKET_MANAGEMENT_ENABLED = True
# seating
SEAT_MANAGEMENT_ENABLED = True
|
<commit_before>"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
from pytz import timezone
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
# user accounts
USER_REGISTRATION_ENABLED = True
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# news item pagination
NEWS_ITEMS_PER_PAGE = 4
# message board pagination
BOARD_TOPICS_PER_PAGE = 10
BOARD_POSTINGS_PER_PAGE = 10
# shop
SHOP_ORDER_EXPORT_TIMEZONE = timezone('Europe/Berlin')
# ticketing
TICKET_MANAGEMENT_ENABLED = True
# seating
SEAT_MANAGEMENT_ENABLED = True
<commit_msg>Set required background color for RQ dashboard
BYCEPS doesn't use RQ dashboard's default settings, so they need to be set explicitly as necessary.<commit_after>"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
from pytz import timezone
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# user accounts
USER_REGISTRATION_ENABLED = True
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# news item pagination
NEWS_ITEMS_PER_PAGE = 4
# message board pagination
BOARD_TOPICS_PER_PAGE = 10
BOARD_POSTINGS_PER_PAGE = 10
# shop
SHOP_ORDER_EXPORT_TIMEZONE = timezone('Europe/Berlin')
# ticketing
TICKET_MANAGEMENT_ENABLED = True
# seating
SEAT_MANAGEMENT_ENABLED = True
|
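The commit above only adds a module-level default (WEB_BACKGROUND = 'white'). As a hedged sketch of how such a defaults module is typically consumed in a Flask app — the wiring below is assumed, not taken from the BYCEPS codebase:

import flask

app = flask.Flask(__name__)

# Load the module-level defaults first, then let a deployment-specific file
# (pointed to by an environment variable) override individual keys.
app.config.from_object('byceps.config_defaults')
app.config.from_envvar('BYCEPS_CONFIG', silent=True)

print(app.config['WEB_BACKGROUND'])  # 'white' unless overridden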
2f1bcd83bf9069e5fc599aa20e1ed533bebd5e67
|
Detect_Face_Sides.py
|
Detect_Face_Sides.py
|
import numpy as np
def get_leftside_average(self):
"""Return Array of Left Most Points."""
width = self.size[0]
height = self.size[1]
left_most_points = []
for row in range(height):
for column in range(width):
if image.getpixel(row, column) > 200:
left_most_points.append(column)
break
return np.median(left_most_points)
|
import numpy as np
def get_leftside_average(self):
"""Return the value of the Average of the left_most_points."""
width = self.size[0]
height = self.size[1]
left_most_points = []
for row in range(height):
for column in range(width):
if image.getpixel(row, column) > 200:
left_most_points.append(row)
break
return np.median(left_most_points)
def get_rightside_average(self):
"""Return the value of the average of the right_most_points."""
width = self.size[0]
height = self.size[1]
right_most_points = []
for row in range(height):
for column in range(width, -1, -1): #Indices moving right to left
            if image.getpixel(row, column) > 200:
right_most_points.append(row)
break
return np.median(right_most_points)
|
Add get_rightside_face and Fix bug
|
Add get_rightside_face and Fix bug
|
Python
|
mit
|
anassinator/codejam-2014,anassinator/codejam
|
import numpy as np
def get_leftside_average(self):
"""Return Array of Left Most Points."""
width = self.size[0]
height = self.size[1]
left_most_points = []
for row in range(height):
for column in range(width):
if image.getpixel(row, column) > 200:
left_most_points.append(column)
break
return np.median(left_most_points)
Add get_rightside_face and Fix bug
|
import numpy as np
def get_leftside_average(self):
"""Return the value of the Average of the left_most_points."""
width = self.size[0]
height = self.size[1]
left_most_points = []
for row in range(height):
for column in range(width):
if image.getpixel(row, column) > 200:
left_most_points.append(row)
break
return np.median(left_most_points)
def get_rightside_average(self):
"""Return the value of the average of the right_most_points."""
width = self.size[0]
height = self.size[1]
right_most_points = []
for row in range(height):
for column in range(width, -1, -1): #Indices moving right to left
            if image.getpixel(row, column) > 200:
right_most_points.append(row)
break
return np.median(right_most_points)
|
<commit_before>import numpy as np
def get_leftside_average(self):
"""Return Array of Left Most Points."""
width = self.size[0]
height = self.size[1]
left_most_points = []
for row in range(height):
for column in range(width):
if image.getpixel(row, column) > 200:
left_most_points.append(column)
break
return np.median(left_most_points)
<commit_msg>Add get_rightside_face and Fix bug<commit_after>
|
import numpy as np
def get_leftside_average(self):
"""Return the value of the Average of the left_most_points."""
width = self.size[0]
height = self.size[1]
left_most_points = []
for row in range(height):
for column in range(width):
if image.getpixel(row, column) > 200:
left_most_points.append(row)
break
return np.median(left_most_points)
def get_rightside_average(self):
"""Return the value of the average of the right_most_points."""
width = self.size[0]
height = self.size[1]
right_most_points = []
for row in range(height):
for column in range(width, -1, -1): #Indices moving right to left
            if image.getpixel(row, column) > 200:
right_most_points.append(row)
break
return np.median(right_most_points)
|
import numpy as np
def get_leftside_average(self):
"""Return Array of Left Most Points."""
width = self.size[0]
height = self.size[1]
left_most_points = []
for row in range(height):
for column in range(width):
if image.getpixel(row, column) > 200:
left_most_points.append(column)
break
return np.median(left_most_points)
Add get_rightside_face and Fix bugimport numpy as np
def get_leftside_average(self):
"""Return the value of the Average of the left_most_points."""
width = self.size[0]
height = self.size[1]
left_most_points = []
for row in range(height):
for column in range(width):
if image.getpixel(row, column) > 200:
left_most_points.append(row)
break
return np.median(left_most_points)
def get_rightside_average(self):
"""Return the value of the average of the right_most_points."""
width = self.size[0]
height = self.size[1]
right_most_points = []
for row in range(height):
for column in range(width, -1, -1): #Indices moving right to left
            if image.getpixel(row, column) > 200:
right_most_points.append(row)
break
return np.median(right_most_points)
|
<commit_before>import numpy as np
def get_leftside_average(self):
"""Return Array of Left Most Points."""
width = self.size[0]
height = self.size[1]
left_most_points = []
for row in range(height):
for column in range(width):
if image.getpixel(row, column) > 200:
left_most_points.append(column)
break
return np.median(left_most_points)
<commit_msg>Add get_rightside_face and Fix bug<commit_after>import numpy as np
def get_leftside_average(self):
"""Return the value of the Average of the left_most_points."""
width = self.size[0]
height = self.size[1]
left_most_points = []
for row in range(height):
for column in range(width):
if image.getpixel(row, column) > 200:
left_most_points.append(row)
break
return np.median(left_most_points)
def get_rightside_average(self):
"""Return the value of the average of the right_most_points."""
width = self.size[0]
height = self.size[1]
right_most_points = []
for row in range(height):
for column in range(width, -1, -1): #Indices moving right to left
            if image.getpixel(row, column) > 200:
right_most_points.append(row)
break
return np.median(right_most_points)
|
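Two caveats about the record above, beyond the missing comma corrected in place: range(width, -1, -1) starts at width, one past the last valid zero-based column index, and the loop appends the row index rather than the right-most column it just found. A corrected, self-contained sketch (the get_value callable stands in for the record's image.getpixel(row, column) call, whose two-argument signature is taken from the record as-is):

import numpy as np

def rightmost_bright_columns(get_value, width, height, threshold=200):
    """Median of the right-most column per row whose value exceeds threshold."""
    points = []
    for row in range(height):
        for column in range(width - 1, -1, -1):  # width - 1 is the last valid index
            if get_value(row, column) > threshold:
                points.append(column)  # record the column, not the row
                break
    return np.median(points)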
66b7715ada14051f2e54b061e09c896a6e7d3844
|
openahjo_activity_streams/server.py
|
openahjo_activity_streams/server.py
|
# Copyright (c) 2015 ThoughtWorks
#
# See the file LICENSE for copying permission.
import os
import flask
from openahjo_activity_streams import convert
import requests
import logging
import json
OPENAHJO_URL = 'http://dev.hel.fi/paatokset/v1/agenda_item/?order_by=-last_modified_time'
def create_app(remote_url=OPENAHJO_URL, converter=convert.to_activity_stream):
logging.basicConfig(level=logging.INFO)
application = flask.Flask(__name__, instance_path=os.environ['INSTANCE_PATH'])
application.config['REMOTE_URL'] = remote_url
application.config['CONVERTER'] = converter
@application.route('/')
def show_something():
openahjo_data = requests.get(application.config['REMOTE_URL'])
converted_data = application.config['CONVERTER'](openahjo_data.json())
return application.response_class(json.dumps(converted_data), mimetype='application/activity+json')
return application
application = create_app()
if __name__ == '__main__':
application.run()
|
# Copyright (c) 2015 ThoughtWorks
#
# See the file LICENSE for copying permission.
import flask
from openahjo_activity_streams import convert
import requests
import logging
import json
OPENAHJO_URL = 'http://dev.hel.fi/paatokset/v1/agenda_item/?order_by=-last_modified_time'
def create_app(remote_url=OPENAHJO_URL, converter=convert.to_activity_stream):
logging.basicConfig(level=logging.INFO)
application = flask.Flask(__name__)
application.config['REMOTE_URL'] = remote_url
application.config['CONVERTER'] = converter
@application.route('/')
def show_something():
openahjo_data = requests.get(application.config['REMOTE_URL'])
converted_data = application.config['CONVERTER'](openahjo_data.json())
return application.response_class(json.dumps(converted_data), mimetype='application/activity+json')
return application
application = create_app()
if __name__ == '__main__':
application.run()
|
Revert "CW + AW | 225 | add instance path when building app"
|
Revert "CW + AW | 225 | add instance path when building app"
This reverts commit 9aa8d2ec4f49dfe8261893de70887052cf134bd5.
|
Python
|
mit
|
d-cent/HelsinkiActivityStream,ThoughtWorksInc/HelsinkiActivityStream,d-cent/HelsinkiActivityStream,d-cent/HelsinkiActivityStream,ThoughtWorksInc/HelsinkiActivityStream,ThoughtWorksInc/HelsinkiActivityStream
|
# Copyright (c) 2015 ThoughtWorks
#
# See the file LICENSE for copying permission.
import os
import flask
from openahjo_activity_streams import convert
import requests
import logging
import json
OPENAHJO_URL = 'http://dev.hel.fi/paatokset/v1/agenda_item/?order_by=-last_modified_time'
def create_app(remote_url=OPENAHJO_URL, converter=convert.to_activity_stream):
logging.basicConfig(level=logging.INFO)
application = flask.Flask(__name__, instance_path=os.environ['INSTANCE_PATH'])
application.config['REMOTE_URL'] = remote_url
application.config['CONVERTER'] = converter
@application.route('/')
def show_something():
openahjo_data = requests.get(application.config['REMOTE_URL'])
converted_data = application.config['CONVERTER'](openahjo_data.json())
return application.response_class(json.dumps(converted_data), mimetype='application/activity+json')
return application
application = create_app()
if __name__ == '__main__':
application.run()
Revert "CW + AW | 225 | add instance path when building app"
This reverts commit 9aa8d2ec4f49dfe8261893de70887052cf134bd5.
|
# Copyright (c) 2015 ThoughtWorks
#
# See the file LICENSE for copying permission.
import flask
from openahjo_activity_streams import convert
import requests
import logging
import json
OPENAHJO_URL = 'http://dev.hel.fi/paatokset/v1/agenda_item/?order_by=-last_modified_time'
def create_app(remote_url=OPENAHJO_URL, converter=convert.to_activity_stream):
logging.basicConfig(level=logging.INFO)
application = flask.Flask(__name__)
application.config['REMOTE_URL'] = remote_url
application.config['CONVERTER'] = converter
@application.route('/')
def show_something():
openahjo_data = requests.get(application.config['REMOTE_URL'])
converted_data = application.config['CONVERTER'](openahjo_data.json())
return application.response_class(json.dumps(converted_data), mimetype='application/activity+json')
return application
application = create_app()
if __name__ == '__main__':
application.run()
|
<commit_before># Copyright (c) 2015 ThoughtWorks
#
# See the file LICENSE for copying permission.
import os
import flask
from openahjo_activity_streams import convert
import requests
import logging
import json
OPENAHJO_URL = 'http://dev.hel.fi/paatokset/v1/agenda_item/?order_by=-last_modified_time'
def create_app(remote_url=OPENAHJO_URL, converter=convert.to_activity_stream):
logging.basicConfig(level=logging.INFO)
application = flask.Flask(__name__, instance_path=os.environ['INSTANCE_PATH'])
application.config['REMOTE_URL'] = remote_url
application.config['CONVERTER'] = converter
@application.route('/')
def show_something():
openahjo_data = requests.get(application.config['REMOTE_URL'])
converted_data = application.config['CONVERTER'](openahjo_data.json())
return application.response_class(json.dumps(converted_data), mimetype='application/activity+json')
return application
application = create_app()
if __name__ == '__main__':
application.run()
<commit_msg>Revert "CW + AW | 225 | add instance path when building app"
This reverts commit 9aa8d2ec4f49dfe8261893de70887052cf134bd5.<commit_after>
|
# Copyright (c) 2015 ThoughtWorks
#
# See the file LICENSE for copying permission.
import flask
from openahjo_activity_streams import convert
import requests
import logging
import json
OPENAHJO_URL = 'http://dev.hel.fi/paatokset/v1/agenda_item/?order_by=-last_modified_time'
def create_app(remote_url=OPENAHJO_URL, converter=convert.to_activity_stream):
logging.basicConfig(level=logging.INFO)
application = flask.Flask(__name__)
application.config['REMOTE_URL'] = remote_url
application.config['CONVERTER'] = converter
@application.route('/')
def show_something():
openahjo_data = requests.get(application.config['REMOTE_URL'])
converted_data = application.config['CONVERTER'](openahjo_data.json())
return application.response_class(json.dumps(converted_data), mimetype='application/activity+json')
return application
application = create_app()
if __name__ == '__main__':
application.run()
|
# Copyright (c) 2015 ThoughtWorks
#
# See the file LICENSE for copying permission.
import os
import flask
from openahjo_activity_streams import convert
import requests
import logging
import json
OPENAHJO_URL = 'http://dev.hel.fi/paatokset/v1/agenda_item/?order_by=-last_modified_time'
def create_app(remote_url=OPENAHJO_URL, converter=convert.to_activity_stream):
logging.basicConfig(level=logging.INFO)
application = flask.Flask(__name__, instance_path=os.environ['INSTANCE_PATH'])
application.config['REMOTE_URL'] = remote_url
application.config['CONVERTER'] = converter
@application.route('/')
def show_something():
openahjo_data = requests.get(application.config['REMOTE_URL'])
converted_data = application.config['CONVERTER'](openahjo_data.json())
return application.response_class(json.dumps(converted_data), mimetype='application/activity+json')
return application
application = create_app()
if __name__ == '__main__':
application.run()
Revert "CW + AW | 225 | add instance path when building app"
This reverts commit 9aa8d2ec4f49dfe8261893de70887052cf134bd5.# Copyright (c) 2015 ThoughtWorks
#
# See the file LICENSE for copying permission.
import flask
from openahjo_activity_streams import convert
import requests
import logging
import json
OPENAHJO_URL = 'http://dev.hel.fi/paatokset/v1/agenda_item/?order_by=-last_modified_time'
def create_app(remote_url=OPENAHJO_URL, converter=convert.to_activity_stream):
logging.basicConfig(level=logging.INFO)
application = flask.Flask(__name__)
application.config['REMOTE_URL'] = remote_url
application.config['CONVERTER'] = converter
@application.route('/')
def show_something():
openahjo_data = requests.get(application.config['REMOTE_URL'])
converted_data = application.config['CONVERTER'](openahjo_data.json())
return application.response_class(json.dumps(converted_data), mimetype='application/activity+json')
return application
application = create_app()
if __name__ == '__main__':
application.run()
|
<commit_before># Copyright (c) 2015 ThoughtWorks
#
# See the file LICENSE for copying permission.
import os
import flask
from openahjo_activity_streams import convert
import requests
import logging
import json
OPENAHJO_URL = 'http://dev.hel.fi/paatokset/v1/agenda_item/?order_by=-last_modified_time'
def create_app(remote_url=OPENAHJO_URL, converter=convert.to_activity_stream):
logging.basicConfig(level=logging.INFO)
application = flask.Flask(__name__, instance_path=os.environ['INSTANCE_PATH'])
application.config['REMOTE_URL'] = remote_url
application.config['CONVERTER'] = converter
@application.route('/')
def show_something():
openahjo_data = requests.get(application.config['REMOTE_URL'])
converted_data = application.config['CONVERTER'](openahjo_data.json())
return application.response_class(json.dumps(converted_data), mimetype='application/activity+json')
return application
application = create_app()
if __name__ == '__main__':
application.run()
<commit_msg>Revert "CW + AW | 225 | add instance path when building app"
This reverts commit 9aa8d2ec4f49dfe8261893de70887052cf134bd5.<commit_after># Copyright (c) 2015 ThoughtWorks
#
# See the file LICENSE for copying permission.
import flask
from openahjo_activity_streams import convert
import requests
import logging
import json
OPENAHJO_URL = 'http://dev.hel.fi/paatokset/v1/agenda_item/?order_by=-last_modified_time'
def create_app(remote_url=OPENAHJO_URL, converter=convert.to_activity_stream):
logging.basicConfig(level=logging.INFO)
application = flask.Flask(__name__)
application.config['REMOTE_URL'] = remote_url
application.config['CONVERTER'] = converter
@application.route('/')
def show_something():
openahjo_data = requests.get(application.config['REMOTE_URL'])
converted_data = application.config['CONVERTER'](openahjo_data.json())
return application.response_class(json.dumps(converted_data), mimetype='application/activity+json')
return application
application = create_app()
if __name__ == '__main__':
application.run()
|
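For illustration only, outside the record: once the instance_path keyword is reverted, create_app no longer needs the INSTANCE_PATH environment variable, so the factory can be exercised directly with Flask's test client. A sketch under that assumption, with the remote URL and converter stub being hypothetical and requests.get patched out:

from unittest import mock

from openahjo_activity_streams import server

def test_root_converts_remote_payload():
    app = server.create_app(remote_url='http://example.test/agenda',
                            converter=lambda payload: {'items': payload})
    with mock.patch('openahjo_activity_streams.server.requests.get') as get:
        get.return_value.json.return_value = [{'id': 1}]
        response = app.test_client().get('/')
    assert response.status_code == 200
    assert response.mimetype == 'application/activity+json'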
02bf1d3c37904af6b9ab41e05c23ed7e5cebc0f7
|
kolibri/core/auth/migrations/0016_add_adhoclearnersgroup_collection_kind.py
|
kolibri/core/auth/migrations/0016_add_adhoclearnersgroup_collection_kind.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-12-04 04:59
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("kolibriauth", "0015_facilitydataset_registered")]
operations = [
migrations.CreateModel(
name="AdHocGroup",
fields=[],
options={"indexes": [], "proxy": True},
bases=("kolibriauth.collection",),
),
migrations.AlterField(
model_name="collection",
name="kind",
field=models.CharField(
choices=[
("facility", "Facility"),
("classroom", "Classroom"),
("learnergroup", "Learner group"),
("adhoclearnersgroup", "Individual learners group"),
],
max_length=20,
),
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-12-04 04:59
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("kolibriauth", "0015_facilitydataset_registered")]
operations = [
migrations.CreateModel(
name="AdHocGroup",
fields=[],
options={"indexes": [], "proxy": True},
bases=("kolibriauth.collection",),
),
migrations.AlterField(
model_name="collection",
name="kind",
field=models.CharField(
choices=[
("facility", "Facility"),
("classroom", "Classroom"),
("learnergroup", "Learner group"),
("adhoclearnersgroup', 'Ad hoc learners group"),
],
max_length=20,
),
),
]
|
Fix migration file for new collection kind name
|
Fix migration file for new collection kind name
|
Python
|
mit
|
indirectlylit/kolibri,mrpau/kolibri,indirectlylit/kolibri,learningequality/kolibri,indirectlylit/kolibri,indirectlylit/kolibri,learningequality/kolibri,mrpau/kolibri,learningequality/kolibri,mrpau/kolibri,mrpau/kolibri,learningequality/kolibri
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-12-04 04:59
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("kolibriauth", "0015_facilitydataset_registered")]
operations = [
migrations.CreateModel(
name="AdHocGroup",
fields=[],
options={"indexes": [], "proxy": True},
bases=("kolibriauth.collection",),
),
migrations.AlterField(
model_name="collection",
name="kind",
field=models.CharField(
choices=[
("facility", "Facility"),
("classroom", "Classroom"),
("learnergroup", "Learner group"),
("adhoclearnersgroup", "Individual learners group"),
],
max_length=20,
),
),
]
Fix migration file for new collection kind name
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-12-04 04:59
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("kolibriauth", "0015_facilitydataset_registered")]
operations = [
migrations.CreateModel(
name="AdHocGroup",
fields=[],
options={"indexes": [], "proxy": True},
bases=("kolibriauth.collection",),
),
migrations.AlterField(
model_name="collection",
name="kind",
field=models.CharField(
choices=[
("facility", "Facility"),
("classroom", "Classroom"),
("learnergroup", "Learner group"),
("adhoclearnersgroup', 'Ad hoc learners group"),
],
max_length=20,
),
),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-12-04 04:59
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("kolibriauth", "0015_facilitydataset_registered")]
operations = [
migrations.CreateModel(
name="AdHocGroup",
fields=[],
options={"indexes": [], "proxy": True},
bases=("kolibriauth.collection",),
),
migrations.AlterField(
model_name="collection",
name="kind",
field=models.CharField(
choices=[
("facility", "Facility"),
("classroom", "Classroom"),
("learnergroup", "Learner group"),
("adhoclearnersgroup", "Individual learners group"),
],
max_length=20,
),
),
]
<commit_msg>Fix migration file for new collection kind name<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-12-04 04:59
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("kolibriauth", "0015_facilitydataset_registered")]
operations = [
migrations.CreateModel(
name="AdHocGroup",
fields=[],
options={"indexes": [], "proxy": True},
bases=("kolibriauth.collection",),
),
migrations.AlterField(
model_name="collection",
name="kind",
field=models.CharField(
choices=[
("facility", "Facility"),
("classroom", "Classroom"),
("learnergroup", "Learner group"),
("adhoclearnersgroup', 'Ad hoc learners group"),
],
max_length=20,
),
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-12-04 04:59
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("kolibriauth", "0015_facilitydataset_registered")]
operations = [
migrations.CreateModel(
name="AdHocGroup",
fields=[],
options={"indexes": [], "proxy": True},
bases=("kolibriauth.collection",),
),
migrations.AlterField(
model_name="collection",
name="kind",
field=models.CharField(
choices=[
("facility", "Facility"),
("classroom", "Classroom"),
("learnergroup", "Learner group"),
("adhoclearnersgroup", "Individual learners group"),
],
max_length=20,
),
),
]
Fix migration file for new collection kind name# -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-12-04 04:59
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("kolibriauth", "0015_facilitydataset_registered")]
operations = [
migrations.CreateModel(
name="AdHocGroup",
fields=[],
options={"indexes": [], "proxy": True},
bases=("kolibriauth.collection",),
),
migrations.AlterField(
model_name="collection",
name="kind",
field=models.CharField(
choices=[
("facility", "Facility"),
("classroom", "Classroom"),
("learnergroup", "Learner group"),
("adhoclearnersgroup', 'Ad hoc learners group"),
],
max_length=20,
),
),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-12-04 04:59
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("kolibriauth", "0015_facilitydataset_registered")]
operations = [
migrations.CreateModel(
name="AdHocGroup",
fields=[],
options={"indexes": [], "proxy": True},
bases=("kolibriauth.collection",),
),
migrations.AlterField(
model_name="collection",
name="kind",
field=models.CharField(
choices=[
("facility", "Facility"),
("classroom", "Classroom"),
("learnergroup", "Learner group"),
("adhoclearnersgroup", "Individual learners group"),
],
max_length=20,
),
),
]
<commit_msg>Fix migration file for new collection kind name<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-12-04 04:59
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("kolibriauth", "0015_facilitydataset_registered")]
operations = [
migrations.CreateModel(
name="AdHocGroup",
fields=[],
options={"indexes": [], "proxy": True},
bases=("kolibriauth.collection",),
),
migrations.AlterField(
model_name="collection",
name="kind",
field=models.CharField(
choices=[
("facility", "Facility"),
("classroom", "Classroom"),
("learnergroup", "Learner group"),
("adhoclearnersgroup', 'Ad hoc learners group"),
],
max_length=20,
),
),
]
|
638f6fb659792ec69b9df25391001241d12c39bd
|
src/python/grpcio_tests/tests_aio/unit/init_test.py
|
src/python/grpcio_tests/tests_aio/unit/init_test.py
|
# Copyright 2019 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import unittest
import grpc
from tests_aio.unit._test_base import AioTestBase
class TestInit(AioTestBase):
async def test_grpc_dot_aio(self):
channel = grpc.aio.insecure_channel('dummy')
self.assertIsInstance(channel, grpc.aio.Channel)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
unittest.main(verbosity=2)
|
# Copyright 2019 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import unittest
from tests_aio.unit._test_base import AioTestBase
class TestInit(AioTestBase):
async def test_grpc(self):
import grpc # pylint: disable=wrong-import-position
channel = grpc.aio.insecure_channel('dummy')
self.assertIsInstance(channel, grpc.aio.Channel)
async def test_grpc_dot_aio(self):
import grpc.aio # pylint: disable=wrong-import-position
channel = grpc.aio.insecure_channel('dummy')
self.assertIsInstance(channel, grpc.aio.Channel)
async def test_aio_from_grpc(self):
from grpc import aio # pylint: disable=wrong-import-position
channel = aio.insecure_channel('dummy')
self.assertIsInstance(channel, aio.Channel)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
unittest.main(verbosity=2)
|
Expand alternatives to import aio module
|
Expand alternatives to import aio module
|
Python
|
apache-2.0
|
donnadionne/grpc,nicolasnoble/grpc,jtattermusch/grpc,vjpai/grpc,stanley-cheung/grpc,donnadionne/grpc,donnadionne/grpc,donnadionne/grpc,ejona86/grpc,stanley-cheung/grpc,stanley-cheung/grpc,nicolasnoble/grpc,stanley-cheung/grpc,jtattermusch/grpc,stanley-cheung/grpc,ejona86/grpc,stanley-cheung/grpc,ctiller/grpc,vjpai/grpc,stanley-cheung/grpc,nicolasnoble/grpc,grpc/grpc,grpc/grpc,stanley-cheung/grpc,ejona86/grpc,ejona86/grpc,ctiller/grpc,donnadionne/grpc,vjpai/grpc,vjpai/grpc,nicolasnoble/grpc,nicolasnoble/grpc,ejona86/grpc,donnadionne/grpc,jtattermusch/grpc,vjpai/grpc,grpc/grpc,grpc/grpc,donnadionne/grpc,vjpai/grpc,vjpai/grpc,ejona86/grpc,ctiller/grpc,jtattermusch/grpc,grpc/grpc,jtattermusch/grpc,grpc/grpc,donnadionne/grpc,jtattermusch/grpc,stanley-cheung/grpc,ctiller/grpc,ejona86/grpc,donnadionne/grpc,ctiller/grpc,nicolasnoble/grpc,nicolasnoble/grpc,grpc/grpc,donnadionne/grpc,donnadionne/grpc,nicolasnoble/grpc,ejona86/grpc,ejona86/grpc,ctiller/grpc,vjpai/grpc,jtattermusch/grpc,jtattermusch/grpc,ctiller/grpc,grpc/grpc,donnadionne/grpc,jtattermusch/grpc,stanley-cheung/grpc,ctiller/grpc,grpc/grpc,ctiller/grpc,ejona86/grpc,vjpai/grpc,grpc/grpc,stanley-cheung/grpc,ctiller/grpc,vjpai/grpc,ejona86/grpc,nicolasnoble/grpc,stanley-cheung/grpc,vjpai/grpc,nicolasnoble/grpc,ctiller/grpc,grpc/grpc,ctiller/grpc,vjpai/grpc,nicolasnoble/grpc,jtattermusch/grpc,nicolasnoble/grpc,jtattermusch/grpc,ejona86/grpc,grpc/grpc,jtattermusch/grpc
|
# Copyright 2019 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import unittest
import grpc
from tests_aio.unit._test_base import AioTestBase
class TestInit(AioTestBase):
async def test_grpc_dot_aio(self):
channel = grpc.aio.insecure_channel('dummy')
self.assertIsInstance(channel, grpc.aio.Channel)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
unittest.main(verbosity=2)
Expand alternatives to import aio module
|
# Copyright 2019 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import unittest
from tests_aio.unit._test_base import AioTestBase
class TestInit(AioTestBase):
async def test_grpc(self):
import grpc # pylint: disable=wrong-import-position
channel = grpc.aio.insecure_channel('dummy')
self.assertIsInstance(channel, grpc.aio.Channel)
async def test_grpc_dot_aio(self):
import grpc.aio # pylint: disable=wrong-import-position
channel = grpc.aio.insecure_channel('dummy')
self.assertIsInstance(channel, grpc.aio.Channel)
async def test_aio_from_grpc(self):
from grpc import aio # pylint: disable=wrong-import-position
channel = aio.insecure_channel('dummy')
self.assertIsInstance(channel, aio.Channel)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
unittest.main(verbosity=2)
|
<commit_before># Copyright 2019 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import unittest
import grpc
from tests_aio.unit._test_base import AioTestBase
class TestInit(AioTestBase):
async def test_grpc_dot_aio(self):
channel = grpc.aio.insecure_channel('dummy')
self.assertIsInstance(channel, grpc.aio.Channel)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
unittest.main(verbosity=2)
<commit_msg>Expand alternatives to import aio module<commit_after>
|
# Copyright 2019 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import unittest
from tests_aio.unit._test_base import AioTestBase
class TestInit(AioTestBase):
async def test_grpc(self):
import grpc # pylint: disable=wrong-import-position
channel = grpc.aio.insecure_channel('dummy')
self.assertIsInstance(channel, grpc.aio.Channel)
async def test_grpc_dot_aio(self):
import grpc.aio # pylint: disable=wrong-import-position
channel = grpc.aio.insecure_channel('dummy')
self.assertIsInstance(channel, grpc.aio.Channel)
async def test_aio_from_grpc(self):
from grpc import aio # pylint: disable=wrong-import-position
channel = aio.insecure_channel('dummy')
self.assertIsInstance(channel, aio.Channel)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
unittest.main(verbosity=2)
|
# Copyright 2019 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import unittest
import grpc
from tests_aio.unit._test_base import AioTestBase
class TestInit(AioTestBase):
async def test_grpc_dot_aio(self):
channel = grpc.aio.insecure_channel('dummy')
self.assertIsInstance(channel, grpc.aio.Channel)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
unittest.main(verbosity=2)
Expand alternatives to import aio module# Copyright 2019 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import unittest
from tests_aio.unit._test_base import AioTestBase
class TestInit(AioTestBase):
async def test_grpc(self):
import grpc # pylint: disable=wrong-import-position
channel = grpc.aio.insecure_channel('dummy')
self.assertIsInstance(channel, grpc.aio.Channel)
async def test_grpc_dot_aio(self):
import grpc.aio # pylint: disable=wrong-import-position
channel = grpc.aio.insecure_channel('dummy')
self.assertIsInstance(channel, grpc.aio.Channel)
async def test_aio_from_grpc(self):
from grpc import aio # pylint: disable=wrong-import-position
channel = aio.insecure_channel('dummy')
self.assertIsInstance(channel, aio.Channel)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
unittest.main(verbosity=2)
|
<commit_before># Copyright 2019 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import unittest
import grpc
from tests_aio.unit._test_base import AioTestBase
class TestInit(AioTestBase):
async def test_grpc_dot_aio(self):
channel = grpc.aio.insecure_channel('dummy')
self.assertIsInstance(channel, grpc.aio.Channel)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
unittest.main(verbosity=2)
<commit_msg>Expand alternatives to import aio module<commit_after># Copyright 2019 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import unittest
from tests_aio.unit._test_base import AioTestBase
class TestInit(AioTestBase):
async def test_grpc(self):
import grpc # pylint: disable=wrong-import-position
channel = grpc.aio.insecure_channel('dummy')
self.assertIsInstance(channel, grpc.aio.Channel)
async def test_grpc_dot_aio(self):
import grpc.aio # pylint: disable=wrong-import-position
channel = grpc.aio.insecure_channel('dummy')
self.assertIsInstance(channel, grpc.aio.Channel)
async def test_aio_from_grpc(self):
from grpc import aio # pylint: disable=wrong-import-position
channel = aio.insecure_channel('dummy')
self.assertIsInstance(channel, aio.Channel)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
unittest.main(verbosity=2)
|
c6d68c78ac9391138d2f433248e35dc6fdd1cf98
|
setup_egg.py
|
setup_egg.py
|
#!/usr/bin/env python
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Wrapper to run setup.py using setuptools."""
if __name__ == '__main__':
exec('setup.py', dict(__name__='__main__',
__file__='setup.py', # needed in setup.py
force_setuptools=True))
|
#!/usr/bin/env python
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Wrapper to run setup.py using setuptools."""
if __name__ == '__main__':
with open('setup.py') as f:
exec(f.read(), dict(__name__='__main__',
__file__='setup.py', # needed in setup.py
force_setuptools=True))
|
Update call to `exec` to read the file in and execute the code.
|
Update call to `exec` to read the file in and execute the code.
|
Python
|
bsd-3-clause
|
FrancoisRheaultUS/dipy,FrancoisRheaultUS/dipy
|
#!/usr/bin/env python
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Wrapper to run setup.py using setuptools."""
if __name__ == '__main__':
exec('setup.py', dict(__name__='__main__',
__file__='setup.py', # needed in setup.py
force_setuptools=True))
Update call to `exec` to read the file in and execute the code.
|
#!/usr/bin/env python
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Wrapper to run setup.py using setuptools."""
if __name__ == '__main__':
with open('setup.py') as f:
exec(f.read(), dict(__name__='__main__',
__file__='setup.py', # needed in setup.py
force_setuptools=True))
|
<commit_before>#!/usr/bin/env python
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Wrapper to run setup.py using setuptools."""
if __name__ == '__main__':
exec('setup.py', dict(__name__='__main__',
__file__='setup.py', # needed in setup.py
force_setuptools=True))
<commit_msg>Update call to `exec` to read the file in and execute the code.<commit_after>
|
#!/usr/bin/env python
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Wrapper to run setup.py using setuptools."""
if __name__ == '__main__':
with open('setup.py') as f:
exec(f.read(), dict(__name__='__main__',
__file__='setup.py', # needed in setup.py
force_setuptools=True))
|
#!/usr/bin/env python
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Wrapper to run setup.py using setuptools."""
if __name__ == '__main__':
exec('setup.py', dict(__name__='__main__',
__file__='setup.py', # needed in setup.py
force_setuptools=True))
Update call to `exec` to read the file in and execute the code.#!/usr/bin/env python
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Wrapper to run setup.py using setuptools."""
if __name__ == '__main__':
with open('setup.py') as f:
exec(f.read(), dict(__name__='__main__',
__file__='setup.py', # needed in setup.py
force_setuptools=True))
|
<commit_before>#!/usr/bin/env python
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Wrapper to run setup.py using setuptools."""
if __name__ == '__main__':
exec('setup.py', dict(__name__='__main__',
__file__='setup.py', # needed in setup.py
force_setuptools=True))
<commit_msg>Update call to `exec` to read the file in and execute the code.<commit_after>#!/usr/bin/env python
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Wrapper to run setup.py using setuptools."""
if __name__ == '__main__':
with open('setup.py') as f:
exec(f.read(), dict(__name__='__main__',
__file__='setup.py', # needed in setup.py
force_setuptools=True))
|
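For illustration only: the bug in the old wrapper is that exec('setup.py', ...) executes the literal string 'setup.py' as Python source, which parses as the attribute lookup setup.py and raises NameError instead of running the file. Reading the file first, as the new version does, is one fix; a roughly equivalent standard-library alternative (an assumption here, not what the project chose) is runpy:

import runpy

if __name__ == '__main__':
    # Run setup.py as a script; init_globals pre-seeds the flag that
    # setup.py is assumed to check before importing setuptools.
    runpy.run_path('setup.py',
                   init_globals={'force_setuptools': True},
                   run_name='__main__')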
c140c1a6d32c2caaf9f0e5a87efd219b9573608a
|
shub/tool.py
|
shub/tool.py
|
import click, importlib
from shub.utils import missing_modules
def missingmod_cmd(modules):
modlist = ", ".join(modules)
@click.command(help="*DISABLED* - requires %s" % modlist)
@click.pass_context
def cmd(ctx):
click.echo("Error: '%s' command requires %s" % (ctx.info_name, modlist))
ctx.exit(1)
return cmd
@click.group(help="Scrapinghub command-line client")
def cli():
pass
module_deps = {
"deploy": ["scrapy", "setuptools"],
"login": [],
}
for command, modules in module_deps.iteritems():
m = missing_modules(*modules)
if m:
cli.add_command(missingmod_cmd(m), command)
else:
module_path = "shub." + command
command_module = importlib.import_module(module_path)
cli.add_command(command_module.cli, command)
|
import click, importlib
from shub.utils import missing_modules
def missingmod_cmd(modules):
modlist = ", ".join(modules)
@click.command(help="*DISABLED* - requires %s" % modlist)
@click.pass_context
def cmd(ctx):
click.echo("Error: '%s' command requires %s" % (ctx.info_name, modlist))
ctx.exit(1)
return cmd
@click.group(help="Scrapinghub command-line client")
def cli():
pass
module_deps = {
"deploy": ["scrapy", "setuptools"],
"login": [],
}
for command, modules in module_deps.iteritems():
m = missing_modules(*modules)
if m:
cli.add_command(missingmod_cmd(m), command)
else:
module_path = "shub." + command
command_module = importlib.import_module(module_path)
command_name = command.replace('_', '-') # easier to type
cli.add_command(command_module.cli, command_name)
|
Use hyphens instead of underscore for command names
|
Use hyphens instead of underscore for command names
|
Python
|
bsd-3-clause
|
scrapinghub/shub
|
import click, importlib
from shub.utils import missing_modules
def missingmod_cmd(modules):
modlist = ", ".join(modules)
@click.command(help="*DISABLED* - requires %s" % modlist)
@click.pass_context
def cmd(ctx):
click.echo("Error: '%s' command requires %s" % (ctx.info_name, modlist))
ctx.exit(1)
return cmd
@click.group(help="Scrapinghub command-line client")
def cli():
pass
module_deps = {
"deploy": ["scrapy", "setuptools"],
"login": [],
}
for command, modules in module_deps.iteritems():
m = missing_modules(*modules)
if m:
cli.add_command(missingmod_cmd(m), command)
else:
module_path = "shub." + command
command_module = importlib.import_module(module_path)
cli.add_command(command_module.cli, command)
Use hyphens instead of underscore for command names
|
import click, importlib
from shub.utils import missing_modules
def missingmod_cmd(modules):
modlist = ", ".join(modules)
@click.command(help="*DISABLED* - requires %s" % modlist)
@click.pass_context
def cmd(ctx):
click.echo("Error: '%s' command requires %s" % (ctx.info_name, modlist))
ctx.exit(1)
return cmd
@click.group(help="Scrapinghub command-line client")
def cli():
pass
module_deps = {
"deploy": ["scrapy", "setuptools"],
"login": [],
}
for command, modules in module_deps.iteritems():
m = missing_modules(*modules)
if m:
cli.add_command(missingmod_cmd(m), command)
else:
module_path = "shub." + command
command_module = importlib.import_module(module_path)
command_name = command.replace('_', '-') # easier to type
cli.add_command(command_module.cli, command_name)
|
<commit_before>import click, importlib
from shub.utils import missing_modules
def missingmod_cmd(modules):
modlist = ", ".join(modules)
@click.command(help="*DISABLED* - requires %s" % modlist)
@click.pass_context
def cmd(ctx):
click.echo("Error: '%s' command requires %s" % (ctx.info_name, modlist))
ctx.exit(1)
return cmd
@click.group(help="Scrapinghub command-line client")
def cli():
pass
module_deps = {
"deploy": ["scrapy", "setuptools"],
"login": [],
}
for command, modules in module_deps.iteritems():
m = missing_modules(*modules)
if m:
cli.add_command(missingmod_cmd(m), command)
else:
module_path = "shub." + command
command_module = importlib.import_module(module_path)
cli.add_command(command_module.cli, command)
<commit_msg>Use hyphens instead of underscore for command names<commit_after>
|
import click, importlib
from shub.utils import missing_modules
def missingmod_cmd(modules):
modlist = ", ".join(modules)
@click.command(help="*DISABLED* - requires %s" % modlist)
@click.pass_context
def cmd(ctx):
click.echo("Error: '%s' command requires %s" % (ctx.info_name, modlist))
ctx.exit(1)
return cmd
@click.group(help="Scrapinghub command-line client")
def cli():
pass
module_deps = {
"deploy": ["scrapy", "setuptools"],
"login": [],
}
for command, modules in module_deps.iteritems():
m = missing_modules(*modules)
if m:
cli.add_command(missingmod_cmd(m), command)
else:
module_path = "shub." + command
command_module = importlib.import_module(module_path)
command_name = command.replace('_', '-') # easier to type
cli.add_command(command_module.cli, command_name)
|
import click, importlib
from shub.utils import missing_modules
def missingmod_cmd(modules):
modlist = ", ".join(modules)
@click.command(help="*DISABLED* - requires %s" % modlist)
@click.pass_context
def cmd(ctx):
click.echo("Error: '%s' command requires %s" % (ctx.info_name, modlist))
ctx.exit(1)
return cmd
@click.group(help="Scrapinghub command-line client")
def cli():
pass
module_deps = {
"deploy": ["scrapy", "setuptools"],
"login": [],
}
for command, modules in module_deps.iteritems():
m = missing_modules(*modules)
if m:
cli.add_command(missingmod_cmd(m), command)
else:
module_path = "shub." + command
command_module = importlib.import_module(module_path)
cli.add_command(command_module.cli, command)
Use hyphens instead of underscore for command namesimport click, importlib
from shub.utils import missing_modules
def missingmod_cmd(modules):
modlist = ", ".join(modules)
@click.command(help="*DISABLED* - requires %s" % modlist)
@click.pass_context
def cmd(ctx):
click.echo("Error: '%s' command requires %s" % (ctx.info_name, modlist))
ctx.exit(1)
return cmd
@click.group(help="Scrapinghub command-line client")
def cli():
pass
module_deps = {
"deploy": ["scrapy", "setuptools"],
"login": [],
}
for command, modules in module_deps.iteritems():
m = missing_modules(*modules)
if m:
cli.add_command(missingmod_cmd(m), command)
else:
module_path = "shub." + command
command_module = importlib.import_module(module_path)
command_name = command.replace('_', '-') # easier to type
cli.add_command(command_module.cli, command_name)
|
<commit_before>import click, importlib
from shub.utils import missing_modules
def missingmod_cmd(modules):
modlist = ", ".join(modules)
@click.command(help="*DISABLED* - requires %s" % modlist)
@click.pass_context
def cmd(ctx):
click.echo("Error: '%s' command requires %s" % (ctx.info_name, modlist))
ctx.exit(1)
return cmd
@click.group(help="Scrapinghub command-line client")
def cli():
pass
module_deps = {
"deploy": ["scrapy", "setuptools"],
"login": [],
}
for command, modules in module_deps.iteritems():
m = missing_modules(*modules)
if m:
cli.add_command(missingmod_cmd(m), command)
else:
module_path = "shub." + command
command_module = importlib.import_module(module_path)
cli.add_command(command_module.cli, command)
<commit_msg>Use hyphens instead of underscore for command names<commit_after>import click, importlib
from shub.utils import missing_modules
def missingmod_cmd(modules):
modlist = ", ".join(modules)
@click.command(help="*DISABLED* - requires %s" % modlist)
@click.pass_context
def cmd(ctx):
click.echo("Error: '%s' command requires %s" % (ctx.info_name, modlist))
ctx.exit(1)
return cmd
@click.group(help="Scrapinghub command-line client")
def cli():
pass
module_deps = {
"deploy": ["scrapy", "setuptools"],
"login": [],
}
for command, modules in module_deps.iteritems():
m = missing_modules(*modules)
if m:
cli.add_command(missingmod_cmd(m), command)
else:
module_path = "shub." + command
command_module = importlib.import_module(module_path)
command_name = command.replace('_', '-') # easier to type
cli.add_command(command_module.cli, command_name)
|
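For illustration only: the point of the change is that a module key like 'fetch_eggs' should register as the command 'fetch-eggs'. A minimal, self-contained click sketch of that pattern; the command names here are hypothetical and this is not the shub code itself:

import click

@click.group()
def cli():
    """Toy command-line client."""

@click.command(help="Placeholder subcommand body.")
def placeholder():
    click.echo("ran")

for key in ('deploy', 'fetch_eggs', 'login'):
    command_name = key.replace('_', '-')  # 'fetch_eggs' -> 'fetch-eggs'
    cli.add_command(placeholder, command_name)

if __name__ == '__main__':
    cli()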
d040311ba25eca9459f60336391002a1d661d448
|
sift/util.py
|
sift/util.py
|
import re
from pattern import en
# todo: use spacy tokenization
def ngrams(text, n=1):
for i in xrange(n):
for n in en.ngrams(text, n=i+1):
yield ' '.join(n)
SENT_RE = re.compile('((?<!\w\.\w.)(?<![A-Z][a-z]\.)(?<=\.|[\?!])\s)|(\s*\n\s*)')
def iter_sent_spans(text):
last = 0
for m in SENT_RE.finditer(text):
yield slice(last, m.start())
last = m.end()
if last != len(text):
yield slice(last, len(text))
def trim_link_subsection(s):
idx = s.find('#')
return s if idx == -1 else s[:idx]
def trim_link_protocol(s):
idx = s.find('://')
return s if idx == -1 else s[idx+3:]
|
import re
from pattern import en
# todo: use spacy tokenization
def ngrams(text, max_n=1, min_n=1):
for i in xrange(min_n-1,max_n):
for n in en.ngrams(text, n=i+1):
yield ' '.join(n)
SENT_RE = re.compile('((?<!\w\.\w.)(?<![A-Z][a-z]\.)(?<=\.|[\?!])\s)|(\s*\n\s*)')
def iter_sent_spans(text):
last = 0
for m in SENT_RE.finditer(text):
yield slice(last, m.start())
last = m.end()
if last != len(text):
yield slice(last, len(text))
def trim_link_subsection(s):
idx = s.find('#')
return s if idx == -1 else s[:idx]
def trim_link_protocol(s):
idx = s.find('://')
return s if idx == -1 else s[idx+3:]
|
Add min argument for n when generating ngrams
|
Add min argument for n when generating ngrams
|
Python
|
mit
|
wikilinks/sift,wikilinks/sift
|
import re
from pattern import en
# todo: use spacy tokenization
def ngrams(text, n=1):
for i in xrange(n):
for n in en.ngrams(text, n=i+1):
yield ' '.join(n)
SENT_RE = re.compile('((?<!\w\.\w.)(?<![A-Z][a-z]\.)(?<=\.|[\?!])\s)|(\s*\n\s*)')
def iter_sent_spans(text):
last = 0
for m in SENT_RE.finditer(text):
yield slice(last, m.start())
last = m.end()
if last != len(text):
yield slice(last, len(text))
def trim_link_subsection(s):
idx = s.find('#')
return s if idx == -1 else s[:idx]
def trim_link_protocol(s):
idx = s.find('://')
return s if idx == -1 else s[idx+3:]
Add min argument for n when generating ngrams
|
import re
from pattern import en
# todo: use spacy tokenization
def ngrams(text, max_n=1, min_n=1):
for i in xrange(min_n-1,max_n):
for n in en.ngrams(text, n=i+1):
yield ' '.join(n)
SENT_RE = re.compile('((?<!\w\.\w.)(?<![A-Z][a-z]\.)(?<=\.|[\?!])\s)|(\s*\n\s*)')
def iter_sent_spans(text):
last = 0
for m in SENT_RE.finditer(text):
yield slice(last, m.start())
last = m.end()
if last != len(text):
yield slice(last, len(text))
def trim_link_subsection(s):
idx = s.find('#')
return s if idx == -1 else s[:idx]
def trim_link_protocol(s):
idx = s.find('://')
return s if idx == -1 else s[idx+3:]
|
<commit_before>import re
from pattern import en
# todo: use spacy tokenization
def ngrams(text, n=1):
for i in xrange(n):
for n in en.ngrams(text, n=i+1):
yield ' '.join(n)
SENT_RE = re.compile('((?<!\w\.\w.)(?<![A-Z][a-z]\.)(?<=\.|[\?!])\s)|(\s*\n\s*)')
def iter_sent_spans(text):
last = 0
for m in SENT_RE.finditer(text):
yield slice(last, m.start())
last = m.end()
if last != len(text):
yield slice(last, len(text))
def trim_link_subsection(s):
idx = s.find('#')
return s if idx == -1 else s[:idx]
def trim_link_protocol(s):
idx = s.find('://')
return s if idx == -1 else s[idx+3:]
<commit_msg>Add min argument for n when generating ngrams<commit_after>
|
import re
from pattern import en
# todo: use spacy tokenization
def ngrams(text, max_n=1, min_n=1):
for i in xrange(min_n-1,max_n):
for n in en.ngrams(text, n=i+1):
yield ' '.join(n)
SENT_RE = re.compile('((?<!\w\.\w.)(?<![A-Z][a-z]\.)(?<=\.|[\?!])\s)|(\s*\n\s*)')
def iter_sent_spans(text):
last = 0
for m in SENT_RE.finditer(text):
yield slice(last, m.start())
last = m.end()
if last != len(text):
yield slice(last, len(text))
def trim_link_subsection(s):
idx = s.find('#')
return s if idx == -1 else s[:idx]
def trim_link_protocol(s):
idx = s.find('://')
return s if idx == -1 else s[idx+3:]
|
import re
from pattern import en
# todo: use spacy tokenization
def ngrams(text, n=1):
for i in xrange(n):
for n in en.ngrams(text, n=i+1):
yield ' '.join(n)
SENT_RE = re.compile('((?<!\w\.\w.)(?<![A-Z][a-z]\.)(?<=\.|[\?!])\s)|(\s*\n\s*)')
def iter_sent_spans(text):
last = 0
for m in SENT_RE.finditer(text):
yield slice(last, m.start())
last = m.end()
if last != len(text):
yield slice(last, len(text))
def trim_link_subsection(s):
idx = s.find('#')
return s if idx == -1 else s[:idx]
def trim_link_protocol(s):
idx = s.find('://')
return s if idx == -1 else s[idx+3:]
Add min argument for n when generating ngramsimport re
from pattern import en
# todo: use spacy tokenization
def ngrams(text, max_n=1, min_n=1):
for i in xrange(min_n-1,max_n):
for n in en.ngrams(text, n=i+1):
yield ' '.join(n)
SENT_RE = re.compile('((?<!\w\.\w.)(?<![A-Z][a-z]\.)(?<=\.|[\?!])\s)|(\s*\n\s*)')
def iter_sent_spans(text):
last = 0
for m in SENT_RE.finditer(text):
yield slice(last, m.start())
last = m.end()
if last != len(text):
yield slice(last, len(text))
def trim_link_subsection(s):
idx = s.find('#')
return s if idx == -1 else s[:idx]
def trim_link_protocol(s):
idx = s.find('://')
return s if idx == -1 else s[idx+3:]
|
<commit_before>import re
from pattern import en
# todo: use spacy tokenization
def ngrams(text, n=1):
for i in xrange(n):
for n in en.ngrams(text, n=i+1):
yield ' '.join(n)
SENT_RE = re.compile('((?<!\w\.\w.)(?<![A-Z][a-z]\.)(?<=\.|[\?!])\s)|(\s*\n\s*)')
def iter_sent_spans(text):
last = 0
for m in SENT_RE.finditer(text):
yield slice(last, m.start())
last = m.end()
if last != len(text):
yield slice(last, len(text))
def trim_link_subsection(s):
idx = s.find('#')
return s if idx == -1 else s[:idx]
def trim_link_protocol(s):
idx = s.find('://')
return s if idx == -1 else s[idx+3:]
<commit_msg>Add min argument for n when generating ngrams<commit_after>import re
from pattern import en
# todo: use spacy tokenization
def ngrams(text, max_n=1, min_n=1):
for i in xrange(min_n-1,max_n):
for n in en.ngrams(text, n=i+1):
yield ' '.join(n)
SENT_RE = re.compile('((?<!\w\.\w.)(?<![A-Z][a-z]\.)(?<=\.|[\?!])\s)|(\s*\n\s*)')
def iter_sent_spans(text):
last = 0
for m in SENT_RE.finditer(text):
yield slice(last, m.start())
last = m.end()
if last != len(text):
yield slice(last, len(text))
def trim_link_subsection(s):
idx = s.find('#')
return s if idx == -1 else s[:idx]
def trim_link_protocol(s):
idx = s.find('://')
return s if idx == -1 else s[idx+3:]
|
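For illustration only: a sketch of how the widened signature might be called; it assumes Python 2 (the function uses xrange) with the pattern library installed, and the sample sentence is hypothetical:

from sift.util import ngrams

text = 'the quick brown fox jumps'
# Bigrams and trigrams only: min_n=2, max_n=3.
print(list(ngrams(text, max_n=3, min_n=2)))
# Unigrams through trigrams; min_n defaults to 1 as before.
print(list(ngrams(text, max_n=3)))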
a54127994b110e65423b9ef956ed3b26dfc10d2d
|
tyr/servers/zookeeper/server.py
|
tyr/servers/zookeeper/server.py
|
from tyr.servers.server import Server
class ZookeeperServer(Server):
SERVER_TYPE = 'zookeeper'
CHEF_RUNLIST = ['role[RoleZookeeper]']
IAM_ROLE_POLICIES = [
'allow-describe-instances',
'allow-describe-tags',
'allow-volume-control'
]
def __init__(self, group=None, server_type=None, instance_type=None,
environment=None, ami=None, region=None, role=None,
keypair=None, availability_zone=None, security_groups=None,
block_devices=None, chef_path=None, subnet_id=None,
dns_zones=None):
if server_type is None:
server_type = self.SERVER_TYPE
super(ZookeeperServer, self).__init__(group, server_type, instance_type,
environment, ami, region, role,
keypair, availability_zone,
security_groups, block_devices,
chef_path, subnet_id, dns_zones)
|
from tyr.servers.server import Server
class ZookeeperServer(Server):
SERVER_TYPE = 'zookeeper'
CHEF_RUNLIST = ['role[RoleZookeeper]']
IAM_ROLE_POLICIES = [
'allow-describe-instances',
'allow-describe-tags',
'allow-volume-control'
]
def __init__(self, group=None, server_type=None, instance_type=None,
environment=None, ami=None, region=None, role=None,
keypair=None, availability_zone=None, security_groups=None,
block_devices=None, chef_path=None, subnet_id=None,
dns_zones=None, exhibitor_s3config=None):
if server_type is None:
server_type = self.SERVER_TYPE
self.exhibitor_s3config = exhibitor_s3config
super(ZookeeperServer, self).__init__(group, server_type, instance_type,
environment, ami, region, role,
keypair, availability_zone,
security_groups, block_devices,
chef_path, subnet_id, dns_zones)
def bake(self):
super(ZookeeperServer, self).bake()
with self.chef_api:
if self.exhibitor_s3config:
self.chef_node.attributes.set_dotted('exhibitor.cli.s3config',
self.exhibitor_s3config)
self.log.info('Set exhibitor.cli.s3config to {}'
.format(self.exhibitor_s3config))
else:
self.log.info('exhibitor.cli.s3config not set. Using default.')
self.chef_node.save()
self.log.info('Saved the Chef Node configuration')
|
Add exhibitor_s3config option to Zookeeper creation
|
Add exhibitor_s3config option to Zookeeper creation
|
Python
|
unlicense
|
hudl/Tyr
|
from tyr.servers.server import Server
class ZookeeperServer(Server):
SERVER_TYPE = 'zookeeper'
CHEF_RUNLIST = ['role[RoleZookeeper]']
IAM_ROLE_POLICIES = [
'allow-describe-instances',
'allow-describe-tags',
'allow-volume-control'
]
def __init__(self, group=None, server_type=None, instance_type=None,
environment=None, ami=None, region=None, role=None,
keypair=None, availability_zone=None, security_groups=None,
block_devices=None, chef_path=None, subnet_id=None,
dns_zones=None):
if server_type is None:
server_type = self.SERVER_TYPE
super(ZookeeperServer, self).__init__(group, server_type, instance_type,
environment, ami, region, role,
keypair, availability_zone,
security_groups, block_devices,
chef_path, subnet_id, dns_zones)
Add exhibitor_s3config option to Zookeeper creation
|
from tyr.servers.server import Server
class ZookeeperServer(Server):
SERVER_TYPE = 'zookeeper'
CHEF_RUNLIST = ['role[RoleZookeeper]']
IAM_ROLE_POLICIES = [
'allow-describe-instances',
'allow-describe-tags',
'allow-volume-control'
]
def __init__(self, group=None, server_type=None, instance_type=None,
environment=None, ami=None, region=None, role=None,
keypair=None, availability_zone=None, security_groups=None,
block_devices=None, chef_path=None, subnet_id=None,
dns_zones=None, exhibitor_s3config=None):
if server_type is None:
server_type = self.SERVER_TYPE
self.exhibitor_s3config = exhibitor_s3config
super(ZookeeperServer, self).__init__(group, server_type, instance_type,
environment, ami, region, role,
keypair, availability_zone,
security_groups, block_devices,
chef_path, subnet_id, dns_zones)
def bake(self):
super(ZookeeperServer, self).bake()
with self.chef_api:
if self.exhibitor_s3config:
self.chef_node.attributes.set_dotted('exhibitor.cli.s3config',
self.exhibitor_s3config)
self.log.info('Set exhibitor.cli.s3config to {}'
.format(self.exhibitor_s3config))
else:
self.log.info('exhibitor.cli.s3config not set. Using default.')
self.chef_node.save()
self.log.info('Saved the Chef Node configuration')
|
<commit_before>from tyr.servers.server import Server
class ZookeeperServer(Server):
SERVER_TYPE = 'zookeeper'
CHEF_RUNLIST = ['role[RoleZookeeper]']
IAM_ROLE_POLICIES = [
'allow-describe-instances',
'allow-describe-tags',
'allow-volume-control'
]
def __init__(self, group=None, server_type=None, instance_type=None,
environment=None, ami=None, region=None, role=None,
keypair=None, availability_zone=None, security_groups=None,
block_devices=None, chef_path=None, subnet_id=None,
dns_zones=None):
if server_type is None:
server_type = self.SERVER_TYPE
super(ZookeeperServer, self).__init__(group, server_type, instance_type,
environment, ami, region, role,
keypair, availability_zone,
security_groups, block_devices,
chef_path, subnet_id, dns_zones)
<commit_msg>Add exhibitor_s3config option to Zookeeper creation<commit_after>
|
from tyr.servers.server import Server
class ZookeeperServer(Server):
SERVER_TYPE = 'zookeeper'
CHEF_RUNLIST = ['role[RoleZookeeper]']
IAM_ROLE_POLICIES = [
'allow-describe-instances',
'allow-describe-tags',
'allow-volume-control'
]
def __init__(self, group=None, server_type=None, instance_type=None,
environment=None, ami=None, region=None, role=None,
keypair=None, availability_zone=None, security_groups=None,
block_devices=None, chef_path=None, subnet_id=None,
dns_zones=None, exhibitor_s3config=None):
if server_type is None:
server_type = self.SERVER_TYPE
self.exhibitor_s3config = exhibitor_s3config
super(ZookeeperServer, self).__init__(group, server_type, instance_type,
environment, ami, region, role,
keypair, availability_zone,
security_groups, block_devices,
chef_path, subnet_id, dns_zones)
def bake(self):
super(ZookeeperServer, self).bake()
with self.chef_api:
if self.exhibitor_s3config:
self.chef_node.attributes.set_dotted('exhibitor.cli.s3config',
self.exhibitor_s3config)
self.log.info('Set exhibitor.cli.s3config to {}'
.format(self.exhibitor_s3config))
else:
self.log.info('exhibitor.cli.s3config not set. Using default.')
self.chef_node.save()
self.log.info('Saved the Chef Node configuration')
|
from tyr.servers.server import Server
class ZookeeperServer(Server):
SERVER_TYPE = 'zookeeper'
CHEF_RUNLIST = ['role[RoleZookeeper]']
IAM_ROLE_POLICIES = [
'allow-describe-instances',
'allow-describe-tags',
'allow-volume-control'
]
def __init__(self, group=None, server_type=None, instance_type=None,
environment=None, ami=None, region=None, role=None,
keypair=None, availability_zone=None, security_groups=None,
block_devices=None, chef_path=None, subnet_id=None,
dns_zones=None):
if server_type is None:
server_type = self.SERVER_TYPE
super(ZookeeperServer, self).__init__(group, server_type, instance_type,
environment, ami, region, role,
keypair, availability_zone,
security_groups, block_devices,
chef_path, subnet_id, dns_zones)
Add exhibitor_s3config option to Zookeeper creationfrom tyr.servers.server import Server
class ZookeeperServer(Server):
SERVER_TYPE = 'zookeeper'
CHEF_RUNLIST = ['role[RoleZookeeper]']
IAM_ROLE_POLICIES = [
'allow-describe-instances',
'allow-describe-tags',
'allow-volume-control'
]
def __init__(self, group=None, server_type=None, instance_type=None,
environment=None, ami=None, region=None, role=None,
keypair=None, availability_zone=None, security_groups=None,
block_devices=None, chef_path=None, subnet_id=None,
dns_zones=None, exhibitor_s3config=None):
if server_type is None:
server_type = self.SERVER_TYPE
self.exhibitor_s3config = exhibitor_s3config
super(ZookeeperServer, self).__init__(group, server_type, instance_type,
environment, ami, region, role,
keypair, availability_zone,
security_groups, block_devices,
chef_path, subnet_id, dns_zones)
def bake(self):
super(ZookeeperServer, self).bake()
with self.chef_api:
if self.exhibitor_s3config:
self.chef_node.attributes.set_dotted('exhibitor.cli.s3config',
self.exhibitor_s3config)
self.log.info('Set exhibitor.cli.s3config to {}'
.format(self.exhibitor_s3config))
else:
self.log.info('exhibitor.cli.s3config not set. Using default.')
self.chef_node.save()
self.log.info('Saved the Chef Node configuration')
|
<commit_before>from tyr.servers.server import Server
class ZookeeperServer(Server):
SERVER_TYPE = 'zookeeper'
CHEF_RUNLIST = ['role[RoleZookeeper]']
IAM_ROLE_POLICIES = [
'allow-describe-instances',
'allow-describe-tags',
'allow-volume-control'
]
def __init__(self, group=None, server_type=None, instance_type=None,
environment=None, ami=None, region=None, role=None,
keypair=None, availability_zone=None, security_groups=None,
block_devices=None, chef_path=None, subnet_id=None,
dns_zones=None):
if server_type is None:
server_type = self.SERVER_TYPE
super(ZookeeperServer, self).__init__(group, server_type, instance_type,
environment, ami, region, role,
keypair, availability_zone,
security_groups, block_devices,
chef_path, subnet_id, dns_zones)
<commit_msg>Add exhibitor_s3config option to Zookeeper creation<commit_after>from tyr.servers.server import Server
class ZookeeperServer(Server):
SERVER_TYPE = 'zookeeper'
CHEF_RUNLIST = ['role[RoleZookeeper]']
IAM_ROLE_POLICIES = [
'allow-describe-instances',
'allow-describe-tags',
'allow-volume-control'
]
def __init__(self, group=None, server_type=None, instance_type=None,
environment=None, ami=None, region=None, role=None,
keypair=None, availability_zone=None, security_groups=None,
block_devices=None, chef_path=None, subnet_id=None,
dns_zones=None, exhibitor_s3config=None):
if server_type is None:
server_type = self.SERVER_TYPE
self.exhibitor_s3config = exhibitor_s3config
super(ZookeeperServer, self).__init__(group, server_type, instance_type,
environment, ami, region, role,
keypair, availability_zone,
security_groups, block_devices,
chef_path, subnet_id, dns_zones)
def bake(self):
super(ZookeeperServer, self).bake()
with self.chef_api:
if self.exhibitor_s3config:
self.chef_node.attributes.set_dotted('exhibitor.cli.s3config',
self.exhibitor_s3config)
self.log.info('Set exhibitor.cli.s3config to {}'
.format(self.exhibitor_s3config))
else:
self.log.info('exhibitor.cli.s3config not set. Using default.')
self.chef_node.save()
self.log.info('Saved the Chef Node configuration')
|
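Aside on the pattern in the record above: an optional keyword is stored on the instance before the parent constructor runs, then applied during a later provisioning step only if it was supplied. A minimal, self-contained sketch of that shape; the class and attribute names below are hypothetical stand-ins, not part of tyr or Chef.

class BaseServer(object):
    def __init__(self, group=None):
        self.group = group
        self.node_attributes = {}   # stands in for the Chef node

    def bake(self):
        pass                        # base provisioning hook

class ZookeeperLikeServer(BaseServer):
    def __init__(self, group=None, s3config=None):
        self.s3config = s3config    # stash before delegating upward
        super(ZookeeperLikeServer, self).__init__(group)

    def bake(self):
        super(ZookeeperLikeServer, self).bake()
        if self.s3config:           # only override when explicitly given
            self.node_attributes['exhibitor.cli.s3config'] = self.s3config

server = ZookeeperLikeServer(group='zk', s3config='bucket:key')
server.bake()
assert server.node_attributes['exhibitor.cli.s3config'] == 'bucket:key'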
8ba05402376dc2d368bae226f929b9a0b448a3c5
|
localized_fields/admin.py
|
localized_fields/admin.py
|
from django.contrib.admin import ModelAdmin
from . import widgets
from .fields import LocalizedField, LocalizedCharField, LocalizedTextField, \
LocalizedFileField
FORMFIELD_FOR_LOCALIZED_FIELDS_DEFAULTS = {
LocalizedField: {'widget': widgets.AdminLocalizedFieldWidget},
LocalizedCharField: {'widget': widgets.AdminLocalizedCharFieldWidget},
LocalizedTextField: {'widget': widgets.AdminLocalizedFieldWidget},
LocalizedFileField: {'widget': widgets.AdminLocalizedFileFieldWidget},
}
class LocalizedFieldsAdminMixin(ModelAdmin):
"""Mixin for making the fancy widgets work in Django Admin."""
class Media:
css = {
'all': (
'localized_fields/localized-fields-admin.css',
)
}
js = (
'localized_fields/localized-fields-admin.js',
)
def __init__(self, *args, **kwargs):
"""Initializes a new instance of :see:LocalizedFieldsAdminMixin."""
super().__init__(*args, **kwargs)
overrides = FORMFIELD_FOR_LOCALIZED_FIELDS_DEFAULTS.copy()
overrides.update(self.formfield_overrides)
self.formfield_overrides = overrides
|
from . import widgets
from .fields import LocalizedField, LocalizedCharField, LocalizedTextField, \
LocalizedFileField
FORMFIELD_FOR_LOCALIZED_FIELDS_DEFAULTS = {
LocalizedField: {'widget': widgets.AdminLocalizedFieldWidget},
LocalizedCharField: {'widget': widgets.AdminLocalizedCharFieldWidget},
LocalizedTextField: {'widget': widgets.AdminLocalizedFieldWidget},
LocalizedFileField: {'widget': widgets.AdminLocalizedFileFieldWidget},
}
class LocalizedFieldsAdminMixin:
"""Mixin for making the fancy widgets work in Django Admin."""
class Media:
css = {
'all': (
'localized_fields/localized-fields-admin.css',
)
}
js = (
'localized_fields/localized-fields-admin.js',
)
def __init__(self, *args, **kwargs):
"""Initializes a new instance of :see:LocalizedFieldsAdminMixin."""
super().__init__(*args, **kwargs)
overrides = FORMFIELD_FOR_LOCALIZED_FIELDS_DEFAULTS.copy()
overrides.update(self.formfield_overrides)
self.formfield_overrides = overrides
|
Fix using LocalizedFieldsAdminMixin with inlines
|
Fix using LocalizedFieldsAdminMixin with inlines
|
Python
|
mit
|
SectorLabs/django-localized-fields,SectorLabs/django-localized-fields,SectorLabs/django-localized-fields
|
from django.contrib.admin import ModelAdmin
from . import widgets
from .fields import LocalizedField, LocalizedCharField, LocalizedTextField, \
LocalizedFileField
FORMFIELD_FOR_LOCALIZED_FIELDS_DEFAULTS = {
LocalizedField: {'widget': widgets.AdminLocalizedFieldWidget},
LocalizedCharField: {'widget': widgets.AdminLocalizedCharFieldWidget},
LocalizedTextField: {'widget': widgets.AdminLocalizedFieldWidget},
LocalizedFileField: {'widget': widgets.AdminLocalizedFileFieldWidget},
}
class LocalizedFieldsAdminMixin(ModelAdmin):
"""Mixin for making the fancy widgets work in Django Admin."""
class Media:
css = {
'all': (
'localized_fields/localized-fields-admin.css',
)
}
js = (
'localized_fields/localized-fields-admin.js',
)
def __init__(self, *args, **kwargs):
"""Initializes a new instance of :see:LocalizedFieldsAdminMixin."""
super().__init__(*args, **kwargs)
overrides = FORMFIELD_FOR_LOCALIZED_FIELDS_DEFAULTS.copy()
overrides.update(self.formfield_overrides)
self.formfield_overrides = overrides
Fix using LocalizedFieldsAdminMixin with inlines
|
from . import widgets
from .fields import LocalizedField, LocalizedCharField, LocalizedTextField, \
LocalizedFileField
FORMFIELD_FOR_LOCALIZED_FIELDS_DEFAULTS = {
LocalizedField: {'widget': widgets.AdminLocalizedFieldWidget},
LocalizedCharField: {'widget': widgets.AdminLocalizedCharFieldWidget},
LocalizedTextField: {'widget': widgets.AdminLocalizedFieldWidget},
LocalizedFileField: {'widget': widgets.AdminLocalizedFileFieldWidget},
}
class LocalizedFieldsAdminMixin:
"""Mixin for making the fancy widgets work in Django Admin."""
class Media:
css = {
'all': (
'localized_fields/localized-fields-admin.css',
)
}
js = (
'localized_fields/localized-fields-admin.js',
)
def __init__(self, *args, **kwargs):
"""Initializes a new instance of :see:LocalizedFieldsAdminMixin."""
super().__init__(*args, **kwargs)
overrides = FORMFIELD_FOR_LOCALIZED_FIELDS_DEFAULTS.copy()
overrides.update(self.formfield_overrides)
self.formfield_overrides = overrides
|
<commit_before>from django.contrib.admin import ModelAdmin
from . import widgets
from .fields import LocalizedField, LocalizedCharField, LocalizedTextField, \
LocalizedFileField
FORMFIELD_FOR_LOCALIZED_FIELDS_DEFAULTS = {
LocalizedField: {'widget': widgets.AdminLocalizedFieldWidget},
LocalizedCharField: {'widget': widgets.AdminLocalizedCharFieldWidget},
LocalizedTextField: {'widget': widgets.AdminLocalizedFieldWidget},
LocalizedFileField: {'widget': widgets.AdminLocalizedFileFieldWidget},
}
class LocalizedFieldsAdminMixin(ModelAdmin):
"""Mixin for making the fancy widgets work in Django Admin."""
class Media:
css = {
'all': (
'localized_fields/localized-fields-admin.css',
)
}
js = (
'localized_fields/localized-fields-admin.js',
)
def __init__(self, *args, **kwargs):
"""Initializes a new instance of :see:LocalizedFieldsAdminMixin."""
super().__init__(*args, **kwargs)
overrides = FORMFIELD_FOR_LOCALIZED_FIELDS_DEFAULTS.copy()
overrides.update(self.formfield_overrides)
self.formfield_overrides = overrides
<commit_msg>Fix using LocalizedFieldsAdminMixin with inlines<commit_after>
|
from . import widgets
from .fields import LocalizedField, LocalizedCharField, LocalizedTextField, \
LocalizedFileField
FORMFIELD_FOR_LOCALIZED_FIELDS_DEFAULTS = {
LocalizedField: {'widget': widgets.AdminLocalizedFieldWidget},
LocalizedCharField: {'widget': widgets.AdminLocalizedCharFieldWidget},
LocalizedTextField: {'widget': widgets.AdminLocalizedFieldWidget},
LocalizedFileField: {'widget': widgets.AdminLocalizedFileFieldWidget},
}
class LocalizedFieldsAdminMixin:
"""Mixin for making the fancy widgets work in Django Admin."""
class Media:
css = {
'all': (
'localized_fields/localized-fields-admin.css',
)
}
js = (
'localized_fields/localized-fields-admin.js',
)
def __init__(self, *args, **kwargs):
"""Initializes a new instance of :see:LocalizedFieldsAdminMixin."""
super().__init__(*args, **kwargs)
overrides = FORMFIELD_FOR_LOCALIZED_FIELDS_DEFAULTS.copy()
overrides.update(self.formfield_overrides)
self.formfield_overrides = overrides
|
from django.contrib.admin import ModelAdmin
from . import widgets
from .fields import LocalizedField, LocalizedCharField, LocalizedTextField, \
LocalizedFileField
FORMFIELD_FOR_LOCALIZED_FIELDS_DEFAULTS = {
LocalizedField: {'widget': widgets.AdminLocalizedFieldWidget},
LocalizedCharField: {'widget': widgets.AdminLocalizedCharFieldWidget},
LocalizedTextField: {'widget': widgets.AdminLocalizedFieldWidget},
LocalizedFileField: {'widget': widgets.AdminLocalizedFileFieldWidget},
}
class LocalizedFieldsAdminMixin(ModelAdmin):
"""Mixin for making the fancy widgets work in Django Admin."""
class Media:
css = {
'all': (
'localized_fields/localized-fields-admin.css',
)
}
js = (
'localized_fields/localized-fields-admin.js',
)
def __init__(self, *args, **kwargs):
"""Initializes a new instance of :see:LocalizedFieldsAdminMixin."""
super().__init__(*args, **kwargs)
overrides = FORMFIELD_FOR_LOCALIZED_FIELDS_DEFAULTS.copy()
overrides.update(self.formfield_overrides)
self.formfield_overrides = overrides
Fix using LocalizedFieldsAdminMixin with inlinesfrom . import widgets
from .fields import LocalizedField, LocalizedCharField, LocalizedTextField, \
LocalizedFileField
FORMFIELD_FOR_LOCALIZED_FIELDS_DEFAULTS = {
LocalizedField: {'widget': widgets.AdminLocalizedFieldWidget},
LocalizedCharField: {'widget': widgets.AdminLocalizedCharFieldWidget},
LocalizedTextField: {'widget': widgets.AdminLocalizedFieldWidget},
LocalizedFileField: {'widget': widgets.AdminLocalizedFileFieldWidget},
}
class LocalizedFieldsAdminMixin:
"""Mixin for making the fancy widgets work in Django Admin."""
class Media:
css = {
'all': (
'localized_fields/localized-fields-admin.css',
)
}
js = (
'localized_fields/localized-fields-admin.js',
)
def __init__(self, *args, **kwargs):
"""Initializes a new instance of :see:LocalizedFieldsAdminMixin."""
super().__init__(*args, **kwargs)
overrides = FORMFIELD_FOR_LOCALIZED_FIELDS_DEFAULTS.copy()
overrides.update(self.formfield_overrides)
self.formfield_overrides = overrides
|
<commit_before>from django.contrib.admin import ModelAdmin
from . import widgets
from .fields import LocalizedField, LocalizedCharField, LocalizedTextField, \
LocalizedFileField
FORMFIELD_FOR_LOCALIZED_FIELDS_DEFAULTS = {
LocalizedField: {'widget': widgets.AdminLocalizedFieldWidget},
LocalizedCharField: {'widget': widgets.AdminLocalizedCharFieldWidget},
LocalizedTextField: {'widget': widgets.AdminLocalizedFieldWidget},
LocalizedFileField: {'widget': widgets.AdminLocalizedFileFieldWidget},
}
class LocalizedFieldsAdminMixin(ModelAdmin):
"""Mixin for making the fancy widgets work in Django Admin."""
class Media:
css = {
'all': (
'localized_fields/localized-fields-admin.css',
)
}
js = (
'localized_fields/localized-fields-admin.js',
)
def __init__(self, *args, **kwargs):
"""Initializes a new instance of :see:LocalizedFieldsAdminMixin."""
super().__init__(*args, **kwargs)
overrides = FORMFIELD_FOR_LOCALIZED_FIELDS_DEFAULTS.copy()
overrides.update(self.formfield_overrides)
self.formfield_overrides = overrides
<commit_msg>Fix using LocalizedFieldsAdminMixin with inlines<commit_after>from . import widgets
from .fields import LocalizedField, LocalizedCharField, LocalizedTextField, \
LocalizedFileField
FORMFIELD_FOR_LOCALIZED_FIELDS_DEFAULTS = {
LocalizedField: {'widget': widgets.AdminLocalizedFieldWidget},
LocalizedCharField: {'widget': widgets.AdminLocalizedCharFieldWidget},
LocalizedTextField: {'widget': widgets.AdminLocalizedFieldWidget},
LocalizedFileField: {'widget': widgets.AdminLocalizedFileFieldWidget},
}
class LocalizedFieldsAdminMixin:
"""Mixin for making the fancy widgets work in Django Admin."""
class Media:
css = {
'all': (
'localized_fields/localized-fields-admin.css',
)
}
js = (
'localized_fields/localized-fields-admin.js',
)
def __init__(self, *args, **kwargs):
"""Initializes a new instance of :see:LocalizedFieldsAdminMixin."""
super().__init__(*args, **kwargs)
overrides = FORMFIELD_FOR_LOCALIZED_FIELDS_DEFAULTS.copy()
overrides.update(self.formfield_overrides)
self.formfield_overrides = overrides
|
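Aside: the fix above works because a mixin that does not itself inherit from ModelAdmin can be combined with any admin class, including inlines, without forcing ModelAdmin into the MRO. A self-contained sketch of that composition; the Fake* classes below are hypothetical stand-ins for Django's admin classes.

DEFAULT_OVERRIDES = {'LocalizedField': {'widget': 'AdminLocalizedFieldWidget'}}

class WidgetOverridesMixin:
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        overrides = DEFAULT_OVERRIDES.copy()
        overrides.update(self.formfield_overrides)  # user settings win
        self.formfield_overrides = overrides

class FakeModelAdmin:
    formfield_overrides = {}

class FakeTabularInline:
    formfield_overrides = {'IntegerField': {'widget': 'NumberInput'}}

class ArticleAdmin(WidgetOverridesMixin, FakeModelAdmin):
    pass

class ArticleInline(WidgetOverridesMixin, FakeTabularInline):
    pass

assert 'LocalizedField' in ArticleAdmin().formfield_overrides
assert 'IntegerField' in ArticleInline().formfield_overrides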
5eced1c1cb9253d73e3246dccb4c33e5ba154fd3
|
rcbi/rcbi/spiders/FlyduinoSpider.py
|
rcbi/rcbi/spiders/FlyduinoSpider.py
|
import scrapy
from scrapy import log
from scrapy.contrib.spiders import SitemapSpider, Rule
from scrapy.contrib.linkextractors import LinkExtractor
from rcbi.items import Part
MANUFACTURERS = ["Rctimer", "RCTimer", "BaseCam", "Elgae", "ELGAE", "ArduFlyer", "Boscam", "T-Motor", "HQProp", "Suppo", "Flyduino", "SLS", "Frsky"]
CORRECT = {"Rctimer": "RCTimer", "ELGAE": "Elgae", "Frsky": "FrSky"}
class FlyduinoSpider(SitemapSpider):
name = "flyduino"
allowed_domains = ["flyduino.net"]
sitemap_urls = ["http://flyduino.net/sitemap.xml"]
def parse(self, response):
item = Part()
item["site"] = "flyduino"
item["url"] = response.url
product_name = response.css("div.hproduct")
if not product_name:
return
item["name"] = product_name[0].xpath("//h1/text()").extract()[0]
for m in MANUFACTURERS:
if item["name"].startswith(m):
if m in CORRECT:
m = CORRECT[m]
item["manufacturer"] = m
item["name"] = item["name"][len(m):].strip()
break
return item
|
import scrapy
from scrapy import log
from scrapy.spiders import CrawlSpider, Rule
from scrapy.linkextractors import LinkExtractor
from rcbi.items import Part
MANUFACTURERS = ["Rctimer", "RCTimer", "BaseCam", "Elgae", "ELGAE", "ArduFlyer", "Boscam", "T-Motor", "HQProp", "Suppo", "Flyduino", "SLS", "Frsky"]
CORRECT = {"Rctimer": "RCTimer", "ELGAE": "Elgae", "Frsky": "FrSky"}
class FlyduinoSpider(CrawlSpider):
name = "flyduino"
allowed_domains = ["flyduino.net"]
start_urls = ["http://flyduino.net/"]
rules = (
# Follow links from category pages (no callback means follow=True by default).
Rule(LinkExtractor(restrict_css=".categories")),
# Send product detail pages to the spider's parse_item method.
Rule(LinkExtractor(restrict_css=".article_wrapper h3"), callback='parse_item'),
)
def parse_item(self, response):
item = Part()
item["site"] = "flyduino"
item["url"] = response.url
product_name = response.css("div.hproduct")
if not product_name:
return
item["name"] = product_name[0].xpath("//h1/text()").extract()[0]
for m in MANUFACTURERS:
if item["name"].startswith(m):
if m in CORRECT:
m = CORRECT[m]
item["manufacturer"] = m
item["name"] = item["name"][len(m):].strip()
break
return item
|
Stop using the Flyduino sitemap.
|
Stop using the Flyduino sitemap.
|
Python
|
apache-2.0
|
rcbuild-info/scrape,rcbuild-info/scrape
|
import scrapy
from scrapy import log
from scrapy.contrib.spiders import SitemapSpider, Rule
from scrapy.contrib.linkextractors import LinkExtractor
from rcbi.items import Part
MANUFACTURERS = ["Rctimer", "RCTimer", "BaseCam", "Elgae", "ELGAE", "ArduFlyer", "Boscam", "T-Motor", "HQProp", "Suppo", "Flyduino", "SLS", "Frsky"]
CORRECT = {"Rctimer": "RCTimer", "ELGAE": "Elgae", "Frsky": "FrSky"}
class FlyduinoSpider(SitemapSpider):
name = "flyduino"
allowed_domains = ["flyduino.net"]
sitemap_urls = ["http://flyduino.net/sitemap.xml"]
def parse(self, response):
item = Part()
item["site"] = "flyduino"
item["url"] = response.url
product_name = response.css("div.hproduct")
if not product_name:
return
item["name"] = product_name[0].xpath("//h1/text()").extract()[0]
for m in MANUFACTURERS:
if item["name"].startswith(m):
if m in CORRECT:
m = CORRECT[m]
item["manufacturer"] = m
item["name"] = item["name"][len(m):].strip()
break
return itemStop using the Flyduino sitemap.
|
import scrapy
from scrapy import log
from scrapy.spiders import CrawlSpider, Rule
from scrapy.linkextractors import LinkExtractor
from rcbi.items import Part
MANUFACTURERS = ["Rctimer", "RCTimer", "BaseCam", "Elgae", "ELGAE", "ArduFlyer", "Boscam", "T-Motor", "HQProp", "Suppo", "Flyduino", "SLS", "Frsky"]
CORRECT = {"Rctimer": "RCTimer", "ELGAE": "Elgae", "Frsky": "FrSky"}
class FlyduinoSpider(CrawlSpider):
name = "flyduino"
allowed_domains = ["flyduino.net"]
start_urls = ["http://flyduino.net/"]
rules = (
# Follow links from category pages (no callback means follow=True by default).
Rule(LinkExtractor(restrict_css=".categories")),
# Send product detail pages to the spider's parse_item method.
Rule(LinkExtractor(restrict_css=".article_wrapper h3"), callback='parse_item'),
)
def parse_item(self, response):
item = Part()
item["site"] = "flyduino"
item["url"] = response.url
product_name = response.css("div.hproduct")
if not product_name:
return
item["name"] = product_name[0].xpath("//h1/text()").extract()[0]
for m in MANUFACTURERS:
if item["name"].startswith(m):
if m in CORRECT:
m = CORRECT[m]
item["manufacturer"] = m
item["name"] = item["name"][len(m):].strip()
break
return item
|
<commit_before>import scrapy
from scrapy import log
from scrapy.contrib.spiders import SitemapSpider, Rule
from scrapy.contrib.linkextractors import LinkExtractor
from rcbi.items import Part
MANUFACTURERS = ["Rctimer", "RCTimer", "BaseCam", "Elgae", "ELGAE", "ArduFlyer", "Boscam", "T-Motor", "HQProp", "Suppo", "Flyduino", "SLS", "Frsky"]
CORRECT = {"Rctimer": "RCTimer", "ELGAE": "Elgae", "Frsky": "FrSky"}
class FlyduinoSpider(SitemapSpider):
name = "flyduino"
allowed_domains = ["flyduino.net"]
sitemap_urls = ["http://flyduino.net/sitemap.xml"]
def parse(self, response):
item = Part()
item["site"] = "flyduino"
item["url"] = response.url
product_name = response.css("div.hproduct")
if not product_name:
return
item["name"] = product_name[0].xpath("//h1/text()").extract()[0]
for m in MANUFACTURERS:
if item["name"].startswith(m):
if m in CORRECT:
m = CORRECT[m]
item["manufacturer"] = m
item["name"] = item["name"][len(m):].strip()
break
return item<commit_msg>Stop using the Flyduino sitemap.<commit_after>
|
import scrapy
from scrapy import log
from scrapy.spiders import CrawlSpider, Rule
from scrapy.linkextractors import LinkExtractor
from rcbi.items import Part
MANUFACTURERS = ["Rctimer", "RCTimer", "BaseCam", "Elgae", "ELGAE", "ArduFlyer", "Boscam", "T-Motor", "HQProp", "Suppo", "Flyduino", "SLS", "Frsky"]
CORRECT = {"Rctimer": "RCTimer", "ELGAE": "Elgae", "Frsky": "FrSky"}
class FlyduinoSpider(CrawlSpider):
name = "flyduino"
allowed_domains = ["flyduino.net"]
start_urls = ["http://flyduino.net/"]
rules = (
# Follow links from category pages (no callback means follow=True by default).
Rule(LinkExtractor(restrict_css=".categories")),
# Send product detail pages to the spider's parse_item method.
Rule(LinkExtractor(restrict_css=".article_wrapper h3"), callback='parse_item'),
)
def parse_item(self, response):
item = Part()
item["site"] = "flyduino"
item["url"] = response.url
product_name = response.css("div.hproduct")
if not product_name:
return
item["name"] = product_name[0].xpath("//h1/text()").extract()[0]
for m in MANUFACTURERS:
if item["name"].startswith(m):
if m in CORRECT:
m = CORRECT[m]
item["manufacturer"] = m
item["name"] = item["name"][len(m):].strip()
break
return item
|
import scrapy
from scrapy import log
from scrapy.contrib.spiders import SitemapSpider, Rule
from scrapy.contrib.linkextractors import LinkExtractor
from rcbi.items import Part
MANUFACTURERS = ["Rctimer", "RCTimer", "BaseCam", "Elgae", "ELGAE", "ArduFlyer", "Boscam", "T-Motor", "HQProp", "Suppo", "Flyduino", "SLS", "Frsky"]
CORRECT = {"Rctimer": "RCTimer", "ELGAE": "Elgae", "Frsky": "FrSky"}
class FlyduinoSpider(SitemapSpider):
name = "flyduino"
allowed_domains = ["flyduino.net"]
sitemap_urls = ["http://flyduino.net/sitemap.xml"]
def parse(self, response):
item = Part()
item["site"] = "flyduino"
item["url"] = response.url
product_name = response.css("div.hproduct")
if not product_name:
return
item["name"] = product_name[0].xpath("//h1/text()").extract()[0]
for m in MANUFACTURERS:
if item["name"].startswith(m):
if m in CORRECT:
m = CORRECT[m]
item["manufacturer"] = m
item["name"] = item["name"][len(m):].strip()
break
return itemStop using the Flyduino sitemap.import scrapy
from scrapy import log
from scrapy.spiders import CrawlSpider, Rule
from scrapy.linkextractors import LinkExtractor
from rcbi.items import Part
MANUFACTURERS = ["Rctimer", "RCTimer", "BaseCam", "Elgae", "ELGAE", "ArduFlyer", "Boscam", "T-Motor", "HQProp", "Suppo", "Flyduino", "SLS", "Frsky"]
CORRECT = {"Rctimer": "RCTimer", "ELGAE": "Elgae", "Frsky": "FrSky"}
class FlyduinoSpider(CrawlSpider):
name = "flyduino"
allowed_domains = ["flyduino.net"]
start_urls = ["http://flyduino.net/"]
rules = (
# Follow links from category pages (no callback means follow=True by default).
Rule(LinkExtractor(restrict_css=".categories")),
# Send product detail pages to the spider's parse_item method.
Rule(LinkExtractor(restrict_css=".article_wrapper h3"), callback='parse_item'),
)
def parse_item(self, response):
item = Part()
item["site"] = "flyduino"
item["url"] = response.url
product_name = response.css("div.hproduct")
if not product_name:
return
item["name"] = product_name[0].xpath("//h1/text()").extract()[0]
for m in MANUFACTURERS:
if item["name"].startswith(m):
if m in CORRECT:
m = CORRECT[m]
item["manufacturer"] = m
item["name"] = item["name"][len(m):].strip()
break
return item
|
<commit_before>import scrapy
from scrapy import log
from scrapy.contrib.spiders import SitemapSpider, Rule
from scrapy.contrib.linkextractors import LinkExtractor
from rcbi.items import Part
MANUFACTURERS = ["Rctimer", "RCTimer", "BaseCam", "Elgae", "ELGAE", "ArduFlyer", "Boscam", "T-Motor", "HQProp", "Suppo", "Flyduino", "SLS", "Frsky"]
CORRECT = {"Rctimer": "RCTimer", "ELGAE": "Elgae", "Frsky": "FrSky"}
class FlyduinoSpider(SitemapSpider):
name = "flyduino"
allowed_domains = ["flyduino.net"]
sitemap_urls = ["http://flyduino.net/sitemap.xml"]
def parse(self, response):
item = Part()
item["site"] = "flyduino"
item["url"] = response.url
product_name = response.css("div.hproduct")
if not product_name:
return
item["name"] = product_name[0].xpath("//h1/text()").extract()[0]
for m in MANUFACTURERS:
if item["name"].startswith(m):
if m in CORRECT:
m = CORRECT[m]
item["manufacturer"] = m
item["name"] = item["name"][len(m):].strip()
break
return item<commit_msg>Stop using the Flyduino sitemap.<commit_after>import scrapy
from scrapy import log
from scrapy.spiders import CrawlSpider, Rule
from scrapy.linkextractors import LinkExtractor
from rcbi.items import Part
MANUFACTURERS = ["Rctimer", "RCTimer", "BaseCam", "Elgae", "ELGAE", "ArduFlyer", "Boscam", "T-Motor", "HQProp", "Suppo", "Flyduino", "SLS", "Frsky"]
CORRECT = {"Rctimer": "RCTimer", "ELGAE": "Elgae", "Frsky": "FrSky"}
class FlyduinoSpider(CrawlSpider):
name = "flyduino"
allowed_domains = ["flyduino.net"]
start_urls = ["http://flyduino.net/"]
rules = (
# Follow links from category pages (no callback means follow=True by default).
Rule(LinkExtractor(restrict_css=".categories")),
# Send product detail pages to the spider's parse_item method.
Rule(LinkExtractor(restrict_css=".article_wrapper h3"), callback='parse_item'),
)
def parse_item(self, response):
item = Part()
item["site"] = "flyduino"
item["url"] = response.url
product_name = response.css("div.hproduct")
if not product_name:
return
item["name"] = product_name[0].xpath("//h1/text()").extract()[0]
for m in MANUFACTURERS:
if item["name"].startswith(m):
if m in CORRECT:
m = CORRECT[m]
item["manufacturer"] = m
item["name"] = item["name"][len(m):].strip()
break
return item
|
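Aside: the move from SitemapSpider to CrawlSpider above relies on Rule objects, where a rule without a callback only follows links and a rule with one routes matches to a handler. A minimal skeleton under the same Scrapy APIs; the domain and CSS selectors are placeholders.

from scrapy.spiders import CrawlSpider, Rule
from scrapy.linkextractors import LinkExtractor

class ExampleSpider(CrawlSpider):
    name = 'example'
    allowed_domains = ['example.com']
    start_urls = ['http://example.com/']

    rules = (
        # No callback: follow listing pages to discover more links.
        Rule(LinkExtractor(restrict_css='.nav')),
        # With callback: hand detail pages to parse_item. Avoid naming the
        # callback 'parse', which CrawlSpider reserves for its own routing.
        Rule(LinkExtractor(restrict_css='.product a'), callback='parse_item'),
    )

    def parse_item(self, response):
        yield {'url': response.url}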
adf71b59168c81240258a2b344e4bea1f6377e7b
|
etools/apps/uptime/forms/report_forms.py
|
etools/apps/uptime/forms/report_forms.py
|
from django import forms
from bootstrap3_datetime.widgets import DateTimePicker
class ChooseReportForm(forms.Form):
date_from = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False}),
label='От даты:',
)
date = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False}),
label=', на дату:',
)
def __init__(self, choices=None, *args, **kwargs):
super(ChooseReportForm, self).__init__(*args, **kwargs)
if choices:
self.fields.update(
{'report_id': forms.ChoiceField(widget=forms.Select,
label='отчет:',
choices=choices)}
)
|
from django import forms
from bootstrap3_datetime.widgets import DateTimePicker
class ChooseReportForm(forms.Form):
date_from = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False,
"startDate": "1/1/1953"}),
label='От даты:',
)
date = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False,
"startDate": "1/1/1953"}),
label=', на дату:',
)
def __init__(self, choices=None, *args, **kwargs):
super(ChooseReportForm, self).__init__(*args, **kwargs)
if choices:
self.fields.update(
{'report_id': forms.ChoiceField(widget=forms.Select,
label='отчет:',
choices=choices)}
)
|
Fix minimum date for uptime:reports
|
Fix minimum date for uptime:reports
|
Python
|
bsd-3-clause
|
Igelinmist/etools,Igelinmist/etools
|
from django import forms
from bootstrap3_datetime.widgets import DateTimePicker
class ChooseReportForm(forms.Form):
date_from = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False}),
label='От даты:',
)
date = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False}),
label=', на дату:',
)
def __init__(self, choices=None, *args, **kwargs):
super(ChooseReportForm, self).__init__(*args, **kwargs)
if choices:
self.fields.update(
{'report_id': forms.ChoiceField(widget=forms.Select,
label='отчет:',
choices=choices)}
)
Fix minimum date for uptime:reports
|
from django import forms
from bootstrap3_datetime.widgets import DateTimePicker
class ChooseReportForm(forms.Form):
date_from = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False,
"startDate": "1/1/1953"}),
label='От даты:',
)
date = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False,
"startDate": "1/1/1953"}),
label=', на дату:',
)
def __init__(self, choices=None, *args, **kwargs):
super(ChooseReportForm, self).__init__(*args, **kwargs)
if choices:
self.fields.update(
{'report_id': forms.ChoiceField(widget=forms.Select,
label='отчет:',
choices=choices)}
)
|
<commit_before>from django import forms
from bootstrap3_datetime.widgets import DateTimePicker
class ChooseReportForm(forms.Form):
date_from = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False}),
label='От даты:',
)
date = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False}),
label=', на дату:',
)
def __init__(self, choices=None, *args, **kwargs):
super(ChooseReportForm, self).__init__(*args, **kwargs)
if choices:
self.fields.update(
{'report_id': forms.ChoiceField(widget=forms.Select,
label='отчет:',
choices=choices)}
)
<commit_msg>Fix minimum date for uptime:reports<commit_after>
|
from django import forms
from bootstrap3_datetime.widgets import DateTimePicker
class ChooseReportForm(forms.Form):
date_from = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False,
"startDate": "1/1/1953"}),
label='От даты:',
)
date = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False,
"startDate": "1/1/1953"}),
label=', на дату:',
)
def __init__(self, choices=None, *args, **kwargs):
super(ChooseReportForm, self).__init__(*args, **kwargs)
if choices:
self.fields.update(
{'report_id': forms.ChoiceField(widget=forms.Select,
label='отчет:',
choices=choices)}
)
|
from django import forms
from bootstrap3_datetime.widgets import DateTimePicker
class ChooseReportForm(forms.Form):
date_from = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False}),
label='От даты:',
)
date = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False}),
label=', на дату:',
)
def __init__(self, choices=None, *args, **kwargs):
super(ChooseReportForm, self).__init__(*args, **kwargs)
if choices:
self.fields.update(
{'report_id': forms.ChoiceField(widget=forms.Select,
label='отчет:',
choices=choices)}
)
Fix minimum date for uptime:reportsfrom django import forms
from bootstrap3_datetime.widgets import DateTimePicker
class ChooseReportForm(forms.Form):
date_from = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False,
"startDate": "1/1/1953"}),
label='От даты:',
)
date = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False,
"startDate": "1/1/1953"}),
label=', на дату:',
)
def __init__(self, choices=None, *args, **kwargs):
super(ChooseReportForm, self).__init__(*args, **kwargs)
if choices:
self.fields.update(
{'report_id': forms.ChoiceField(widget=forms.Select,
label='отчет:',
choices=choices)}
)
|
<commit_before>from django import forms
from bootstrap3_datetime.widgets import DateTimePicker
class ChooseReportForm(forms.Form):
date_from = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False}),
label='От даты:',
)
date = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False}),
label=', на дату:',
)
def __init__(self, choices=None, *args, **kwargs):
super(ChooseReportForm, self).__init__(*args, **kwargs)
if choices:
self.fields.update(
{'report_id': forms.ChoiceField(widget=forms.Select,
label='отчет:',
choices=choices)}
)
<commit_msg>Fix minimum date for uptime:reports<commit_after>from django import forms
from bootstrap3_datetime.widgets import DateTimePicker
class ChooseReportForm(forms.Form):
date_from = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False,
"startDate": "1/1/1953"}),
label='От даты:',
)
date = forms.DateField(
widget=DateTimePicker(options={"locale": "ru",
"pickTime": False,
"startDate": "1/1/1953"}),
label=', на дату:',
)
def __init__(self, choices=None, *args, **kwargs):
super(ChooseReportForm, self).__init__(*args, **kwargs)
if choices:
self.fields.update(
{'report_id': forms.ChoiceField(widget=forms.Select,
label='отчет:',
choices=choices)}
)
|
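Aside: the fix above duplicates the same widget options across two fields, which invites drift. One way to keep them in a single place, assuming the same bootstrap3_datetime package used above; the field names and cutoff date here are illustrative.

from django import forms
from bootstrap3_datetime.widgets import DateTimePicker

def date_picker(start_date='1/1/1953'):
    # Shared options; each call builds a fresh widget instance.
    return DateTimePicker(options={'locale': 'ru',
                                   'pickTime': False,
                                   'startDate': start_date})

class DateRangeForm(forms.Form):
    date_from = forms.DateField(widget=date_picker(), label='From date:')
    date_to = forms.DateField(widget=date_picker(), label='To date:')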
1d03917856c193e41b4a1622f8297e88fec00ab2
|
damn/templatetags/damn.py
|
damn/templatetags/damn.py
|
from django import template
from django.utils.safestring import mark_safe  # needed by AssetsNode.render
from damn.processors import find_processor
from damn.utils import AssetRegistry, DepNode
register = template.Library()
class AssetsNode(template.Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
context.render_context['AMN'] = AssetRegistry()
content = self.nodelist.render(context)
# Now output the extra tags
extra_tags = '\n'.join(context.render_context['AMN'].items())
return mark_safe(extra_tags) + content
@register.tag
def assets(parser, token):
nodelist = parser.parse()
return AssetsNode(nodelist)
@register.simpletag(takes_context=True)
def asset(context, name=None, alias=None, mode=None, *args):
'''
{% asset alias mode=? ... %}
{% asset file.js ... %}
{% asset name depends depends... %}
alias = short name for asset
file = static relative filename
mode = asset mode [inferred from filename extension]
args == dependencies [aliases or files]
'''
if alias is None and name is None:
raise template.TemplateSyntaxError(
'asset tag requires at least one of name or alias'
)
if name is None and mode is None:
raise template.TemplateSyntaxError(
'asset tag requires mode when using an alias'
)
context.render_context['AMN'].add_asset(name=name, alias=alias, mode=mode, deps=args)
return ''
|
from django import template
from django.utils.safestring import mark_safe  # needed by AssetsNode.render
from ..processors import find_processor, AssetRegistry
register = template.Library()
class AssetsNode(template.Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
context.render_context['AMN'] = AssetRegistry()
content = self.nodelist.render(context)
# Now output the extra tags
extra_tags = '\n'.join(context.render_context['AMN'].items())
return mark_safe(extra_tags) + content
@register.tag
def assets(parser, token):
nodelist = parser.parse()
return AssetsNode(nodelist)
@register.simple_tag(takes_context=True)
def asset(context, name=None, alias=None, mode=None, *args):
'''
{% asset alias mode=? ... %}
{% asset file.js ... %}
{% asset name depends depends... %}
alias = short name for asset
file = static relative filename
mode = asset mode [inferred from filename extension]
args == dependencies [aliases or files]
'''
if alias is None and name is None:
raise template.TemplateSyntaxError(
'asset tag requires at least one of name or alias'
)
if name is None and mode is None:
raise template.TemplateSyntaxError(
'asset tag requires mode when using an alias'
)
context.render_context['AMN'].add_asset(name=name, alias=alias, mode=mode, deps=args)
return ''
|
Fix simpletag -> simple_tag Fix imports
|
Fix simpletag -> simple_tag
Fix imports
|
Python
|
bsd-2-clause
|
funkybob/django-amn
|
from django import template
from django.utils.safestring import mark_safe  # needed by AssetsNode.render
from damn.processors import find_processor
from damn.utils import AssetRegistry, DepNode
register = template.Library()
class AssetsNode(template.Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
context.render_context['AMN'] = AssetRegistry()
content = self.nodelist.render(context)
# Now output the extra tags
extra_tags = '\n'.join(context.render_context['AMN'].items())
return mark_safe(extra_tags) + content
@register.tag
def assets(parser, token):
nodelist = parser.parse()
return AssetsNode(nodelist)
@register.simpletag(takes_context=True)
def asset(context, name=None, alias=None, mode=None, *args):
'''
{% asset alias mode=? ... %}
{% asset file.js ... %}
{% asset name depends depends... %}
alias = short name for asset
file = static relative filename
mode = asset mode [inferred from filename extension]
args == dependencies [aliases or files]
'''
if alias is None and name is None:
raise template.TemplateSyntaxError(
'asset tag requires at least one of name or alias'
)
if name is None and mode is None:
raise template.TemplateSyntaxError(
'asset tag requires mode when using an alias'
)
context.render_context['AMN'].add_asset(name=name, alias=alias, mode=mode, deps=args)
return ''
Fix simpletag -> simple_tag
Fix imports
|
from django import template
from django.utils.safestring import mark_safe  # needed by AssetsNode.render
from ..processors import find_processor, AssetRegistry
register = template.Library()
class AssetsNode(template.Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
context.render_context['AMN'] = AssetRegistry()
content = self.nodelist.render(context)
# Now output the extra tags
extra_tags = '\n'.join(context.render_context['AMN'].items())
return mark_safe(extra_tags) + content
@register.tag
def assets(parser, token):
nodelist = parser.parse()
return AssetsNode(nodelist)
@register.simple_tag(takes_context=True)
def asset(context, name=None, alias=None, mode=None, *args):
'''
{% asset alias mode=? ... %}
{% asset file.js ... %}
{% asset name depends depends... %}
alias = short name for asset
file = static relative filename
mode = asset mode [inferred from filename extension]
args == dependencies [aliases or files]
'''
if alias is None and name is None:
raise template.TemplateSyntaxError(
'asset tag requires at least one of name or alias'
)
if name is None and mode is None:
raise template.TemplateSyntaxError(
'asset tag requires mode when using an alias'
)
context.render_context['AMN'].add_asset(name=name, alias=alias, mode=mode, deps=args)
return ''
|
<commit_before>
from django import template
from django.utils.safestring import mark_safe  # needed by AssetsNode.render
from damn.processors import find_processor
from damn.utils import AssetRegistry, DepNode
register = template.Library()
class AssetsNode(template.Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
context.render_context['AMN'] = AssetRegistry()
content = self.nodelist.render(context)
# Now output the extra tags
extra_tags = '\n'.join(context.render_context['AMN'].items())
return mark_safe(extra_tags) + content
@register.tag
def assets(parser, token):
nodelist = parser.parse()
return AssetsNode(nodelist)
@register.simpletag(takes_context=True)
def asset(context, name=None, alias=None, mode=None, *args):
'''
{% asset alias mode=? ... %}
{% asset file.js ... %}
{% asset name depends depends... %}
alias = short name for asset
file = static relative filename
mode = asset mode [inferred from filename extension]
args == dependencies [aliases or files]
'''
if alias is None and name is None:
raise template.TemplateSyntaxError(
'asset tag requires at least one of name or alias'
)
if name is None and mode is None:
raise template.TemplateSyntaxError(
'asset tag requires mode when using an alias'
)
context.render_context['AMN'].add_asset(name=name, alias=alias, mode=mode, deps=args)
return ''
<commit_msg>Fix simpletag -> simple_tag
Fix imports<commit_after>
|
from django import template
from django.utils.safestring import mark_safe  # needed by AssetsNode.render
from ..processors import find_processor, AssetRegistry
register = template.Library()
class AssetsNode(template.Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
context.render_context['AMN'] = AssetRegistry()
content = self.nodelist.render(context)
# Now output the extra tags
extra_tags = '\n'.join(context.render_context['AMN'].items())
return mark_safe(extra_tags) + content
@register.tag
def assets(parser, token):
nodelist = parser.parse()
return AssetsNode(nodelist)
@register.simple_tag(takes_context=True)
def asset(context, name=None, alias=None, mode=None, *args):
'''
{% asset alias mode=? ... %}
{% asset file.js ... %}
{% asset name depends depends... %}
alias = short name for asset
file = static relative filename
mode = asset mode [inferred from filename extension]
args == dependencies [aliases or files]
'''
if alias is None and name is None:
raise template.TemplateSyntaxError(
'asset tag requires at least one of name or alias'
)
if name is None and mode is None:
raise template.TemplateSyntaxError(
'asset tag requires mode when using an alias'
)
context.render_context['AMN'].add_asset(name=name, alias=alias, mode=mode, deps=args)
return ''
|
from django import template
from django.utils.safestring import mark_safe  # needed by AssetsNode.render
from damn.processors import find_processor
from damn.utils import AssetRegistry, DepNode
register = template.Library()
class AssetsNode(template.Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
context.render_context['AMN'] = AssetRegistry()
content = self.nodelist.render(context)
# Now output the extra tags
extra_tags = '\n'.join(context.render_context['AMN'].items())
return mark_safe(extra_tags) + content
@register.tag
def assets(parser, token):
nodelist = parser.parse()
return AssetsNode(nodelist)
@register.simpletag(takes_context=True)
def asset(context, name=None, alias=None, mode=None, *args):
'''
{% asset alias mode=? ... %}
{% asset file.js ... %}
{% asset name depends depends... %}
alias = short name for asset
file = static relative filename
mode = asset mode [inferred from filename extension]
args == dependencies [aliases or files]
'''
if alias is None and name is None:
raise template.TemplateSyntaxError(
'asset tag requires at least one of name or alias'
)
if name is None and mode is None:
raise template.TemplateSyntaxError(
'asset tag requires mode when using an alias'
)
context.render_context['AMN'].add_asset(name=name, alias=alias, mode=mode, deps=args)
return ''
Fix simpletag -> simple_tag
Fix imports
from django import template
from django.utils.safestring import mark_safe  # needed by AssetsNode.render
from ..processors import find_processor, AssetRegistry
register = template.Library()
class AssetsNode(template.Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
context.render_context['AMN'] = AssetRegistry()
content = self.nodelist.render(context)
# Now output the extra tags
extra_tags = '\n'.join(context.render_context['AMN'].items())
return mark_safe(extra_tags) + content
@register.tag
def assets(parser, token):
nodelist = parser.parse()
return AssetsNode(nodelist)
@register.simple_tag(takes_context=True)
def asset(context, name=None, alias=None, mode=None, *args):
'''
{% asset alias mode=? ... %}
{% asset file.js ... %}
{% asset name depends depends... %}
alias = short name for asset
file = static relative filename
mode = asset mode [inferred from filename extension]
args == dependencies [aliases or files]
'''
if alias is None and name is None:
raise template.TemplateSyntaxError(
'asset tag requires at least one of name or alias'
)
if name is None and mode is None:
raise template.TemplateSyntaxError(
'asset tag requires mode when using an alias'
)
context.render_context['AMN'].add_asset(name=name, alias=alias, mode=mode, deps=args)
return ''
|
<commit_before>
from django import template
from django.utils.safestring import mark_safe  # needed by AssetsNode.render
from damn.processors import find_processor
from damn.utils import AssetRegistry, DepNode
register = template.Library()
class AssetsNode(template.Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
context.render_context['AMN'] = AssetRegistry()
content = self.nodelist.render(context)
# Now output the extra tags
extra_tags = '\n'.join(context.render_context['AMN'].items())
return mark_safe(extra_tags) + content
@register.tag
def assets(parser, token):
nodelist = parser.parse()
return AssetsNode(nodelist)
@register.simpletag(takes_context=True)
def asset(context, name=None, alias=None, mode=None, *args):
'''
{% asset alias mode=? ... %}
{% asset file.js ... %}
{% asset name depends depends... %}
alias = short name for asset
file = static relative filename
mode = asset mode [inferred from filename extension]
args == dependencies [aliases or files]
'''
if alias is None and name is None:
raise template.TemplateSyntaxError(
'asset tag requires at least one of name or alias'
)
if name is None and mode is None:
raise template.TemplateSyntaxError(
'asset tag requires mode when using an alias'
)
context.render_context['AMN'].add_asset(name=name, alias=alias, mode=mode, deps=args)
return ''
<commit_msg>Fix simpletag -> simple_tag
Fix imports<commit_after>
from django import template
from django.utils.safestring import mark_safe  # needed by AssetsNode.render
from ..processors import find_processor, AssetRegistry
register = template.Library()
class AssetsNode(template.Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
context.render_context['AMN'] = AssetRegistry()
content = self.nodelist.render(context)
# Now output the extra tags
extra_tags = '\n'.join(context.render_context['AMN'].items())
return mark_safe(extra_tags) + content
@register.tag
def assets(parser, token):
nodelist = parser.parse()
return AssetsNode(nodelist)
@register.simple_tag(takes_context=True)
def asset(context, name=None, alias=None, mode=None, *args):
'''
{% asset alias mode=? ... %}
{% asset file.js ... %}
{% asset name depends depends... %}
alias = short name for asset
file = static relative filename
mode = asset mode [inferred from filename extension]
args == dependencies [aliases or files]
'''
if alias is None and name is None:
raise template.TemplateSyntaxError(
'asset tag requires at least one of name or alias'
)
if name is None and mode is None:
raise template.TemplateSyntaxError(
'asset tag requires mode when using an alias'
)
context.render_context['AMN'].add_asset(name=name, alias=alias, mode=mode, deps=args)
return ''
|
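For reference on the rename above: Django's template Library exposes simple_tag (with the underscore); simpletag does not exist, so the module fails with an AttributeError at import time. A minimal working tag using the same decorator; the tag body is illustrative.

from django import template

register = template.Library()

@register.simple_tag(takes_context=True)
def greet(context, name='world'):
    # takes_context=True makes the rendering context the first argument.
    prefix = context.get('greeting', 'Hello')
    return '%s, %s!' % (prefix, name)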
fe1b9ad1f65ac27c5bc3d02acaf473f001609e73
|
relayer/flask/__init__.py
|
relayer/flask/__init__.py
|
from typing import Any
from flask import Flask
from relayer import Relayer
class FlaskRelayer(object):
def __init__(self, app: Flask, logging_topic: str, kafka_hosts: str = None, **kwargs: str) -> None:
if app:
self.init_app(
app,
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def init_app(self, app: Flask, logging_topic: str, kafka_hosts: str = None, **kwargs: str) -> None:
kafka_hosts = kafka_hosts or app.config.get('KAFKA_HOSTS')
self.event_relayer = Relayer(
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def emit(self, *args: str, **kwargs: str) -> None:
self.event_relayer.emit(*args, **kwargs)
def emit_raw(self, *args: Any, **kwargs: Any) -> None:
self.event_relayer.emit_raw(*args, **kwargs)
def log(self, *args: str, **kwargs: str) -> None:
self.event_relayer.log(*args, **kwargs)
def flush(self, *args: str, **kwargs: str) -> None:
self.event_relayer.flush()
|
from typing import Any
from flask import Flask
from relayer import Relayer
class FlaskRelayer(object):
def __init__(self, app: Flask = None, logging_topic: str = None, kafka_hosts: str = None, **kwargs: str) -> None:
if app:
self.init_app(
app,
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def init_app(self, app: Flask, logging_topic: str, kafka_hosts: str = None, **kwargs: str) -> None:
kafka_hosts = kafka_hosts or app.config.get('KAFKA_HOSTS')
self.event_relayer = Relayer(
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def emit(self, *args: str, **kwargs: str) -> None:
self.event_relayer.emit(*args, **kwargs)
def emit_raw(self, *args: Any, **kwargs: Any) -> None:
self.event_relayer.emit_raw(*args, **kwargs)
def log(self, *args: str, **kwargs: str) -> None:
self.event_relayer.log(*args, **kwargs)
def flush(self, *args: str, **kwargs: str) -> None:
self.event_relayer.flush()
|
Use default arguments removed by mypy
|
Use default arguments removed by mypy
|
Python
|
mit
|
wizeline/relayer
|
from typing import Any
from flask import Flask
from relayer import Relayer
class FlaskRelayer(object):
def __init__(self, app: Flask, logging_topic: str, kafka_hosts: str = None, **kwargs: str) -> None:
if app:
self.init_app(
app,
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def init_app(self, app: Flask, logging_topic: str, kafka_hosts: str = None, **kwargs: str) -> None:
kafka_hosts = kafka_hosts or app.config.get('KAFKA_HOSTS')
self.event_relayer = Relayer(
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def emit(self, *args: str, **kwargs: str) -> None:
self.event_relayer.emit(*args, **kwargs)
def emit_raw(self, *args: Any, **kwargs: Any) -> None:
self.event_relayer.emit_raw(*args, **kwargs)
def log(self, *args: str, **kwargs: str) -> None:
self.event_relayer.log(*args, **kwargs)
def flush(self, *args: str, **kwargs: str) -> None:
self.event_relayer.flush()
Use default arguments removed by mypy
|
from typing import Any
from flask import Flask
from relayer import Relayer
class FlaskRelayer(object):
def __init__(self, app: Flask = None, logging_topic: str = None, kafka_hosts: str = None, **kwargs: str) -> None:
if app:
self.init_app(
app,
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def init_app(self, app: Flask, logging_topic: str, kafka_hosts: str = None, **kwargs: str) -> None:
kafka_hosts = kafka_hosts or app.config.get('KAFKA_HOSTS')
self.event_relayer = Relayer(
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def emit(self, *args: str, **kwargs: str) -> None:
self.event_relayer.emit(*args, **kwargs)
def emit_raw(self, *args: Any, **kwargs: Any) -> None:
self.event_relayer.emit_raw(*args, **kwargs)
def log(self, *args: str, **kwargs: str) -> None:
self.event_relayer.log(*args, **kwargs)
def flush(self, *args: str, **kwargs: str) -> None:
self.event_relayer.flush()
|
<commit_before>from typing import Any
from flask import Flask
from relayer import Relayer
class FlaskRelayer(object):
def __init__(self, app: Flask, logging_topic: str, kafka_hosts: str = None, **kwargs: str) -> None:
if app:
self.init_app(
app,
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def init_app(self, app: Flask, logging_topic: str, kafka_hosts: str = None, **kwargs: str) -> None:
kafka_hosts = kafka_hosts or app.config.get('KAFKA_HOSTS')
self.event_relayer = Relayer(
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def emit(self, *args: str, **kwargs: str) -> None:
self.event_relayer.emit(*args, **kwargs)
def emit_raw(self, *args: Any, **kwargs: Any) -> None:
self.event_relayer.emit_raw(*args, **kwargs)
def log(self, *args: str, **kwargs: str) -> None:
self.event_relayer.log(*args, **kwargs)
def flush(self, *args: str, **kwargs: str) -> None:
self.event_relayer.flush()
<commit_msg>Use default arguments removed by mypy<commit_after>
|
from typing import Any
from flask import Flask
from relayer import Relayer
class FlaskRelayer(object):
def __init__(self, app: Flask = None, logging_topic: str = None, kafka_hosts: str = None, **kwargs: str) -> None:
if app:
self.init_app(
app,
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def init_app(self, app: Flask, logging_topic: str, kafka_hosts: str = None, **kwargs: str) -> None:
kafka_hosts = kafka_hosts or app.config.get('KAFKA_HOSTS')
self.event_relayer = Relayer(
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def emit(self, *args: str, **kwargs: str) -> None:
self.event_relayer.emit(*args, **kwargs)
def emit_raw(self, *args: Any, **kwargs: Any) -> None:
self.event_relayer.emit_raw(*args, **kwargs)
def log(self, *args: str, **kwargs: str) -> None:
self.event_relayer.log(*args, **kwargs)
def flush(self, *args: str, **kwargs: str) -> None:
self.event_relayer.flush()
|
from typing import Any
from flask import Flask
from relayer import Relayer
class FlaskRelayer(object):
def __init__(self, app: Flask, logging_topic: str, kafka_hosts: str = None, **kwargs: str) -> None:
if app:
self.init_app(
app,
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def init_app(self, app: Flask, logging_topic: str, kafka_hosts: str = None, **kwargs: str) -> None:
kafka_hosts = kafka_hosts or app.config.get('KAFKA_HOSTS')
self.event_relayer = Relayer(
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def emit(self, *args: str, **kwargs: str) -> None:
self.event_relayer.emit(*args, **kwargs)
def emit_raw(self, *args: Any, **kwargs: Any) -> None:
self.event_relayer.emit_raw(*args, **kwargs)
def log(self, *args: str, **kwargs: str) -> None:
self.event_relayer.log(*args, **kwargs)
def flush(self, *args: str, **kwargs: str) -> None:
self.event_relayer.flush()
Use default arguments removed by mypyfrom typing import Any
from flask import Flask
from relayer import Relayer
class FlaskRelayer(object):
def __init__(self, app: Flask = None, logging_topic: str = None, kafka_hosts: str = None, **kwargs: str) -> None:
if app:
self.init_app(
app,
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def init_app(self, app: Flask, logging_topic: str, kafka_hosts: str = None, **kwargs: str) -> None:
kafka_hosts = kafka_hosts or app.config.get('KAFKA_HOSTS')
self.event_relayer = Relayer(
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def emit(self, *args: str, **kwargs: str) -> None:
self.event_relayer.emit(*args, **kwargs)
def emit_raw(self, *args: Any, **kwargs: Any) -> None:
self.event_relayer.emit_raw(*args, **kwargs)
def log(self, *args: str, **kwargs: str) -> None:
self.event_relayer.log(*args, **kwargs)
def flush(self, *args: str, **kwargs: str) -> None:
self.event_relayer.flush()
|
<commit_before>from typing import Any
from flask import Flask
from relayer import Relayer
class FlaskRelayer(object):
def __init__(self, app: Flask, logging_topic: str, kafka_hosts: str = None, **kwargs: str) -> None:
if app:
self.init_app(
app,
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def init_app(self, app: Flask, logging_topic: str, kafka_hosts: str = None, **kwargs: str) -> None:
kafka_hosts = kafka_hosts or app.config.get('KAFKA_HOSTS')
self.event_relayer = Relayer(
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def emit(self, *args: str, **kwargs: str) -> None:
self.event_relayer.emit(*args, **kwargs)
def emit_raw(self, *args: Any, **kwargs: Any) -> None:
self.event_relayer.emit_raw(*args, **kwargs)
def log(self, *args: str, **kwargs: str) -> None:
self.event_relayer.log(*args, **kwargs)
def flush(self, *args: str, **kwargs: str) -> None:
self.event_relayer.flush()
<commit_msg>Use default arguments removed by mypy<commit_after>from typing import Any
from flask import Flask
from relayer import Relayer
class FlaskRelayer(object):
def __init__(self, app: Flask = None, logging_topic: str = None, kafka_hosts: str = None, **kwargs: str) -> None:
if app:
self.init_app(
app,
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def init_app(self, app: Flask, logging_topic: str, kafka_hosts: str = None, **kwargs: str) -> None:
kafka_hosts = kafka_hosts or app.config.get('KAFKA_HOSTS')
self.event_relayer = Relayer(
logging_topic,
kafka_hosts=kafka_hosts,
**kwargs,
)
def emit(self, *args: str, **kwargs: str) -> None:
self.event_relayer.emit(*args, **kwargs)
def emit_raw(self, *args: Any, **kwargs: Any) -> None:
self.event_relayer.emit_raw(*args, **kwargs)
def log(self, *args: str, **kwargs: str) -> None:
self.event_relayer.log(*args, **kwargs)
def flush(self, *args: str, **kwargs: str) -> None:
self.event_relayer.flush()
|
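Aside: making app optional, as above, restores the standard Flask extension contract, where the object can be created bare and bound later via init_app inside an application factory. A minimal skeleton of that contract; the extension name and config key are hypothetical.

from flask import Flask

class Metrics(object):
    def __init__(self, app=None, prefix=None):
        self.prefix = prefix
        if app is not None:
            self.init_app(app)

    def init_app(self, app, prefix=None):
        # Fall back to app config so either call site may supply the value.
        self.prefix = prefix or self.prefix or app.config.get('METRICS_PREFIX')

metrics = Metrics()             # created before any app exists

def create_app():
    app = Flask(__name__)
    app.config['METRICS_PREFIX'] = 'svc'
    metrics.init_app(app)       # bound inside the factory
    return app

app = create_app()
assert metrics.prefix == 'svc'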
d6a00b0cf70d778e4671ce8aa1c9b115410fcc33
|
studygroups/migrations/0034_create_facilitators_group.py
|
studygroups/migrations/0034_create_facilitators_group.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def create_facilitators_group(apps, schema_editor):
Group = apps.get_model("auth", "Group")
group = Group(name="facilitators")
group.save()
class Migration(migrations.Migration):
dependencies = [
('studygroups', '0033_auto_20150826_1408'),
]
operations = [
migrations.RunPython(create_facilitators_group),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def create_facilitators_group(apps, schema_editor):
Group = apps.get_model("auth", "Group")
Group.objects.get_or_create(name="facilitators")
class Migration(migrations.Migration):
dependencies = [
('studygroups', '0033_auto_20150826_1408'),
]
operations = [
migrations.RunPython(create_facilitators_group),
]
|
Change data migration to work even if facilitator group already exists
|
Change data migration to work even if facilitator group already exists
|
Python
|
mit
|
p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def create_facilitators_group(apps, schema_editor):
Group = apps.get_model("auth", "Group")
group = Group(name="facilitators")
group.save()
class Migration(migrations.Migration):
dependencies = [
('studygroups', '0033_auto_20150826_1408'),
]
operations = [
migrations.RunPython(create_facilitators_group),
]
Change data migration to work even if facilitator group already exists
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def create_facilitators_group(apps, schema_editor):
Group = apps.get_model("auth", "Group")
Group.objects.get_or_create(name="facilitators")
class Migration(migrations.Migration):
dependencies = [
('studygroups', '0033_auto_20150826_1408'),
]
operations = [
migrations.RunPython(create_facilitators_group),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def create_facilitators_group(apps, schema_editor):
Group = apps.get_model("auth", "Group")
group = Group(name="facilitators")
group.save()
class Migration(migrations.Migration):
dependencies = [
('studygroups', '0033_auto_20150826_1408'),
]
operations = [
migrations.RunPython(create_facilitators_group),
]
<commit_msg>Change data migration to work even if facilitator group already exists<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def create_facilitators_group(apps, schema_editor):
Group = apps.get_model("auth", "Group")
Group.objects.get_or_create(name="facilitators")
class Migration(migrations.Migration):
dependencies = [
('studygroups', '0033_auto_20150826_1408'),
]
operations = [
migrations.RunPython(create_facilitators_group),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def create_facilitators_group(apps, schema_editor):
Group = apps.get_model("auth", "Group")
group = Group(name="facilitators")
group.save()
class Migration(migrations.Migration):
dependencies = [
('studygroups', '0033_auto_20150826_1408'),
]
operations = [
migrations.RunPython(create_facilitators_group),
]
Change data migration to work even if facilitator group already exists# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def create_facilitators_group(apps, schema_editor):
Group = apps.get_model("auth", "Group")
Group.objects.get_or_create(name="facilitators")
class Migration(migrations.Migration):
dependencies = [
('studygroups', '0033_auto_20150826_1408'),
]
operations = [
migrations.RunPython(create_facilitators_group),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def create_facilitators_group(apps, schema_editor):
Group = apps.get_model("auth", "Group")
group = Group(name="facilitators")
group.save()
class Migration(migrations.Migration):
dependencies = [
('studygroups', '0033_auto_20150826_1408'),
]
operations = [
migrations.RunPython(create_facilitators_group),
]
<commit_msg>Change data migration to work even if facilitator group already exists<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def create_facilitators_group(apps, schema_editor):
Group = apps.get_model("auth", "Group")
Group.objects.get_or_create(name="facilitators")
class Migration(migrations.Migration):
dependencies = [
('studygroups', '0033_auto_20150826_1408'),
]
operations = [
migrations.RunPython(create_facilitators_group),
]
|
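Aside: get_or_create, as adopted above, makes the forward migration safe to re-run. A fuller sketch also supplies a reverse function so the migration can be unapplied; the app label and dependency below are placeholders.

from django.db import migrations

def create_group(apps, schema_editor):
    Group = apps.get_model('auth', 'Group')
    Group.objects.get_or_create(name='facilitators')

def remove_group(apps, schema_editor):
    Group = apps.get_model('auth', 'Group')
    Group.objects.filter(name='facilitators').delete()

class Migration(migrations.Migration):
    dependencies = [('myapp', '0001_initial')]

    operations = [
        migrations.RunPython(create_group, remove_group),
    ]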
d8974bddff5c16d616fb846eb44ba90c77a18225
|
utils/needle_phy.py
|
utils/needle_phy.py
|
"""
Simulation of Buffon's Needle Experiment.
"""
import math
import numpy as np
import numexpr as ne
from utils import misc
def run_trials(length, gap_width, trials):
"""
Runs the simulation a specified number of times.
"""
length = misc.validate_length(length)
gap_width = misc.validate_width(gap_width)
trials = misc.validate_trials(trials)
angles = np.random.random(size=trials)
y = np.random.random(size=trials)
clauses = [
'D - y*D < l/2 * sin(x * pi)',
'y*D < l/2 * sin(x * pi)'
]
return ne.evaluate(
'sum(where (%s, 1, 0))' %
' & '.join(['(%s)' % i for i in clauses]),
local_dict={
'l': length,
'D': gap_width,
'x': angles,
'y': y
},
global_dict={
'pi': math.pi
}
)
def predict_prob(length, gap_width):
"""
Predicts the probability that the needle will hit
one of the two parallel lines.
length and gap_width can be scalars or arrays.
"""
length = misc.validate_length(length)
gap_width = misc.validate_width(gap_width)
# TODO: Add in probability calculation.
return
|
"""
Simulation of Buffon's Needle Experiment.
"""
import math
import numpy as np
import numexpr as ne
from utils import misc
def run_trials(length, gap_width, trials):
"""
Runs the simulation a specified number of times.
"""
length = misc.validate_length(length)
gap_width = misc.validate_width(gap_width)
trials = misc.validate_trials(trials)
angles = np.random.random(size=trials)
y = np.random.random(size=trials)
clauses = [
'D - y*D < l/2 * sin(x * pi)',
'y*D < l/2 * sin(x * pi)'
]
return ne.evaluate(
'sum(where (%s, 1, 0))' %
' & '.join(['(%s)' % i for i in clauses]),
local_dict={
'l': length,
'D': gap_width,
'x': angles,
'y': y
},
global_dict={
'pi': math.pi
}
)
def predict_prob(length, gap_width):
"""
Predicts the probability that the needle will hit
one of the two parallel lines.
length and gap_width can be scalars or arrays.
"""
length = misc.validate_length(length)
gap_width = misc.validate_width(gap_width)
# TODO: Add in probability calculation.
# placeholder to ensure that a full array of zeroes
# is returned
return ne.evaluate(
'l * D * 0',
local_dict={
'l': length,
'D': gap_width
}
)
|
Add placeholder to return array full of zeroes.
|
Add placeholder to return array full of zeroes.
|
Python
|
mit
|
wei2912/bce-simulation,wei2912/bce-simulation,wei2912/bce-simulation,wei2912/bce-simulation
|
"""
Simulation of Buffon's Needle Experiment.
"""
import math
import numpy as np
import numexpr as ne
from utils import misc
def run_trials(length, gap_width, trials):
"""
Runs the simulation a specified number of times.
"""
length = misc.validate_length(length)
gap_width = misc.validate_width(gap_width)
trials = misc.validate_trials(trials)
angles = np.random.random(size=trials)
y = np.random.random(size=trials)
clauses = [
'D - y*D < l/2 * sin(x * pi)',
'y*D < l/2 * sin(x * pi)'
]
return ne.evaluate(
'sum(where (%s, 1, 0))' %
' & '.join(['(%s)' % i for i in clauses]),
local_dict={
'l': length,
'D': gap_width,
'x': angles,
'y': y
},
global_dict={
'pi': math.pi
}
)
def predict_prob(length, gap_width):
"""
Predicts the probability that the needle will hit
one of the two parallel lines.
length and gap_width can be scalars or arrays.
"""
length = misc.validate_length(length)
gap_width = misc.validate_width(gap_width)
# TODO: Add in probability calculation.
return
Add placeholder to return array full of zeroes.
|
"""
Simulation of Buffon's Needle Experiment.
"""
import math
import numpy as np
import numexpr as ne
from utils import misc
def run_trials(length, gap_width, trials):
"""
Runs the simulation a specified number of times.
"""
length = misc.validate_length(length)
gap_width = misc.validate_width(gap_width)
trials = misc.validate_trials(trials)
angles = np.random.random(size=trials)
y = np.random.random(size=trials)
clauses = [
'D - y*D < l/2 * sin(x * pi)',
'y*D < l/2 * sin(x * pi)'
]
return ne.evaluate(
'sum(where (%s, 1, 0))' %
' & '.join(['(%s)' % i for i in clauses]),
local_dict={
'l': length,
'D': gap_width,
'x': angles,
'y': y
},
global_dict={
'pi': math.pi
}
)
def predict_prob(length, gap_width):
"""
Predicts the probability that the needle will hit
one of the two parallel lines.
length and gap_width can be scalars or arrays.
"""
length = misc.validate_length(length)
gap_width = misc.validate_width(gap_width)
# TODO: Add in probability calculation.
# placeholder to ensure that a full array of zeroes
# is returned
return ne.evaluate(
'l * D * 0',
local_dict={
'l': length,
'D': gap_width
}
)
|
<commit_before>"""
Simulation of Buffon's Needle Experiment.
"""
import math
import numpy as np
import numexpr as ne
from utils import misc
def run_trials(length, gap_width, trials):
"""
Runs the simulation a specified number of times.
"""
length = misc.validate_length(length)
gap_width = misc.validate_width(gap_width)
trials = misc.validate_trials(trials)
angles = np.random.random(size=trials)
y = np.random.random(size=trials)
clauses = [
'D - y*D < l/2 * sin(x * pi)',
'y*D < l/2 * sin(x * pi)'
]
return ne.evaluate(
'sum(where (%s, 1, 0))' %
' & '.join(['(%s)' % i for i in clauses]),
local_dict={
'l': length,
'D': gap_width,
'x': angles,
'y': y
},
global_dict={
'pi': math.pi
}
)
def predict_prob(length, gap_width):
"""
Predicts the probability that the needle will hit
one of the two parallel lines.
length and gap_width can be scalars or arrays.
"""
length = misc.validate_length(length)
gap_width = misc.validate_width(gap_width)
# TODO: Add in probability calculation.
return
<commit_msg>Add placeholder to return array full of zeroes.<commit_after>
|
"""
Simulation of Buffon's Needle Experiment.
"""
import math
import numpy as np
import numexpr as ne
from utils import misc
def run_trials(length, gap_width, trials):
"""
Runs the simulation a specified number of times.
"""
length = misc.validate_length(length)
gap_width = misc.validate_width(gap_width)
trials = misc.validate_trials(trials)
angles = np.random.random(size=trials)
y = np.random.random(size=trials)
clauses = [
'D - y*D < l/2 * sin(x * pi)',
'y*D < l/2 * sin(x * pi)'
]
return ne.evaluate(
'sum(where (%s, 1, 0))' %
' & '.join(['(%s)' % i for i in clauses]),
local_dict={
'l': length,
'D': gap_width,
'x': angles,
'y': y
},
global_dict={
'pi': math.pi
}
)
def predict_prob(length, gap_width):
"""
Predicts the probability that the needle will hit
one of the two parallel lines.
length and gap_width can be scalars or arrays.
"""
length = misc.validate_length(length)
gap_width = misc.validate_width(gap_width)
# TODO: Add in probability calculation.
# placeholder to ensure that a full array of zeroes
# is returned
return ne.evaluate(
'l * D * 0',
local_dict={
'l': length,
'D': gap_width
}
)
|
"""
Simulation of Buffon's Needle Experiment.
"""
import math
import numpy as np
import numexpr as ne
from utils import misc
def run_trials(length, gap_width, trials):
"""
Runs the simulation a specified number of times.
"""
length = misc.validate_length(length)
gap_width = misc.validate_width(gap_width)
trials = misc.validate_trials(trials)
angles = np.random.random(size=trials)
y = np.random.random(size=trials)
clauses = [
'D - y*D < l/2 * sin(x * pi)',
'y*D < l/2 * sin(x * pi)'
]
return ne.evaluate(
'sum(where (%s, 1, 0))' %
' & '.join(['(%s)' % i for i in clauses]),
local_dict={
'l': length,
'D': gap_width,
'x': angles,
'y': y
},
global_dict={
'pi': math.pi
}
)
def predict_prob(length, gap_width):
"""
Predicts the probability that the needle will hit
one of the two parallel lines.
length and gap_width can be scalars or arrays.
"""
length = misc.validate_length(length)
gap_width = misc.validate_width(gap_width)
# TODO: Add in probability calculation.
return
Add placeholder to return array full of zeroes."""
Simulation of Buffon's Needle Experiment.
"""
import math
import numpy as np
import numexpr as ne
from utils import misc
def run_trials(length, gap_width, trials):
"""
Runs the simulation a specified number of times.
"""
length = misc.validate_length(length)
gap_width = misc.validate_width(gap_width)
trials = misc.validate_trials(trials)
angles = np.random.random(size=trials)
y = np.random.random(size=trials)
clauses = [
'D - y*D < l/2 * sin(x * pi)',
'y*D < l/2 * sin(x * pi)'
]
return ne.evaluate(
'sum(where (%s, 1, 0))' %
' & '.join(['(%s)' % i for i in clauses]),
local_dict={
'l': length,
'D': gap_width,
'x': angles,
'y': y
},
global_dict={
'pi': math.pi
}
)
def predict_prob(length, gap_width):
"""
Predicts the probability that the needle will hit
one of the two parallel lines.
length and gap_width can be scalars or arrays.
"""
length = misc.validate_length(length)
gap_width = misc.validate_width(gap_width)
# TODO: Add in probability calculation.
# placeholder to ensure that a full array of zeroes
# is returned
return ne.evaluate(
'l * D * 0',
local_dict={
'l': length,
'D': gap_width
}
)
|
<commit_before>"""
Simulation of Buffon's Needle Experiment.
"""
import math
import numpy as np
import numexpr as ne
from utils import misc
def run_trials(length, gap_width, trials):
"""
Runs the simulation a specified number of times.
"""
length = misc.validate_length(length)
gap_width = misc.validate_width(gap_width)
trials = misc.validate_trials(trials)
angles = np.random.random(size=trials)
y = np.random.random(size=trials)
clauses = [
'D - y*D < l/2 * sin(x * pi)',
'y*D < l/2 * sin(x * pi)'
]
return ne.evaluate(
'sum(where (%s, 1, 0))' %
' & '.join(['(%s)' % i for i in clauses]),
local_dict={
'l': length,
'D': gap_width,
'x': angles,
'y': y
},
global_dict={
'pi': math.pi
}
)
def predict_prob(length, gap_width):
"""
Predicts the probability that the needle will hit
one of the two parallel lines.
length and gap_width can be scalars or arrays.
"""
length = misc.validate_length(length)
gap_width = misc.validate_width(gap_width)
# TODO: Add in probability calculation.
return
<commit_msg>Add placeholder to return array full of zeroes.<commit_after>"""
Simulation of Buffon's Needle Experiment.
"""
import math
import numpy as np
import numexpr as ne
from utils import misc
def run_trials(length, gap_width, trials):
"""
Runs the simulation a specified number of times.
"""
length = misc.validate_length(length)
gap_width = misc.validate_width(gap_width)
trials = misc.validate_trials(trials)
angles = np.random.random(size=trials)
y = np.random.random(size=trials)
clauses = [
'D - y*D < l/2 * sin(x * pi)',
'y*D < l/2 * sin(x * pi)'
]
return ne.evaluate(
'sum(where (%s, 1, 0))' %
' & '.join(['(%s)' % i for i in clauses]),
local_dict={
'l': length,
'D': gap_width,
'x': angles,
'y': y
},
global_dict={
'pi': math.pi
}
)
def predict_prob(length, gap_width):
"""
Predicts the probability that the needle will hit
one of the two parallel lines.
length and gap_width can be scalars or arrays.
"""
length = misc.validate_length(length)
gap_width = misc.validate_width(gap_width)
# TODO: Add in probability calculation.
# placeholder to ensure that a full array of zeroes
# is returned
return ne.evaluate(
'l * D * 0',
local_dict={
'l': length,
'D': gap_width
}
)
|
a137e8a92211d3d344a38b5c97d81073d66a1668
|
alembic/versions/17c1af634026_extract_publication_date.py
|
alembic/versions/17c1af634026_extract_publication_date.py
|
"""Populate the `publication_date` column.
Revision ID: 17c1af634026
Revises: 3c4c29f0a791
Create Date: 2012-12-13 21:03:03.445346
"""
# revision identifiers, used by Alembic.
revision = '17c1af634026'
down_revision = '3c4c29f0a791'
import html5lib
from dateutil.parser import parse as parse_date
import pytips
from pytips.models import Tip
def _extract_publication_date(html):
root = html5lib.parse(html, treebuilder='lxml', namespaceHTMLElements=False)
publication_date_string = root.xpath("//a/@data-datetime")[0]
return parse_date(publication_date_string)
def _update_tip(tip):
tip.publication_date = _extract_publication_date(tip.rendered_html)
def _erase_publication_date(tip):
tip.publication_date = None
def upgrade():
tips = Tip.query.all()
map(_update_tip, tips)
pytips.db.session.commit()
def downgrade():
tips = Tip.query.all()
map(_erase_publication_date, tips)
pytips.db.session.commit()
|
"""Populate the `publication_date` column.
Revision ID: 17c1af634026
Revises: 3c4c29f0a791
Create Date: 2012-12-13 21:03:03.445346
"""
# revision identifiers, used by Alembic.
revision = '17c1af634026'
down_revision = '3c4c29f0a791'
import html5lib
from dateutil.parser import parse as parse_date
import pytips
from pytips.util import extract_publication_date
from pytips.models import Tip
def _update_tip(tip):
tip.publication_date = extract_publication_date(tip.rendered_html)
def _erase_publication_date(tip):
tip.publication_date = None
def upgrade():
tips = Tip.query.all()
map(_update_tip, tips)
pytips.db.session.commit()
def downgrade():
tips = Tip.query.all()
map(_erase_publication_date, tips)
pytips.db.session.commit()
|
Use the utility module's extract_publication_date logic.
|
Use the utility module's extract_publication_date logic.
|
Python
|
isc
|
gthank/pytips,gthank/pytips,gthank/pytips,gthank/pytips
|
"""Populate the `publication_date` column.
Revision ID: 17c1af634026
Revises: 3c4c29f0a791
Create Date: 2012-12-13 21:03:03.445346
"""
# revision identifiers, used by Alembic.
revision = '17c1af634026'
down_revision = '3c4c29f0a791'
import html5lib
from dateutil.parser import parse as parse_date
import pytips
from pytips.models import Tip
def _extract_publication_date(html):
root = html5lib.parse(html, treebuilder='lxml', namespaceHTMLElements=False)
publication_date_string = root.xpath("//a/@data-datetime")[0]
return parse_date(publication_date_string)
def _update_tip(tip):
tip.publication_date = _extract_publication_date(tip.rendered_html)
def _erase_publication_date(tip):
tip.publication_date = None
def upgrade():
tips = Tip.query.all()
map(_update_tip, tips)
pytips.db.session.commit()
def downgrade():
tips = Tip.query.all()
map(_erase_publication_date, tips)
pytips.db.session.commit()
Use the utility module's extract_publication_date logic.
|
"""Populate the `publication_date` column.
Revision ID: 17c1af634026
Revises: 3c4c29f0a791
Create Date: 2012-12-13 21:03:03.445346
"""
# revision identifiers, used by Alembic.
revision = '17c1af634026'
down_revision = '3c4c29f0a791'
import html5lib
from dateutil.parser import parse as parse_date
import pytips
from pytips.util import extract_publication_date
from pytips.models import Tip
def _update_tip(tip):
tip.publication_date = extract_publication_date(tip.rendered_html)
def _erase_publication_date(tip):
tip.publication_date = None
def upgrade():
tips = Tip.query.all()
map(_update_tip, tips)
pytips.db.session.commit()
def downgrade():
tips = Tip.query.all()
map(_erase_publication_date, tips)
pytips.db.session.commit()
|
<commit_before>"""Populate the `publication_date` column.
Revision ID: 17c1af634026
Revises: 3c4c29f0a791
Create Date: 2012-12-13 21:03:03.445346
"""
# revision identifiers, used by Alembic.
revision = '17c1af634026'
down_revision = '3c4c29f0a791'
import html5lib
from dateutil.parser import parse as parse_date
import pytips
from pytips.models import Tip
def _extract_publication_date(html):
root = html5lib.parse(html, treebuilder='lxml', namespaceHTMLElements=False)
publication_date_string = root.xpath("//a/@data-datetime")[0]
return parse_date(publication_date_string)
def _update_tip(tip):
tip.publication_date = _extract_publication_date(tip.rendered_html)
def _erase_publication_date(tip):
tip.publication_date = None
def upgrade():
tips = Tip.query.all()
map(_update_tip, tips)
pytips.db.session.commit()
def downgrade():
tips = Tip.query.all()
map(_erase_publication_date, tips)
pytips.db.session.commit()
<commit_msg>Use the utility module's extract_publication_date logic.<commit_after>
|
"""Populate the `publication_date` column.
Revision ID: 17c1af634026
Revises: 3c4c29f0a791
Create Date: 2012-12-13 21:03:03.445346
"""
# revision identifiers, used by Alembic.
revision = '17c1af634026'
down_revision = '3c4c29f0a791'
import html5lib
from dateutil.parser import parse as parse_date
import pytips
from pytips.util import extract_publication_date
from pytips.models import Tip
def _update_tip(tip):
tip.publication_date = extract_publication_date(tip.rendered_html)
def _erase_publication_date(tip):
tip.publication_date = None
def upgrade():
tips = Tip.query.all()
map(_update_tip, tips)
pytips.db.session.commit()
def downgrade():
tips = Tip.query.all()
map(_erase_publication_date, tips)
pytips.db.session.commit()
|
"""Populate the `publication_date` column.
Revision ID: 17c1af634026
Revises: 3c4c29f0a791
Create Date: 2012-12-13 21:03:03.445346
"""
# revision identifiers, used by Alembic.
revision = '17c1af634026'
down_revision = '3c4c29f0a791'
import html5lib
from dateutil.parser import parse as parse_date
import pytips
from pytips.models import Tip
def _extract_publication_date(html):
root = html5lib.parse(html, treebuilder='lxml', namespaceHTMLElements=False)
publication_date_string = root.xpath("//a/@data-datetime")[0]
return parse_date(publication_date_string)
def _update_tip(tip):
tip.publication_date = _extract_publication_date(tip.rendered_html)
def _erase_publication_date(tip):
tip.publication_date = None
def upgrade():
tips = Tip.query.all()
map(_update_tip, tips)
pytips.db.session.commit()
def downgrade():
tips = Tip.query.all()
map(_erase_publication_date, tips)
pytips.db.session.commit()
Use the utility module's extract_publication_date logic."""Populate the `publication_date` column.
Revision ID: 17c1af634026
Revises: 3c4c29f0a791
Create Date: 2012-12-13 21:03:03.445346
"""
# revision identifiers, used by Alembic.
revision = '17c1af634026'
down_revision = '3c4c29f0a791'
import html5lib
from dateutil.parser import parse as parse_date
import pytips
from pytips.util import extract_publication_date
from pytips.models import Tip
def _update_tip(tip):
tip.publication_date = extract_publication_date(tip.rendered_html)
def _erase_publication_date(tip):
tip.publication_date = None
def upgrade():
tips = Tip.query.all()
map(_update_tip, tips)
pytips.db.session.commit()
def downgrade():
tips = Tip.query.all()
map(_erase_publication_date, tips)
pytips.db.session.commit()
|
<commit_before>"""Populate the `publication_date` column.
Revision ID: 17c1af634026
Revises: 3c4c29f0a791
Create Date: 2012-12-13 21:03:03.445346
"""
# revision identifiers, used by Alembic.
revision = '17c1af634026'
down_revision = '3c4c29f0a791'
import html5lib
from dateutil.parser import parse as parse_date
import pytips
from pytips.models import Tip
def _extract_publication_date(html):
root = html5lib.parse(html, treebuilder='lxml', namespaceHTMLElements=False)
publication_date_string = root.xpath("//a/@data-datetime")[0]
return parse_date(publication_date_string)
def _update_tip(tip):
tip.publication_date = _extract_publication_date(tip.rendered_html)
def _erase_publication_date(tip):
tip.publication_date = None
def upgrade():
tips = Tip.query.all()
map(_update_tip, tips)
pytips.db.session.commit()
def downgrade():
tips = Tip.query.all()
map(_erase_publication_date, tips)
pytips.db.session.commit()
<commit_msg>Use the utility module's extract_publication_date logic.<commit_after>"""Populate the `publication_date` column.
Revision ID: 17c1af634026
Revises: 3c4c29f0a791
Create Date: 2012-12-13 21:03:03.445346
"""
# revision identifiers, used by Alembic.
revision = '17c1af634026'
down_revision = '3c4c29f0a791'
import html5lib
from dateutil.parser import parse as parse_date
import pytips
from pytips.util import extract_publication_date
from pytips.models import Tip
def _update_tip(tip):
tip.publication_date = extract_publication_date(tip.rendered_html)
def _erase_publication_date(tip):
tip.publication_date = None
def upgrade():
tips = Tip.query.all()
map(_update_tip, tips)
pytips.db.session.commit()
def downgrade():
tips = Tip.query.all()
map(_erase_publication_date, tips)
pytips.db.session.commit()
|
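The pytips.util module the migration now imports from is not shown in this record; what follows is a plausible standalone reconstruction of extract_publication_date, pieced together from the deleted private helper rather than taken from the repo (requires html5lib, lxml and python-dateutil):

import html5lib
from dateutil.parser import parse as parse_date

def extract_publication_date(html):
    # Same logic as the removed _extract_publication_date: find the first
    # anchor carrying a data-datetime attribute and parse it.
    root = html5lib.parse(html, treebuilder='lxml', namespaceHTMLElements=False)
    publication_date_string = root.xpath("//a/@data-datetime")[0]
    return parse_date(publication_date_string)

print(extract_publication_date(
    '<p><a data-datetime="2012-12-13T21:03:03">permalink</a></p>'))
# -> 2012-12-13 21:03:03

Two side notes: the new revision keeps the now-unused html5lib and parse_date imports, which are harmless but could be dropped; and on Python 3, map() is lazy, so map(_update_tip, tips) would do nothing unless its result were consumed. The pattern dates from Python 2, where map was eager.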
c76b6f4d5e4b6b24b12a712b062fe7ffe0aedda5
|
base/broadcast.py
|
base/broadcast.py
|
from abc import ABCMeta, abstractmethod
import socket, json
class Broadcast(metaclass=ABCMeta):
"""
An interface for defining a broadcast protocol.
The 'propose' and 'decide' methods need to be defined
"""
BUFFER_SIZE = 1024
def __init__(self, peer_list):
self.peers = peer_list
def broadcast(self, message_type, message):
"""
Sends a message to all of the nodes in the network.
:param message_type: The type of message to be sent.
:param message: The message to be sent.
:return:
"""
def _broadcast(final_msg):
final_msg = final_msg.encode('utf-8')
for addr in self.peers:
broadcast_client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
broadcast_client.connect(addr)
broadcast_client.sendall(final_msg)
broadcast_client.shutdown(socket.SHUT_RD)
broadcast_client.close()
message = {"source": socket.gethostname(), "type": message_type, "message": message}
message = json.dumps(message)
_broadcast(message)
@abstractmethod
def broadcast_listener(self):
pass
@abstractmethod
def deliver(self, message):
pass
|
from abc import ABCMeta, abstractmethod
import socket, json
class Broadcast(metaclass=ABCMeta):
"""
An interface for defining a broadcast protocol.
The 'propose' and 'decide' methods need to be defined
"""
BUFFER_SIZE = 1024
def __init__(self, peer_list):
self.peers = peer_list
def broadcast(self, message_type, message):
"""
Sends a message to all of the nodes in the network.
:param message_type: The type of message to be sent.
:param message: The message to be sent.
:return:
"""
def _broadcast(final_msg):
final_msg = final_msg.encode('utf-8')
for addr in self.peers:
broadcast_client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
broadcast_client.connect(addr)
broadcast_client.sendall(final_msg)
broadcast_client.shutdown(socket.SHUT_RD)
broadcast_client.close()
message = {"peer": socket.gethostname(), "type": message_type, "message": message}
message = json.dumps(message)
_broadcast(message)
@abstractmethod
def broadcast_listener(self):
pass
class IBroadcastHandler(metaclass=ABCMeta):
"""
An interface for providing the deliver event of a broadcast protocol
"""
@abstractmethod
def deliver(self, message):
pass
|
Extend Broadcast protocol abstraction with a Handler interface for message delivery
|
Extend Broadcast protocol abstraction with a Handler interface for message delivery
|
Python
|
mit
|
koevskinikola/ByzantineRandomizedConsensus
|
from abc import ABCMeta, abstractmethod
import socket, json
class Broadcast(metaclass=ABCMeta):
"""
An interface for defining a broadcast protocol.
The 'propose' and 'decide' methods need to be defined
"""
BUFFER_SIZE = 1024
def __init__(self, peer_list):
self.peers = peer_list
def broadcast(self, message_type, message):
"""
Sends a message to all of the nodes in the network.
:param message_type: The type of message to be sent.
:param message: The message to be sent.
:return:
"""
def _broadcast(final_msg):
final_msg = final_msg.encode('utf-8')
for addr in self.peers:
broadcast_client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
broadcast_client.connect(addr)
broadcast_client.sendall(final_msg)
broadcast_client.shutdown(socket.SHUT_RD)
broadcast_client.close()
message = {"source": socket.gethostname(), "type": message_type, "message": message}
message = json.dumps(message)
_broadcast(message)
@abstractmethod
def broadcast_listener(self):
pass
@abstractmethod
def deliver(self, message):
pass
Extend Broadcast protocol abstraction with a Handler interface for message delivery
|
from abc import ABCMeta, abstractmethod
import socket, json
class Broadcast(metaclass=ABCMeta):
"""
An interface for defining a broadcast protocol.
The 'propose' and 'decide' methods need to be defined
"""
BUFFER_SIZE = 1024
def __init__(self, peer_list):
self.peers = peer_list
def broadcast(self, message_type, message):
"""
Sends a message to all of the nodes in the network.
:param message_type: The type of message to be sent.
:param message: The message to be sent.
:return:
"""
def _broadcast(final_msg):
final_msg = final_msg.encode('utf-8')
for addr in self.peers:
broadcast_client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
broadcast_client.connect(addr)
broadcast_client.sendall(final_msg)
broadcast_client.shutdown(socket.SHUT_RD)
broadcast_client.close()
message = {"peer": socket.gethostname(), "type": message_type, "message": message}
message = json.dumps(message)
_broadcast(message)
@abstractmethod
def broadcast_listener(self):
pass
class IBroadcastHandler(metaclass=ABCMeta):
"""
An interface for providing the deliver event of a broadcast protocol
"""
@abstractmethod
def deliver(self, message):
pass
|
<commit_before>from abc import ABCMeta, abstractmethod
import socket, json
class Broadcast(metaclass=ABCMeta):
"""
An interface for defining a broadcast protocol.
The 'propose' and 'decide' methods need to be defined
"""
BUFFER_SIZE = 1024
def __init__(self, peer_list):
self.peers = peer_list
def broadcast(self, message_type, message):
"""
Sends a message to all of the nodes in the network.
:param message_type: The type of message to be sent.
:param message: The message to be sent.
:return:
"""
def _broadcast(final_msg):
final_msg = final_msg.encode('utf-8')
for addr in self.peers:
broadcast_client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
broadcast_client.connect(addr)
broadcast_client.sendall(final_msg)
broadcast_client.shutdown(socket.SHUT_RD)
broadcast_client.close()
message = {"source": socket.gethostname(), "type": message_type, "message": message}
message = json.dumps(message)
_broadcast(message)
@abstractmethod
def broadcast_listener(self):
pass
@abstractmethod
def deliver(self, message):
pass
<commit_msg>Extend Broadcast protocol abstraction with a Handler interface for message delivery<commit_after>
|
from abc import ABCMeta, abstractmethod
import socket, json
class Broadcast(metaclass=ABCMeta):
"""
An interface for defining a broadcast protocol.
The 'propose' and 'decide' methods need to be defined
"""
BUFFER_SIZE = 1024
def __init__(self, peer_list):
self.peers = peer_list
def broadcast(self, message_type, message):
"""
Sends a message to all of the nodes in the network.
:param message_type: The type of message to be sent.
:param message: The message to be sent.
:return:
"""
def _broadcast(final_msg):
final_msg = final_msg.encode('utf-8')
for addr in self.peers:
broadcast_client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
broadcast_client.connect(addr)
broadcast_client.sendall(final_msg)
broadcast_client.shutdown(socket.SHUT_RD)
broadcast_client.close()
message = {"peer": socket.gethostname(), "type": message_type, "message": message}
message = json.dumps(message)
_broadcast(message)
@abstractmethod
def broadcast_listener(self):
pass
class IBroadcastHandler(metaclass=ABCMeta):
"""
An interface for providing the deliver event of a broadcast protocol
"""
@abstractmethod
def deliver(self, message):
pass
|
from abc import ABCMeta, abstractmethod
import socket, json
class Broadcast(metaclass=ABCMeta):
"""
An interface for defining a broadcast protocol.
The 'propose' and 'decide' methods need to be defined
"""
BUFFER_SIZE = 1024
def __init__(self, peer_list):
self.peers = peer_list
def broadcast(self, message_type, message):
"""
Sends a message to all of the nodes in the network.
:param message_type: The type of message to be sent.
:param message: The message to be sent.
:return:
"""
def _broadcast(final_msg):
final_msg = final_msg.encode('utf-8')
for addr in self.peers:
broadcast_client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
broadcast_client.connect(addr)
broadcast_client.sendall(final_msg)
broadcast_client.shutdown(socket.SHUT_RD)
broadcast_client.close()
message = {"source": socket.gethostname(), "type": message_type, "message": message}
message = json.dumps(message)
_broadcast(message)
@abstractmethod
def broadcast_listener(self):
pass
@abstractmethod
def deliver(self, message):
pass
Extend Broadcast protocol abstraction with a Handler interface for message deliveryfrom abc import ABCMeta, abstractmethod
import socket, json
class Broadcast(metaclass=ABCMeta):
"""
An interface for defining a broadcast protocol.
The 'propose' and 'decide' methods need to be defined
"""
BUFFER_SIZE = 1024
def __init__(self, peer_list):
self.peers = peer_list
def broadcast(self, message_type, message):
"""
Sends a message to all of the nodes in the network.
:param message_type: The type of message to be sent.
:param message: The message to be sent.
:return:
"""
def _broadcast(final_msg):
final_msg = final_msg.encode('utf-8')
for addr in self.peers:
broadcast_client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
broadcast_client.connect(addr)
broadcast_client.sendall(final_msg)
broadcast_client.shutdown(socket.SHUT_RD)
broadcast_client.close()
message = {"peer": socket.gethostname(), "type": message_type, "message": message}
message = json.dumps(message)
_broadcast(message)
@abstractmethod
def broadcast_listener(self):
pass
class IBroadcastHandler(metaclass=ABCMeta):
"""
An interface for providing the deliver event of a broadcast protocol
"""
@abstractmethod
def deliver(self, message):
pass
|
<commit_before>from abc import ABCMeta, abstractmethod
import socket, json
class Broadcast(metaclass=ABCMeta):
"""
An interface for defining a broadcast protocol.
The 'propose' and 'decide' methods need to be defined
"""
BUFFER_SIZE = 1024
def __init__(self, peer_list):
self.peers = peer_list
def broadcast(self, message_type, message):
"""
Sends a message to all of the nodes in the network.
:param message_type: The type of message to be sent.
:param message: The message to be sent.
:return:
"""
def _broadcast(final_msg):
final_msg = final_msg.encode('utf-8')
for addr in self.peers:
broadcast_client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
broadcast_client.connect(addr)
broadcast_client.sendall(final_msg)
broadcast_client.shutdown(socket.SHUT_RD)
broadcast_client.close()
message = {"source": socket.gethostname(), "type": message_type, "message": message}
message = json.dumps(message)
_broadcast(message)
@abstractmethod
def broadcast_listener(self):
pass
@abstractmethod
def deliver(self, message):
pass
<commit_msg>Extend Broadcast protocol abstraction with a Handler interface for message delivery<commit_after>from abc import ABCMeta, abstractmethod
import socket, json
class Broadcast(metaclass=ABCMeta):
"""
An interface for defining a broadcast protocol.
The 'propose' and 'decide' methods need to be defined
"""
BUFFER_SIZE = 1024
def __init__(self, peer_list):
self.peers = peer_list
def broadcast(self, message_type, message):
"""
Sends a message to all of the nodes in the network.
:param message_type: The type of message to be sent.
:param message: The message to be sent.
:return:
"""
def _broadcast(final_msg):
final_msg = final_msg.encode('utf-8')
for addr in self.peers:
broadcast_client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
broadcast_client.connect(addr)
broadcast_client.sendall(final_msg)
broadcast_client.shutdown(socket.SHUT_RD)
broadcast_client.close()
message = {"peer": socket.gethostname(), "type": message_type, "message": message}
message = json.dumps(message)
_broadcast(message)
@abstractmethod
def broadcast_listener(self):
pass
class IBroadcastHandler(metaclass=ABCMeta):
"""
An interface for providing the deliver event of a broadcast protocol
"""
@abstractmethod
def deliver(self, message):
pass
|
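A sketch of how the new split might be consumed: delivery now lives behind IBroadcastHandler rather than on Broadcast itself. Minimal stand-ins below (PrintHandler is an illustrative name, not part of the repo):

from abc import ABCMeta, abstractmethod

class IBroadcastHandler(metaclass=ABCMeta):
    """Same shape as the interface added above."""
    @abstractmethod
    def deliver(self, message):
        pass

class PrintHandler(IBroadcastHandler):
    def deliver(self, message):
        print('delivered from {peer}: {message}'.format(**message))

handler = PrintHandler()
handler.deliver({"peer": "node-1", "type": "ECHO", "message": "hi"})
# Instantiating IBroadcastHandler() directly raises TypeError, since
# 'deliver' is abstract.

Worth noticing alongside the interface change: the envelope key quietly renames from 'source' to 'peer', so any handler that matched on 'source' would need updating.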
5f8da3c286bf734302ee493e00675b84836ba10e
|
src/server.py
|
src/server.py
|
#!/usr/bin/env python3
import socketserver
from socketserver import BaseRequestHandler
from socketserver import TCPServer
HOST = ''
PORT = 7777
class ConnectionHandler(BaseRequestHandler):
def handle(self):
self.data = self.request.recv(128).strip()
print('{} wrote:\n{}'.format(self.client_address[0], self.data))
def main():
server = TCPServer((HOST, PORT), ConnectionHandler)
server.serve_forever()
server.server_close()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python3
import socketserver
from socketserver import BaseRequestHandler
from socketserver import TCPServer
from inv_kinematics import get_angles
class ConnectionHandler(BaseRequestHandler):
def handle(self):
self.data = self.request.recv(128).strip().decode()
print('Data from {}:\n\t {}'.format(self.client_address[0], self.data))
x, y, z = [int(value) for value in self.data.split(';')]
theta0, theta1, theta2 = get_angles(x, y, z)
print('\n')
print('Setting angles to: \n\t{};{};{}'.format(theta0, theta1, theta2))
print('\n{}\n'.format('='*40))
def main():
host = ''
port = 7777
server = None
while server is None:
try:
server = TCPServer((host, port), ConnectionHandler)
except OSError:
port += 1
continue
print("Serving on: {}".format(port))
server.serve_forever()
server.server_close()
if __name__ == '__main__':
main()
|
Use other ports and show inv problem solutions
|
Use other ports and show inv problem solutions
|
Python
|
mit
|
saleone/bachelor-thesis
|
#!/usr/bin/env python3
import socketserver
from socketserver import BaseRequestHandler
from socketserver import TCPServer
HOST = ''
PORT = 7777
class ConnectionHandler(BaseRequestHandler):
def handle(self):
self.data = self.request.recv(128).strip()
print('{} wrote:\n{}'.format(self.client_address[0], self.data))
def main():
server = TCPServer((HOST, PORT), ConnectionHandler)
server.serve_forever()
server.server_close()
if __name__ == '__main__':
main()
Use other ports and show inv problem solutions
|
#!/usr/bin/env python3
import socketserver
from socketserver import BaseRequestHandler
from socketserver import TCPServer
from inv_kinematics import get_angles
class ConnectionHandler(BaseRequestHandler):
def handle(self):
self.data = self.request.recv(128).strip().decode()
print('Data from {}:\n\t {}'.format(self.client_address[0], self.data))
x, y, z = [int(value) for value in self.data.split(';')]
theta0, theta1, theta2 = get_angles(x, y, z)
print('\n')
print('Setting angles to: \n\t{};{};{}'.format(theta0, theta1, theta2))
print('\n{}\n'.format('='*40))
def main():
host = ''
port = 7777
server = None
while server is None:
try:
server = TCPServer((host, port), ConnectionHandler)
except OSError:
port += 1
continue
print("Serving on: {}".format(port))
server.serve_forever()
server.server_close()
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python3
import socketserver
from socketserver import BaseRequestHandler
from socketserver import TCPServer
HOST = ''
PORT = 7777
class ConnectionHandler(BaseRequestHandler):
def handle(self):
self.data = self.request.recv(128).strip()
print('{} wrote:\n{}'.format(self.client_address[0], self.data))
def main():
server = TCPServer((HOST, PORT), ConnectionHandler)
server.serve_forever()
server.server_close()
if __name__ == '__main__':
main()
<commit_msg>Use other ports and show inv problem solutions<commit_after>
|
#!/usr/bin/env python3
import socketserver
from socketserver import BaseRequestHandler
from socketserver import TCPServer
from inv_kinematics import get_angles
class ConnectionHandler(BaseRequestHandler):
def handle(self):
self.data = self.request.recv(128).strip().decode()
print('Data from {}:\n\t {}'.format(self.client_address[0], self.data))
x, y, z = [int(value) for value in self.data.split(';')]
theta0, theta1, theta2 = get_angles(x, y, z)
print('\n')
print('Setting angles to: \n\t{};{};{}'.format(theta0, theta1, theta2))
print('\n{}\n'.format('='*40))
def main():
host = ''
port = 7777
server = None
while server is None:
try:
server = TCPServer((host, port), ConnectionHandler)
except OSError:
port += 1
continue
print("Serving on: {}".format(port))
server.serve_forever()
server.server_close()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python3
import socketserver
from socketserver import BaseRequestHandler
from socketserver import TCPServer
HOST = ''
PORT = 7777
class ConnectionHandler(BaseRequestHandler):
def handle(self):
self.data = self.request.recv(128).strip()
print('{} wrote:\n{}'.format(self.client_address[0], self.data))
def main():
server = TCPServer((HOST, PORT), ConnectionHandler)
server.serve_forever()
server.server_close()
if __name__ == '__main__':
main()
Use other ports and show inv problem solutions#!/usr/bin/env python3
import socketserver
from socketserver import BaseRequestHandler
from socketserver import TCPServer
from inv_kinematics import get_angles
class ConnectionHandler(BaseRequestHandler):
def handle(self):
self.data = self.request.recv(128).strip().decode()
print('Data from {}:\n\t {}'.format(self.client_address[0], self.data))
x, y, z = [int(value) for value in self.data.split(';')]
theta0, theta1, theta2 = get_angles(x, y, z)
print('\n')
print('Setting angles to: \n\t{};{};{}'.format(theta0, theta1, theta2))
print('\n{}\n'.format('='*40))
def main():
host = ''
port = 7777
server = None
while server is None:
try:
server = TCPServer((host, port), ConnectionHandler)
except OSError:
port += 1
continue
print("Serving on: {}".format(port))
server.serve_forever()
server.server_close()
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python3
import socketserver
from socketserver import BaseRequestHandler
from socketserver import TCPServer
HOST = ''
PORT = 7777
class ConnectionHandler(BaseRequestHandler):
def handle(self):
self.data = self.request.recv(128).strip()
print('{} wrote:\n{}'.format(self.client_address[0], self.data))
def main():
server = TCPServer((HOST, PORT), ConnectionHandler)
server.serve_forever()
server.server_close()
if __name__ == '__main__':
main()
<commit_msg>Use other ports and show inv problem solutions<commit_after>#!/usr/bin/env python3
import socketserver
from socketserver import BaseRequestHandler
from socketserver import TCPServer
from inv_kinematics import get_angles
class ConnectionHandler(BaseRequestHandler):
def handle(self):
self.data = self.request.recv(128).strip().decode()
print('Data from {}:\n\t {}'.format(self.client_address[0], self.data))
x, y, z = [int(value) for value in self.data.split(';')]
theta0, theta1, theta2 = get_angles(x, y, z)
print('\n')
print('Setting angles to: \n\t{};{};{}'.format(theta0, theta1, theta2))
print('\n{}\n'.format('='*40))
def main():
host = ''
port = 7777
server = None
while server is None:
try:
server = TCPServer((host, port), ConnectionHandler)
except OSError:
port += 1
continue
print("Serving on: {}".format(port))
server.serve_forever()
server.server_close()
if __name__ == '__main__':
main()
|
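The handler above parses 'x;y;z' integers off the wire; a minimal matching client is sketched below (the host, port and example coordinates are assumptions, and inv_kinematics is not shown in this record):

import socket

def send_target(x, y, z, host='127.0.0.1', port=7777):
    # Mirrors the server's int(value) parsing of ';'-separated fields.
    payload = '{};{};{}'.format(x, y, z).encode()
    with socket.create_connection((host, port)) as conn:
        conn.sendall(payload)

# The server walks past 7777 when the port is busy, so match the port it
# prints at startup, e.g.:
# send_target(120, 40, 95, port=7778)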
d307abc0c1e96d7c0e7e6c465ded275f796721d3
|
channels/hacks.py
|
channels/hacks.py
|
def monkeypatch_django():
"""
Monkeypatches support for us into parts of Django.
"""
# Ensure that the staticfiles version of runserver bows down to us
# This one is particularly horrible
from django.contrib.staticfiles.management.commands.runserver import (
Command as StaticRunserverCommand
)
from .management.commands.runserver import Command as RunserverCommand
StaticRunserverCommand.__bases__ = (RunserverCommand,)
|
def monkeypatch_django():
"""
Monkeypatches support for us into parts of Django.
"""
# Ensure that the staticfiles version of runserver bows down to us
# This one is particularly horrible
from django.contrib.staticfiles.management.commands.runserver import (
Command as StaticRunserverCommand,
)
from .management.commands.runserver import Command as RunserverCommand
StaticRunserverCommand.__bases__ = (RunserverCommand,)
|
Fix Black linter mismatch with versions
|
Fix Black linter mismatch with versions
|
Python
|
bsd-3-clause
|
andrewgodwin/django-channels,django/channels,andrewgodwin/channels
|
def monkeypatch_django():
"""
Monkeypatches support for us into parts of Django.
"""
# Ensure that the staticfiles version of runserver bows down to us
# This one is particularly horrible
from django.contrib.staticfiles.management.commands.runserver import (
Command as StaticRunserverCommand
)
from .management.commands.runserver import Command as RunserverCommand
StaticRunserverCommand.__bases__ = (RunserverCommand,)
Fix Black linter mismatch with versions
|
def monkeypatch_django():
"""
Monkeypatches support for us into parts of Django.
"""
# Ensure that the staticfiles version of runserver bows down to us
# This one is particularly horrible
from django.contrib.staticfiles.management.commands.runserver import (
Command as StaticRunserverCommand,
)
from .management.commands.runserver import Command as RunserverCommand
StaticRunserverCommand.__bases__ = (RunserverCommand,)
|
<commit_before>def monkeypatch_django():
"""
Monkeypatches support for us into parts of Django.
"""
# Ensure that the staticfiles version of runserver bows down to us
# This one is particularly horrible
from django.contrib.staticfiles.management.commands.runserver import (
Command as StaticRunserverCommand
)
from .management.commands.runserver import Command as RunserverCommand
StaticRunserverCommand.__bases__ = (RunserverCommand,)
<commit_msg>Fix Black linter mismatch with versions<commit_after>
|
def monkeypatch_django():
"""
Monkeypatches support for us into parts of Django.
"""
# Ensure that the staticfiles version of runserver bows down to us
# This one is particularly horrible
from django.contrib.staticfiles.management.commands.runserver import (
Command as StaticRunserverCommand,
)
from .management.commands.runserver import Command as RunserverCommand
StaticRunserverCommand.__bases__ = (RunserverCommand,)
|
def monkeypatch_django():
"""
Monkeypatches support for us into parts of Django.
"""
# Ensure that the staticfiles version of runserver bows down to us
# This one is particularly horrible
from django.contrib.staticfiles.management.commands.runserver import (
Command as StaticRunserverCommand
)
from .management.commands.runserver import Command as RunserverCommand
StaticRunserverCommand.__bases__ = (RunserverCommand,)
Fix Black linter mismatch with versionsdef monkeypatch_django():
"""
Monkeypatches support for us into parts of Django.
"""
# Ensure that the staticfiles version of runserver bows down to us
# This one is particularly horrible
from django.contrib.staticfiles.management.commands.runserver import (
Command as StaticRunserverCommand,
)
from .management.commands.runserver import Command as RunserverCommand
StaticRunserverCommand.__bases__ = (RunserverCommand,)
|
<commit_before>def monkeypatch_django():
"""
Monkeypatches support for us into parts of Django.
"""
# Ensure that the staticfiles version of runserver bows down to us
# This one is particularly horrible
from django.contrib.staticfiles.management.commands.runserver import (
Command as StaticRunserverCommand
)
from .management.commands.runserver import Command as RunserverCommand
StaticRunserverCommand.__bases__ = (RunserverCommand,)
<commit_msg>Fix Black linter mismatch with versions<commit_after>def monkeypatch_django():
"""
Monkeypatches support for us into parts of Django.
"""
# Ensure that the staticfiles version of runserver bows down to us
# This one is particularly horrible
from django.contrib.staticfiles.management.commands.runserver import (
Command as StaticRunserverCommand,
)
from .management.commands.runserver import Command as RunserverCommand
StaticRunserverCommand.__bases__ = (RunserverCommand,)
|
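For readers unfamiliar with the __bases__ swap this hack performs, here is the same move in miniature with plain classes (all names illustrative):

class Base:
    def greet(self):
        return 'base'

class Patched:
    def greet(self):
        return 'patched'

class Child(Base):
    pass

# Re-point Child's MRO at Patched, just as the hack re-bases the
# staticfiles runserver Command onto the channels one.
Child.__bases__ = (Patched,)
print(Child().greet())   # 'patched'

The code change in the commit itself is only a trailing comma inside the parenthesized import, which newer Black versions emit for multi-line constructs; runtime behavior is unchanged.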
4e5fa71790f7a69bf6bb472ee7ce48f4a801953c
|
test/util.py
|
test/util.py
|
"""
Utilities used throughout the tests.
"""
from functools import wraps
from mongoalchemy.session import Session
DB_NAME = 'mongoalchemy-unit-test'
def known_failure(fun):
"""
Wraps a test known to fail without causing an actual test failure.
"""
@wraps(fun)
def wrapper(*args, **kwds):
try:
fun(*args, **kwds)
raise Exception('Known failure passed! %s' % fun.__name__)
except:
pass
return wrapper
def get_session():
"""
Returns the :class:`Session` used for testing.
"""
return Session.connect(DB_NAME)
|
"""
Utilities used throughout the tests.
"""
from functools import wraps
from mongoalchemy.session import Session
DB_NAME = 'mongoalchemy-unit-test'
""" Name of the database to use for testing. """
def known_failure(fun):
"""
Wraps a test known to fail without causing an actual test failure.
"""
@wraps(fun)
def wrapper(*args, **kwds):
try:
fun(*args, **kwds)
raise Exception('Known failure passed! %s' % fun.__name__)
except:
pass
return wrapper
def get_session(*args, **kwargs):
"""
Returns the :class:`Session` used for testing.
"""
return Session.connect(DB_NAME, *args, **kwargs)
|
Allow get_session to pass through arguments, like `safe`.
|
Allow get_session to pass through arguments, like `safe`.
|
Python
|
mit
|
shakefu/MongoAlchemy,shakefu/MongoAlchemy,shakefu/MongoAlchemy
|
"""
Utilities used throughout the tests.
"""
from functools import wraps
from mongoalchemy.session import Session
DB_NAME = 'mongoalchemy-unit-test'
def known_failure(fun):
"""
Wraps a test known to fail without causing an actual test failure.
"""
@wraps(fun)
def wrapper(*args, **kwds):
try:
fun(*args, **kwds)
raise Exception('Known failure passed! %s' % fun.__name__)
except:
pass
return wrapper
def get_session():
"""
Returns the :class:`Session` used for testing.
"""
return Session.connect(DB_NAME)
Allow get_session to pass through arguments, like `safe`.
|
"""
Utilities used throughout the tests.
"""
from functools import wraps
from mongoalchemy.session import Session
DB_NAME = 'mongoalchemy-unit-test'
""" Name of the database to use for testing. """
def known_failure(fun):
"""
Wraps a test known to fail without causing an actual test failure.
"""
@wraps(fun)
def wrapper(*args, **kwds):
try:
fun(*args, **kwds)
raise Exception('Known failure passed! %s' % fun.__name__)
except:
pass
return wrapper
def get_session(*args, **kwargs):
"""
Returns the :class:`Session` used for testing.
"""
return Session.connect(DB_NAME, *args, **kwargs)
|
<commit_before>"""
Utilities used throughout the tests.
"""
from functools import wraps
from mongoalchemy.session import Session
DB_NAME = 'mongoalchemy-unit-test'
def known_failure(fun):
"""
Wraps a test known to fail without causing an actual test failure.
"""
@wraps(fun)
def wrapper(*args, **kwds):
try:
fun(*args, **kwds)
raise Exception('Known failure passed! %s' % fun.__name__)
except:
pass
return wrapper
def get_session():
"""
Returns the :class:`Session` used for testing.
"""
return Session.connect(DB_NAME)
<commit_msg>Allow get_session to pass through arguments, like `safe`.<commit_after>
|
"""
Utilities used throughout the tests.
"""
from functools import wraps
from mongoalchemy.session import Session
DB_NAME = 'mongoalchemy-unit-test'
""" Name of the database to use for testing. """
def known_failure(fun):
"""
Wraps a test known to fail without causing an actual test failure.
"""
@wraps(fun)
def wrapper(*args, **kwds):
try:
fun(*args, **kwds)
raise Exception('Known failure passed! %s' % fun.__name__)
except:
pass
return wrapper
def get_session(*args, **kwargs):
"""
Returns the :class:`Session` used for testing.
"""
return Session.connect(DB_NAME, *args, **kwargs)
|
"""
Utilities used throughout the tests.
"""
from functools import wraps
from mongoalchemy.session import Session
DB_NAME = 'mongoalchemy-unit-test'
def known_failure(fun):
"""
Wraps a test known to fail without causing an actual test failure.
"""
@wraps(fun)
def wrapper(*args, **kwds):
try:
fun(*args, **kwds)
raise Exception('Known failure passed! %s' % fun.__name__)
except:
pass
return wrapper
def get_session():
"""
Returns the :class:`Session` used for testing.
"""
return Session.connect(DB_NAME)
Allow get_session to pass through arguments, like `safe`."""
Utilities used throughout the tests.
"""
from functools import wraps
from mongoalchemy.session import Session
DB_NAME = 'mongoalchemy-unit-test'
""" Name of the database to use for testing. """
def known_failure(fun):
"""
Wraps a test known to fail without causing an actual test failure.
"""
@wraps(fun)
def wrapper(*args, **kwds):
try:
fun(*args, **kwds)
raise Exception('Known failure passed! %s' % fun.__name__)
except:
pass
return wrapper
def get_session(*args, **kwargs):
"""
Returns the :class:`Session` used for testing.
"""
return Session.connect(DB_NAME, *args, **kwargs)
|
<commit_before>"""
Utilities used throughout the tests.
"""
from functools import wraps
from mongoalchemy.session import Session
DB_NAME = 'mongoalchemy-unit-test'
def known_failure(fun):
"""
Wraps a test known to fail without causing an actual test failure.
"""
@wraps(fun)
def wrapper(*args, **kwds):
try:
fun(*args, **kwds)
raise Exception('Known failure passed! %s' % fun.__name__)
except:
pass
return wrapper
def get_session():
"""
Returns the :class:`Session` used for testing.
"""
return Session.connect(DB_NAME)
<commit_msg>Allow get_session to pass through arguments, like `safe`.<commit_after>"""
Utilities used throughout the tests.
"""
from functools import wraps
from mongoalchemy.session import Session
DB_NAME = 'mongoalchemy-unit-test'
""" Name of the database to use for testing. """
def known_failure(fun):
"""
Wraps a test known to fail without causing an actual test failure.
"""
@wraps(fun)
def wrapper(*args, **kwds):
try:
fun(*args, **kwds)
raise Exception('Known failure passed! %s' % fun.__name__)
except:
pass
return wrapper
def get_session(*args, **kwargs):
"""
Returns the :class:`Session` used for testing.
"""
return Session.connect(DB_NAME, *args, **kwargs)
|
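One subtlety in known_failure above: the sentinel raise sits inside the try, so the bare except swallows it and a known failure that unexpectedly passes goes unreported. A variant that surfaces that case (a sketch, not the repo's code):

import functools

def known_failure(fun):
    @functools.wraps(fun)
    def wrapper(*args, **kwds):
        try:
            fun(*args, **kwds)
        except Exception:
            return                      # failed as expected: swallow it
        raise AssertionError('Known failure passed! %s' % fun.__name__)
    return wrapper

@known_failure
def flaky():
    raise RuntimeError('still broken')

flaky()                                  # passes silently, as intended

With the passthrough added in this commit, tests can also request acknowledged writes, e.g. get_session(safe=True), assuming MongoAlchemy's Session.connect accepts that keyword (the commit message implies it does).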
573f3fd726c7bf1495bfdfeb2201317abc2949e4
|
src/parser/menu_item.py
|
src/parser/menu_item.py
|
"""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2017"""
class MenuItem:
""" To store menu item information """
def __init__(self, txt, target, hidden):
""" Constructor
txt - Menu link text
target - Can be several things
-- Reference to another jahia page,using its uuid (like c058dc4f-247d-4b23-90d7-25e1206f7de3)
-- hardcoded URL (absolute URL)
-- link to sitemap (so equals 'sitemap')
-- hardcoded URL to file (includes '/files/' in string)
-- None if normal menu entry for page
"""
self.txt = txt
self.target = target
if self.target:
self.target = self.target.strip()
self.hidden = hidden
self.children = []
self.children_sort_way = None
def target_is_url(self):
return False if self.target is None else self.target.startswith('http')
def target_is_sitemap(self):
return self.target == "sitemap"
def target_is_file(self):
return False if self.target is None else '/files/' in self.target
def target_is_reference(self):
# If it is not another possibility, it is a reference
return not self.target_is_sitemap() and \
not self.target_is_url() and \
self.target is not None
def sort_children(self, sort_way):
self.children_sort_way = sort_way
self.children.sort(key=lambda x: x.txt, reverse=(sort_way == 'desc'))
|
"""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2017"""
class MenuItem:
""" To store menu item information """
def __init__(self, txt, target, hidden):
""" Constructor
txt - Menu link text
target - Can be several things
-- Reference to another jahia page,using its uuid (like c058dc4f-247d-4b23-90d7-25e1206f7de3)
-- hardcoded URL (absolute URL)
-- link to sitemap (so equals 'sitemap')
-- hardcoded URL to file (includes '/files/' in string)
-- None if normal menu entry for page
"""
self.txt = txt
self.target = target
if self.target:
self.target = self.target.strip()
self.hidden = hidden
self.children = []
self.children_sort_way = None
def target_is_url(self):
return False if self.target is None else self.target.startswith('http')
def target_is_sitemap(self):
return self.target == "sitemap"
def target_is_file(self):
return False if self.target is None else '/files/' in self.target
def sort_children(self, sort_way):
self.children_sort_way = sort_way
self.children.sort(key=lambda x: x.txt, reverse=(sort_way == 'desc'))
|
Remove previously added method because finally not used...
|
Remove previously added method because finally not used...
|
Python
|
mit
|
epfl-idevelop/jahia2wp,epfl-idevelop/jahia2wp,epfl-idevelop/jahia2wp,epfl-idevelop/jahia2wp
|
"""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2017"""
class MenuItem:
""" To store menu item information """
def __init__(self, txt, target, hidden):
""" Constructor
txt - Menu link text
target - Can be several things
-- Reference to another jahia page,using its uuid (like c058dc4f-247d-4b23-90d7-25e1206f7de3)
-- hardcoded URL (absolute URL)
-- link to sitemap (so equals 'sitemap')
-- hardcoded URL to file (includes '/files/' in string)
-- None if normal menu entry for page
"""
self.txt = txt
self.target = target
if self.target:
self.target = self.target.strip()
self.hidden = hidden
self.children = []
self.children_sort_way = None
def target_is_url(self):
return False if self.target is None else self.target.startswith('http')
def target_is_sitemap(self):
return self.target == "sitemap"
def target_is_file(self):
return False if self.target is None else '/files/' in self.target
def target_is_reference(self):
# If it is not another possibility, it is a reference
return not self.target_is_sitemap() and \
not self.target_is_url() and \
self.target is not None
def sort_children(self, sort_way):
self.children_sort_way = sort_way
self.children.sort(key=lambda x: x.txt, reverse=(sort_way == 'desc'))
Remove previously added method because finally not used...
|
"""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2017"""
class MenuItem:
""" To store menu item information """
def __init__(self, txt, target, hidden):
""" Constructor
txt - Menu link text
target - Can be several things
-- Reference to another jahia page,using its uuid (like c058dc4f-247d-4b23-90d7-25e1206f7de3)
-- hardcoded URL (absolute URL)
-- link to sitemap (so equals 'sitemap')
-- hardcoded URL to file (includes '/files/' in string)
-- None if normal menu entry for page
"""
self.txt = txt
self.target = target
if self.target:
self.target = self.target.strip()
self.hidden = hidden
self.children = []
self.children_sort_way = None
def target_is_url(self):
return False if self.target is None else self.target.startswith('http')
def target_is_sitemap(self):
return self.target == "sitemap"
def target_is_file(self):
return False if self.target is None else '/files/' in self.target
def sort_children(self, sort_way):
self.children_sort_way = sort_way
self.children.sort(key=lambda x: x.txt, reverse=(sort_way == 'desc'))
|
<commit_before>"""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2017"""
class MenuItem:
""" To store menu item information """
def __init__(self, txt, target, hidden):
""" Constructor
txt - Menu link text
target - Can be several things
-- Reference to another jahia page,using its uuid (like c058dc4f-247d-4b23-90d7-25e1206f7de3)
-- hardcoded URL (absolute URL)
-- link to sitemap (so equals 'sitemap')
-- hardcoded URL to file (includes '/files/' in string)
-- None if normal menu entry for page
"""
self.txt = txt
self.target = target
if self.target:
self.target = self.target.strip()
self.hidden = hidden
self.children = []
self.children_sort_way = None
def target_is_url(self):
return False if self.target is None else self.target.startswith('http')
def target_is_sitemap(self):
return self.target == "sitemap"
def target_is_file(self):
return False if self.target is None else '/files/' in self.target
def target_is_reference(self):
# If it is not another possibility, it is a reference
return not self.target_is_sitemap() and \
not self.target_is_url() and \
self.target is not None
def sort_children(self, sort_way):
self.children_sort_way = sort_way
self.children.sort(key=lambda x: x.txt, reverse=(sort_way == 'desc'))
<commit_msg>Remove previously added method because finally not used...<commit_after>
|
"""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2017"""
class MenuItem:
""" To store menu item information """
def __init__(self, txt, target, hidden):
""" Constructor
txt - Menu link text
target - Can be several things
-- Reference to another jahia page,using its uuid (like c058dc4f-247d-4b23-90d7-25e1206f7de3)
-- hardcoded URL (absolute URL)
-- link to sitemap (so equals 'sitemap')
-- hardcoded URL to file (includes '/files/' in string)
-- None if normal menu entry for page
"""
self.txt = txt
self.target = target
if self.target:
self.target = self.target.strip()
self.hidden = hidden
self.children = []
self.children_sort_way = None
def target_is_url(self):
return False if self.target is None else self.target.startswith('http')
def target_is_sitemap(self):
return self.target == "sitemap"
def target_is_file(self):
return False if self.target is None else '/files/' in self.target
def sort_children(self, sort_way):
self.children_sort_way = sort_way
self.children.sort(key=lambda x: x.txt, reverse=(sort_way == 'desc'))
|
"""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2017"""
class MenuItem:
""" To store menu item information """
def __init__(self, txt, target, hidden):
""" Constructor
txt - Menu link text
target - Can be several things
-- Reference to another jahia page,using its uuid (like c058dc4f-247d-4b23-90d7-25e1206f7de3)
-- hardcoded URL (absolute URL)
-- link to sitemap (so equals 'sitemap')
-- hardcoded URL to file (includes '/files/' in string)
-- None if normal menu entry for page
"""
self.txt = txt
self.target = target
if self.target:
self.target = self.target.strip()
self.hidden = hidden
self.children = []
self.children_sort_way = None
def target_is_url(self):
return False if self.target is None else self.target.startswith('http')
def target_is_sitemap(self):
return self.target == "sitemap"
def target_is_file(self):
return False if self.target is None else '/files/' in self.target
def target_is_reference(self):
# If it is not another possibility, it is a reference
return not self.target_is_sitemap() and \
not self.target_is_url() and \
self.target is not None
def sort_children(self, sort_way):
self.children_sort_way = sort_way
self.children.sort(key=lambda x: x.txt, reverse=(sort_way == 'desc'))
Remove previously added method because finally not used..."""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2017"""
class MenuItem:
""" To store menu item information """
def __init__(self, txt, target, hidden):
""" Constructor
txt - Menu link text
target - Can be several things
           -- Reference to another jahia page, using its uuid (like c058dc4f-247d-4b23-90d7-25e1206f7de3)
-- hardcoded URL (absolute URL)
-- link to sitemap (so equals 'sitemap')
-- hardcoded URL to file (includes '/files/' in string)
-- None if normal menu entry for page
"""
self.txt = txt
self.target = target
if self.target:
self.target = self.target.strip()
self.hidden = hidden
self.children = []
self.children_sort_way = None
def target_is_url(self):
return False if self.target is None else self.target.startswith('http')
def target_is_sitemap(self):
return self.target == "sitemap"
def target_is_file(self):
return False if self.target is None else '/files/' in self.target
def sort_children(self, sort_way):
self.children_sort_way = sort_way
self.children.sort(key=lambda x: x.txt, reverse=(sort_way == 'desc'))
|
<commit_before>"""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2017"""
class MenuItem:
""" To store menu item information """
def __init__(self, txt, target, hidden):
""" Constructor
txt - Menu link text
target - Can be several things
           -- Reference to another jahia page, using its uuid (like c058dc4f-247d-4b23-90d7-25e1206f7de3)
-- hardcoded URL (absolute URL)
-- link to sitemap (so equals 'sitemap')
-- hardcoded URL to file (includes '/files/' in string)
-- None if normal menu entry for page
"""
self.txt = txt
self.target = target
if self.target:
self.target = self.target.strip()
self.hidden = hidden
self.children = []
self.children_sort_way = None
def target_is_url(self):
return False if self.target is None else self.target.startswith('http')
def target_is_sitemap(self):
return self.target == "sitemap"
def target_is_file(self):
return False if self.target is None else '/files/' in self.target
def target_is_reference(self):
# If it is not another possibility, it is a reference
return not self.target_is_sitemap() and \
not self.target_is_url() and \
self.target is not None
def sort_children(self, sort_way):
self.children_sort_way = sort_way
self.children.sort(key=lambda x: x.txt, reverse=(sort_way == 'desc'))
<commit_msg>Remove previously added method because it ended up unused...<commit_after>"""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2017"""
class MenuItem:
""" To store menu item information """
def __init__(self, txt, target, hidden):
""" Constructor
txt - Menu link text
target - Can be several things
           -- Reference to another jahia page, using its uuid (like c058dc4f-247d-4b23-90d7-25e1206f7de3)
-- hardcoded URL (absolute URL)
-- link to sitemap (so equals 'sitemap')
-- hardcoded URL to file (includes '/files/' in string)
-- None if normal menu entry for page
"""
self.txt = txt
self.target = target
if self.target:
self.target = self.target.strip()
self.hidden = hidden
self.children = []
self.children_sort_way = None
def target_is_url(self):
return False if self.target is None else self.target.startswith('http')
def target_is_sitemap(self):
return self.target == "sitemap"
def target_is_file(self):
return False if self.target is None else '/files/' in self.target
def sort_children(self, sort_way):
self.children_sort_way = sort_way
self.children.sort(key=lambda x: x.txt, reverse=(sort_way == 'desc'))
|
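Editor's note: the commit above drops target_is_reference() because it ended up unused. If a consumer later needs that classification, it can still be derived by elimination from the predicates that remain. A minimal sketch (illustrative, not part of the recorded commit), assuming the MenuItem class shown above:

def menu_target_kind(item):
    # Classify a MenuItem target using only the methods the commit keeps;
    # a jahia uuid reference is whatever remains after the other checks.
    if item.target is None:
        return 'page'      # normal menu entry for a page
    if item.target_is_sitemap():
        return 'sitemap'
    if item.target_is_file():
        return 'file'
    if item.target_is_url():
        return 'url'
    return 'reference'     # uuid reference to another jahia page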
6397ca218be2fe1d8095a04b2c6623f2e1d1fd7b
|
autograder/controller/autograder.py
|
autograder/controller/autograder.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This module is part of the Clemson ACM Auto Grader
This module contains the main method for the module
"""
import argparse
from autograder.controller import setup, grade_project
def main():
"""
Main method for the autograder
"""
options = parse_args()
#Set up for the project
settings = setup.parse_settings(options)
setup.setup_logging(settings)
setup.prepare_enviroment(settings)
setup.build_tests(settings)
#Grade the results
grade_project.grade(settings)
def parse_args():
"""
Parse the argument file for the appropriate options
Command line options always override options in the config file
"""
parser = argparse.ArgumentParser(prog="Auto Grader")
parser.add_argument('config_file', type=argparse.FileType('r'),
help='A path to the main configuration file', required=True)
return parser.parse_args()
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This module is part of the Clemson ACM Auto Grader
Copyright (c) 2016, Robert Underwood
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
This module contains the main method for the module
"""
import argparse
from autograder.controller import setup, grade_project
def main():
"""
Main method for the autograder
"""
options = parse_args()
#Set up for the project
settings = setup.parse_settings(options)
setup.setup_logging(settings)
#Grade the results
grade_project.grade(settings)
def parse_args():
"""
Parse the argument file for the appropriate options
Command line options always override options in the config file
"""
parser = argparse.ArgumentParser(prog="Auto Grader")
parser.add_argument('config_file', type=argparse.FileType('r'),
help='A path to the main configuration file', required=True)
return parser.parse_args()
if __name__ == "__main__":
main()
|
Clean up of main module
|
Clean up of main module
Moved two commands to the grade_project script. Added copyright
statement.
|
Python
|
bsd-2-clause
|
robertu94/autograder,robertu94/autograder,robertu94/autograder
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This module is part of the Clemson ACM Auto Grader
This module contains the main method for the module
"""
import argparse
from autograder.controller import setup, grade_project
def main():
"""
Main method for the autograder
"""
options = parse_args()
#Set up for the project
settings = setup.parse_settings(options)
setup.setup_logging(settings)
setup.prepare_enviroment(settings)
setup.build_tests(settings)
#Grade the results
grade_project.grade(settings)
def parse_args():
"""
Parse the argument file for the appropriate options
Command line options always override options in the config file
"""
parser = argparse.ArgumentParser(prog="Auto Grader")
parser.add_argument('config_file', type=argparse.FileType('r'),
help='A path to the main configuration file', required=True)
return parser.parse_args()
Clean up of main module
Moved two commands to the grade_project script. Added copyright
statement.
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This module is part of the Clemson ACM Auto Grader
Copyright (c) 2016, Robert Underwood
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
This module contains the main method for the module
"""
import argparse
from autograder.controller import setup, grade_project
def main():
"""
Main method for the autograder
"""
options = parse_args()
#Set up for the project
settings = setup.parse_settings(options)
setup.setup_logging(settings)
#Grade the results
grade_project.grade(settings)
def parse_args():
"""
Parse the argument file for the appropriate options
Command line options always override options in the config file
"""
parser = argparse.ArgumentParser(prog="Auto Grader")
parser.add_argument('config_file', type=argparse.FileType('r'),
help='A path to the main configuration file', required=True)
return parser.parse_args()
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This module is part of the Clemson ACM Auto Grader
This module contains the main method for the module
"""
import argparse
from autograder.controller import setup, grade_project
def main():
"""
Main method for the autograder
"""
options = parse_args()
#Set up for the project
settings = setup.parse_settings(options)
setup.setup_logging(settings)
setup.prepare_enviroment(settings)
setup.build_tests(settings)
#Grade the results
grade_project.grade(settings)
def parse_args():
"""
Parse the argument file for the appropriate options
Command line options always override options in the config file
"""
parser = argparse.ArgumentParser(prog="Auto Grader")
parser.add_argument('config_file', type=argparse.FileType('r'),
help='A path to the main configuration file', required=True)
return parser.parse_args()
<commit_msg>Clean up of main module
Moved two commands to the grade_project script. Added copyright
statement.<commit_after>
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This module is part of the Clemson ACM Auto Grader
Copyright (c) 2016, Robert Underwood
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
This module contains the main method for the module
"""
import argparse
from autograder.controller import setup, grade_project
def main():
"""
Main method for the autograder
"""
options = parse_args()
#Set up for the project
settings = setup.parse_settings(options)
setup.setup_logging(settings)
#Grade the results
grade_project.grade(settings)
def parse_args():
"""
Parse the argument file for the appropriate options
Command line options always override options in the config file
"""
parser = argparse.ArgumentParser(prog="Auto Grader")
parser.add_argument('config_file', type=argparse.FileType('r'),
help='A path to the main configuration file', required=True)
return parser.parse_args()
if __name__ == "__main__":
main()
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This module is part of the Clemson ACM Auto Grader
This module contains the main method for the module
"""
import argparse
from autograder.controller import setup, grade_project
def main():
"""
Main method for the autograder
"""
options = parse_args()
#Set up for the project
settings = setup.parse_settings(options)
setup.setup_logging(settings)
setup.prepare_enviroment(settings)
setup.build_tests(settings)
#Grade the results
grade_project.grade(settings)
def parse_args():
"""
Parse the argument file for the appropriate options
Command line options always override options in the config file
"""
parser = argparse.ArgumentParser(prog="Auto Grader")
parser.add_argument('config_file', type=argparse.FileType('r'),
help='A path to the main configuration file', required=True)
return parser.parse_args()
Clean up of main module
Moved two commands to the grade_project script. Added copyright
statement.#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This module is part of the Clemson ACM Auto Grader
Copyright (c) 2016, Robert Underwood
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
This module contains the main method for the module
"""
import argparse
from autograder.controller import setup, grade_project
def main():
"""
Main method for the autograder
"""
options = parse_args()
#Set up for the project
settings = setup.parse_settings(options)
setup.setup_logging(settings)
#Grade the results
grade_project.grade(settings)
def parse_args():
"""
Parse the argument file for the appropriate options
Command line options always override options in the config file
"""
parser = argparse.ArgumentParser(prog="Auto Grader")
parser.add_argument('config_file', type=argparse.FileType('r'),
help='A path to the main configuration file', required=True)
return parser.parse_args()
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This module is part of the Clemson ACM Auto Grader
This module contains the main method for the module
"""
import argparse
from autograder.controller import setup, grade_project
def main():
"""
Main method for the autograder
"""
options = parse_args()
#Set up for the project
settings = setup.parse_settings(options)
setup.setup_logging(settings)
setup.prepare_enviroment(settings)
setup.build_tests(settings)
#Grade the results
grade_project.grade(settings)
def parse_args():
"""
Parse the argument file for the appropriate options
Command line options always override options in the config file
"""
parser = argparse.ArgumentParser(prog="Auto Grader")
parser.add_argument('config_file', type=argparse.FileType('r'),
help='A path to the main configuration file', required=True)
return parser.parse_args()
<commit_msg>Clean up of main module
Moved two commands to the grade_project script. Added copyright
statement.<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This module is part of the Clemson ACM Auto Grader
Copyright (c) 2016, Robert Underwood
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
This module contains the main method for the module
"""
import argparse
from autograder.controller import setup, grade_project
def main():
"""
Main method for the autograder
"""
options = parse_args()
#Set up for the project
settings = setup.parse_settings(options)
setup.setup_logging(settings)
#Grade the results
grade_project.grade(settings)
def parse_args():
"""
Parse the argument file for the appropriate options
Command line options always override options in the config file
"""
parser = argparse.ArgumentParser(prog="Auto Grader")
parser.add_argument('config_file', type=argparse.FileType('r'),
help='A path to the main configuration file', required=True)
return parser.parse_args()
if __name__ == "__main__":
main()
|
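Editor's note: both versions of parse_args() above declare the positional 'config_file' with required=True, which stock argparse rejects at parser-construction time (TypeError: 'required' is an unsupported argument for positionals). A corrected sketch (an editorial fix, not part of the recorded commit):

import argparse

def parse_args():
    parser = argparse.ArgumentParser(prog='Auto Grader')
    # A positional argument is mandatory by definition, so required=True
    # must be omitted; passing it raises TypeError in stock argparse.
    parser.add_argument('config_file', type=argparse.FileType('r'),
                        help='A path to the main configuration file')
    return parser.parse_args()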
5251534283d233d5f1e9cfc33f8de9cf18cd3ba1
|
server/lib/python/cartodb_services/cartodb_services/google/client_factory.py
|
server/lib/python/cartodb_services/cartodb_services/google/client_factory.py
|
#!/usr/local/bin/python
# -*- coding: utf-8 -*-
import googlemaps
import base64
from exceptions import InvalidGoogleCredentials
class GoogleMapsClientFactory():
clients = {}
@classmethod
def get(cls, client_id, client_secret, channel=None):
cache_key = "{}:{}:{}".format(client_id, client_secret, channel)
client = cls.clients.get(cache_key)
if not client:
cls.assert_valid_crendentials(client_secret)
client = googlemaps.Client(
client_id=client_id,
client_secret=client_secret,
channel=channel)
cls.clients[cache_key] = client
return client
@classmethod
def assert_valid_crendentials(cls, client_secret):
if not cls.valid_credentials(client_secret):
raise InvalidGoogleCredentials
@staticmethod
def valid_credentials(client_secret):
try:
            # b64decode only fails if the string doesn't have correct b64 padding,
            # but doing the check this way lets us raise a clearer error than
            # TypeError: Incorrect padding
b64_secret = client_secret.replace('-', '+').replace('_', '/')
base64.b64decode(b64_secret)
return True
except TypeError:
return False
|
#!/usr/local/bin/python
# -*- coding: utf-8 -*-
import googlemaps
import base64
from exceptions import InvalidGoogleCredentials
class GoogleMapsClientFactory():
clients = {}
@classmethod
def get(cls, client_id, client_secret, channel=None):
cache_key = "{}:{}:{}".format(client_id, client_secret, channel)
client = cls.clients.get(cache_key)
if not client:
if client_id:
cls.assert_valid_crendentials(client_secret)
client = googlemaps.Client(
client_id=client_id,
client_secret=client_secret,
channel=channel)
else:
client = googlemaps.Client(key=client_secret)
cls.clients[cache_key] = client
return client
@classmethod
def assert_valid_crendentials(cls, client_secret):
if not cls.valid_credentials(client_secret):
raise InvalidGoogleCredentials
@staticmethod
def valid_credentials(client_secret):
try:
            # b64decode only fails if the string doesn't have correct b64 padding,
            # but doing the check this way lets us raise a clearer error than
            # TypeError: Incorrect padding
b64_secret = client_secret.replace('-', '+').replace('_', '/')
base64.b64decode(b64_secret)
return True
except TypeError:
return False
|
Allow using non-Premium keys for Google Maps client
|
Allow using non-Premium keys for Google Maps client
|
Python
|
bsd-3-clause
|
CartoDB/dataservices-api,CartoDB/geocoder-api,CartoDB/geocoder-api,CartoDB/dataservices-api,CartoDB/dataservices-api,CartoDB/dataservices-api,CartoDB/geocoder-api,CartoDB/geocoder-api
|
#!/usr/local/bin/python
# -*- coding: utf-8 -*-
import googlemaps
import base64
from exceptions import InvalidGoogleCredentials
class GoogleMapsClientFactory():
clients = {}
@classmethod
def get(cls, client_id, client_secret, channel=None):
cache_key = "{}:{}:{}".format(client_id, client_secret, channel)
client = cls.clients.get(cache_key)
if not client:
cls.assert_valid_crendentials(client_secret)
client = googlemaps.Client(
client_id=client_id,
client_secret=client_secret,
channel=channel)
cls.clients[cache_key] = client
return client
@classmethod
def assert_valid_crendentials(cls, client_secret):
if not cls.valid_credentials(client_secret):
raise InvalidGoogleCredentials
@staticmethod
def valid_credentials(client_secret):
try:
            # b64decode only fails if the string doesn't have correct b64 padding,
            # but doing the check this way lets us raise a clearer error than
            # TypeError: Incorrect padding
b64_secret = client_secret.replace('-', '+').replace('_', '/')
base64.b64decode(b64_secret)
return True
except TypeError:
return False
Allow using non-Premium keys for Google Maps client
|
#!/usr/local/bin/python
# -*- coding: utf-8 -*-
import googlemaps
import base64
from exceptions import InvalidGoogleCredentials
class GoogleMapsClientFactory():
clients = {}
@classmethod
def get(cls, client_id, client_secret, channel=None):
cache_key = "{}:{}:{}".format(client_id, client_secret, channel)
client = cls.clients.get(cache_key)
if not client:
if client_id:
cls.assert_valid_crendentials(client_secret)
client = googlemaps.Client(
client_id=client_id,
client_secret=client_secret,
channel=channel)
else:
client = googlemaps.Client(key=client_secret)
cls.clients[cache_key] = client
return client
@classmethod
def assert_valid_crendentials(cls, client_secret):
if not cls.valid_credentials(client_secret):
raise InvalidGoogleCredentials
@staticmethod
def valid_credentials(client_secret):
try:
            # b64decode only fails if the string doesn't have correct b64 padding,
            # but doing the check this way lets us raise a clearer error than
            # TypeError: Incorrect padding
b64_secret = client_secret.replace('-', '+').replace('_', '/')
base64.b64decode(b64_secret)
return True
except TypeError:
return False
|
<commit_before>#!/usr/local/bin/python
# -*- coding: utf-8 -*-
import googlemaps
import base64
from exceptions import InvalidGoogleCredentials
class GoogleMapsClientFactory():
clients = {}
@classmethod
def get(cls, client_id, client_secret, channel=None):
cache_key = "{}:{}:{}".format(client_id, client_secret, channel)
client = cls.clients.get(cache_key)
if not client:
cls.assert_valid_crendentials(client_secret)
client = googlemaps.Client(
client_id=client_id,
client_secret=client_secret,
channel=channel)
cls.clients[cache_key] = client
return client
@classmethod
def assert_valid_crendentials(cls, client_secret):
if not cls.valid_credentials(client_secret):
raise InvalidGoogleCredentials
@staticmethod
def valid_credentials(client_secret):
try:
            # b64decode only fails if the string doesn't have correct b64 padding,
            # but doing the check this way lets us raise a clearer error than
            # TypeError: Incorrect padding
b64_secret = client_secret.replace('-', '+').replace('_', '/')
base64.b64decode(b64_secret)
return True
except TypeError:
return False
<commit_msg>Allow using non-Premium keys for Google Maps client<commit_after>
|
#!/usr/local/bin/python
# -*- coding: utf-8 -*-
import googlemaps
import base64
from exceptions import InvalidGoogleCredentials
class GoogleMapsClientFactory():
clients = {}
@classmethod
def get(cls, client_id, client_secret, channel=None):
cache_key = "{}:{}:{}".format(client_id, client_secret, channel)
client = cls.clients.get(cache_key)
if not client:
if client_id:
cls.assert_valid_crendentials(client_secret)
client = googlemaps.Client(
client_id=client_id,
client_secret=client_secret,
channel=channel)
else:
client = googlemaps.Client(key=client_secret)
cls.clients[cache_key] = client
return client
@classmethod
def assert_valid_crendentials(cls, client_secret):
if not cls.valid_credentials(client_secret):
raise InvalidGoogleCredentials
@staticmethod
def valid_credentials(client_secret):
try:
            # b64decode only fails if the string doesn't have correct b64 padding,
            # but doing the check this way lets us raise a clearer error than
            # TypeError: Incorrect padding
b64_secret = client_secret.replace('-', '+').replace('_', '/')
base64.b64decode(b64_secret)
return True
except TypeError:
return False
|
#!/usr/local/bin/python
# -*- coding: utf-8 -*-
import googlemaps
import base64
from exceptions import InvalidGoogleCredentials
class GoogleMapsClientFactory():
clients = {}
@classmethod
def get(cls, client_id, client_secret, channel=None):
cache_key = "{}:{}:{}".format(client_id, client_secret, channel)
client = cls.clients.get(cache_key)
if not client:
cls.assert_valid_crendentials(client_secret)
client = googlemaps.Client(
client_id=client_id,
client_secret=client_secret,
channel=channel)
cls.clients[cache_key] = client
return client
@classmethod
def assert_valid_crendentials(cls, client_secret):
if not cls.valid_credentials(client_secret):
raise InvalidGoogleCredentials
@staticmethod
def valid_credentials(client_secret):
try:
            # b64decode only fails if the string doesn't have correct b64 padding,
            # but doing the check this way lets us raise a clearer error than
            # TypeError: Incorrect padding
b64_secret = client_secret.replace('-', '+').replace('_', '/')
base64.b64decode(b64_secret)
return True
except TypeError:
return False
Allow using non-Premium keys for Google Maps client#!/usr/local/bin/python
# -*- coding: utf-8 -*-
import googlemaps
import base64
from exceptions import InvalidGoogleCredentials
class GoogleMapsClientFactory():
clients = {}
@classmethod
def get(cls, client_id, client_secret, channel=None):
cache_key = "{}:{}:{}".format(client_id, client_secret, channel)
client = cls.clients.get(cache_key)
if not client:
if client_id:
cls.assert_valid_crendentials(client_secret)
client = googlemaps.Client(
client_id=client_id,
client_secret=client_secret,
channel=channel)
else:
client = googlemaps.Client(key=client_secret)
cls.clients[cache_key] = client
return client
@classmethod
def assert_valid_crendentials(cls, client_secret):
if not cls.valid_credentials(client_secret):
raise InvalidGoogleCredentials
@staticmethod
def valid_credentials(client_secret):
try:
            # b64decode only fails if the string doesn't have correct b64 padding,
            # but doing the check this way lets us raise a clearer error than
            # TypeError: Incorrect padding
b64_secret = client_secret.replace('-', '+').replace('_', '/')
base64.b64decode(b64_secret)
return True
except TypeError:
return False
|
<commit_before>#!/usr/local/bin/python
# -*- coding: utf-8 -*-
import googlemaps
import base64
from exceptions import InvalidGoogleCredentials
class GoogleMapsClientFactory():
clients = {}
@classmethod
def get(cls, client_id, client_secret, channel=None):
cache_key = "{}:{}:{}".format(client_id, client_secret, channel)
client = cls.clients.get(cache_key)
if not client:
cls.assert_valid_crendentials(client_secret)
client = googlemaps.Client(
client_id=client_id,
client_secret=client_secret,
channel=channel)
cls.clients[cache_key] = client
return client
@classmethod
def assert_valid_crendentials(cls, client_secret):
if not cls.valid_credentials(client_secret):
raise InvalidGoogleCredentials
@staticmethod
def valid_credentials(client_secret):
try:
            # b64decode only fails if the string doesn't have correct b64 padding,
            # but doing the check this way lets us raise a clearer error than
            # TypeError: Incorrect padding
b64_secret = client_secret.replace('-', '+').replace('_', '/')
base64.b64decode(b64_secret)
return True
except TypeError:
return False
<commit_msg>Allow using non-Premium keys for Google Maps client<commit_after>#!/usr/local/bin/python
# -*- coding: utf-8 -*-
import googlemaps
import base64
from exceptions import InvalidGoogleCredentials
class GoogleMapsClientFactory():
clients = {}
@classmethod
def get(cls, client_id, client_secret, channel=None):
cache_key = "{}:{}:{}".format(client_id, client_secret, channel)
client = cls.clients.get(cache_key)
if not client:
if client_id:
cls.assert_valid_crendentials(client_secret)
client = googlemaps.Client(
client_id=client_id,
client_secret=client_secret,
channel=channel)
else:
client = googlemaps.Client(key=client_secret)
cls.clients[cache_key] = client
return client
@classmethod
def assert_valid_crendentials(cls, client_secret):
if not cls.valid_credentials(client_secret):
raise InvalidGoogleCredentials
@staticmethod
def valid_credentials(client_secret):
try:
            # b64decode only fails if the string doesn't have correct b64 padding,
            # but doing the check this way lets us raise a clearer error than
            # TypeError: Incorrect padding
b64_secret = client_secret.replace('-', '+').replace('_', '/')
base64.b64decode(b64_secret)
return True
except TypeError:
return False
|
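Editor's note: with this change the factory accepts both credential styles. A usage sketch under that assumption (both values below are placeholders, not real credentials):

# Premium plan: client_id plus a base64url-encoded client secret
premium = GoogleMapsClientFactory.get(
    'gme-example', 'c2VjcmV0LWJ5dGVz', channel='some_channel')

# Standard plan: client_id is None, so the secret slot carries an API key
standard = GoogleMapsClientFactory.get(None, 'AIzaPlaceholderKey')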
a4f99f9825fda7f40a8416c367c79dd2cfb8a35b
|
django_docutils/lib/settings.py
|
django_docutils/lib/settings.py
|
from django.conf import settings
BASED_LIB_RST = getattr(
settings,
'BASED_LIB_RST',
{
"font_awesome": {
"url_patterns": {
r'.*twitter.com.*': 'fab fa-twitter',
}
}
},
)
|
from django.conf import settings
BASED_LIB_RST = getattr(settings, 'BASED_LIB_RST', {})
INJECT_FONT_AWESOME = (
BASED_LIB_RST.get('font_awesome', {}).get('url_patterns') is not None
)
|
Remove example setting from defaults, add INJECT_FONT_AWESOME
|
Remove example setting from defaults, add INJECT_FONT_AWESOME
|
Python
|
mit
|
tony/django-docutils,tony/django-docutils
|
from django.conf import settings
BASED_LIB_RST = getattr(
settings,
'BASED_LIB_RST',
{
"font_awesome": {
"url_patterns": {
r'.*twitter.com.*': 'fab fa-twitter',
}
}
},
)
Remove example setting from defaults, add INJECT_FONT_AWESOME
|
from django.conf import settings
BASED_LIB_RST = getattr(settings, 'BASED_LIB_RST', {})
INJECT_FONT_AWESOME = (
BASED_LIB_RST.get('font_awesome', {}).get('url_patterns') is not None
)
|
<commit_before>from django.conf import settings
BASED_LIB_RST = getattr(
settings,
'BASED_LIB_RST',
{
"font_awesome": {
"url_patterns": {
r'.*twitter.com.*': 'fab fa-twitter',
}
}
},
)
<commit_msg>Remove example setting from defaults, add INJECT_FONT_AWESOME<commit_after>
|
from django.conf import settings
BASED_LIB_RST = getattr(settings, 'BASED_LIB_RST', {})
INJECT_FONT_AWESOME = (
BASED_LIB_RST.get('font_awesome', {}).get('url_patterns') is not None
)
|
from django.conf import settings
BASED_LIB_RST = getattr(
settings,
'BASED_LIB_RST',
{
"font_awesome": {
"url_patterns": {
r'.*twitter.com.*': 'fab fa-twitter',
}
}
},
)
Remove example setting from defaults, add INJECT_FONT_AWESOMEfrom django.conf import settings
BASED_LIB_RST = getattr(settings, 'BASED_LIB_RST', {})
INJECT_FONT_AWESOME = (
BASED_LIB_RST.get('font_awesome', {}).get('url_patterns') is not None
)
|
<commit_before>from django.conf import settings
BASED_LIB_RST = getattr(
settings,
'BASED_LIB_RST',
{
"font_awesome": {
"url_patterns": {
r'.*twitter.com.*': 'fab fa-twitter',
}
}
},
)
<commit_msg>Remove example setting from defaults, add INJECT_FONT_AWESOME<commit_after>from django.conf import settings
BASED_LIB_RST = getattr(settings, 'BASED_LIB_RST', {})
INJECT_FONT_AWESOME = (
BASED_LIB_RST.get('font_awesome', {}).get('url_patterns') is not None
)
|
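Editor's note: with the example mapping removed from the defaults, font-awesome injection becomes opt-in: INJECT_FONT_AWESOME is only True when the consuming project supplies url_patterns itself. A minimal project-settings sketch (values illustrative):

# settings.py of a consuming Django project (illustrative values)
BASED_LIB_RST = {
    'font_awesome': {
        'url_patterns': {
            r'.*twitter.com.*': 'fab fa-twitter',
        },
    },
}
# django_docutils.lib.settings then derives INJECT_FONT_AWESOME = True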
f6c36bbb5b5afec1a029213557b722e50dd6aaaa
|
test/test_run_script.py
|
test/test_run_script.py
|
def test_dummy():
assert True
|
import subprocess
import pytest
def test_filter(tmp_path):
unit_test = tmp_path.joinpath('some_unit_test.sv')
unit_test.write_text('''
module some_unit_test;
import svunit_pkg::*;
`include "svunit_defines.svh"
string name = "some_ut";
svunit_testcase svunit_ut;
function void build();
svunit_ut = new(name);
endfunction
task setup();
svunit_ut.setup();
endtask
task teardown();
svunit_ut.teardown();
endtask
`SVUNIT_TESTS_BEGIN
`SVTEST(some_failing_test)
`FAIL_IF(1)
`SVTEST_END
`SVTEST(some_passing_test)
`FAIL_IF(0)
`SVTEST_END
`SVUNIT_TESTS_END
endmodule
''')
log = tmp_path.joinpath('run.log')
print('Filtering only the passing test should block the fail')
subprocess.check_call(['runSVUnit', '--filter', 'some_ut.some_passing_test'], cwd=tmp_path)
assert 'FAILED' not in log.read_text()
print('No explicit filter should cause both tests to run, hence trigger the fail')
subprocess.check_call(['runSVUnit'], cwd=tmp_path)
assert 'FAILED' in log.read_text()
|
Add test for '--filter' option
|
Add test for '--filter' option
The goal now is to make this test pass by implementing the necessary
production code.
|
Python
|
apache-2.0
|
svunit/svunit,svunit/svunit,svunit/svunit
|
def test_dummy():
assert True
Add test for '--filter' option
The goal now is to make this test pass by implementing the necessary
production code.
|
import subprocess
import pytest
def test_filter(tmp_path):
unit_test = tmp_path.joinpath('some_unit_test.sv')
unit_test.write_text('''
module some_unit_test;
import svunit_pkg::*;
`include "svunit_defines.svh"
string name = "some_ut";
svunit_testcase svunit_ut;
function void build();
svunit_ut = new(name);
endfunction
task setup();
svunit_ut.setup();
endtask
task teardown();
svunit_ut.teardown();
endtask
`SVUNIT_TESTS_BEGIN
`SVTEST(some_failing_test)
`FAIL_IF(1)
`SVTEST_END
`SVTEST(some_passing_test)
`FAIL_IF(0)
`SVTEST_END
`SVUNIT_TESTS_END
endmodule
''')
log = tmp_path.joinpath('run.log')
print('Filtering only the passing test should block the fail')
subprocess.check_call(['runSVUnit', '--filter', 'some_ut.some_passing_test'], cwd=tmp_path)
assert 'FAILED' not in log.read_text()
print('No explicit filter should cause both tests to run, hence trigger the fail')
subprocess.check_call(['runSVUnit'], cwd=tmp_path)
assert 'FAILED' in log.read_text()
|
<commit_before>def test_dummy():
assert True
<commit_msg>Add test for '--filter' option
The goal now is to make this test pass by implementing the necessary
production code.<commit_after>
|
import subprocess
import pytest
def test_filter(tmp_path):
unit_test = tmp_path.joinpath('some_unit_test.sv')
unit_test.write_text('''
module some_unit_test;
import svunit_pkg::*;
`include "svunit_defines.svh"
string name = "some_ut";
svunit_testcase svunit_ut;
function void build();
svunit_ut = new(name);
endfunction
task setup();
svunit_ut.setup();
endtask
task teardown();
svunit_ut.teardown();
endtask
`SVUNIT_TESTS_BEGIN
`SVTEST(some_failing_test)
`FAIL_IF(1)
`SVTEST_END
`SVTEST(some_passing_test)
`FAIL_IF(0)
`SVTEST_END
`SVUNIT_TESTS_END
endmodule
''')
log = tmp_path.joinpath('run.log')
print('Filtering only the passing test should block the fail')
subprocess.check_call(['runSVUnit', '--filter', 'some_ut.some_passing_test'], cwd=tmp_path)
assert 'FAILED' not in log.read_text()
print('No explicit filter should cause both tests to run, hence trigger the fail')
subprocess.check_call(['runSVUnit'], cwd=tmp_path)
assert 'FAILED' in log.read_text()
|
def test_dummy():
assert True
Add test for '--filter' option
The goal now is to make this test pass by implementing the necessary
production code.import subprocess
import pytest
def test_filter(tmp_path):
unit_test = tmp_path.joinpath('some_unit_test.sv')
unit_test.write_text('''
module some_unit_test;
import svunit_pkg::*;
`include "svunit_defines.svh"
string name = "some_ut";
svunit_testcase svunit_ut;
function void build();
svunit_ut = new(name);
endfunction
task setup();
svunit_ut.setup();
endtask
task teardown();
svunit_ut.teardown();
endtask
`SVUNIT_TESTS_BEGIN
`SVTEST(some_failing_test)
`FAIL_IF(1)
`SVTEST_END
`SVTEST(some_passing_test)
`FAIL_IF(0)
`SVTEST_END
`SVUNIT_TESTS_END
endmodule
''')
log = tmp_path.joinpath('run.log')
print('Filtering only the passing test should block the fail')
subprocess.check_call(['runSVUnit', '--filter', 'some_ut.some_passing_test'], cwd=tmp_path)
assert 'FAILED' not in log.read_text()
print('No explicit filter should cause both tests to run, hence trigger the fail')
subprocess.check_call(['runSVUnit'], cwd=tmp_path)
assert 'FAILED' in log.read_text()
|
<commit_before>def test_dummy():
assert True
<commit_msg>Add test for '--filter' option
The goal now is to make this test pass by implementing the necessary
production code.<commit_after>import subprocess
import pytest
def test_filter(tmp_path):
unit_test = tmp_path.joinpath('some_unit_test.sv')
unit_test.write_text('''
module some_unit_test;
import svunit_pkg::*;
`include "svunit_defines.svh"
string name = "some_ut";
svunit_testcase svunit_ut;
function void build();
svunit_ut = new(name);
endfunction
task setup();
svunit_ut.setup();
endtask
task teardown();
svunit_ut.teardown();
endtask
`SVUNIT_TESTS_BEGIN
`SVTEST(some_failing_test)
`FAIL_IF(1)
`SVTEST_END
`SVTEST(some_passing_test)
`FAIL_IF(0)
`SVTEST_END
`SVUNIT_TESTS_END
endmodule
''')
log = tmp_path.joinpath('run.log')
print('Filtering only the passing test should block the fail')
subprocess.check_call(['runSVUnit', '--filter', 'some_ut.some_passing_test'], cwd=tmp_path)
assert 'FAILED' not in log.read_text()
print('No explicit filter should cause both tests to run, hence trigger the fail')
subprocess.check_call(['runSVUnit'], cwd=tmp_path)
assert 'FAILED' in log.read_text()
|
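Editor's note: the new test depends on pytest's tmp_path fixture and on runSVUnit writing run.log into its working directory. The run-then-grep step it repeats could be factored as below (a sketch of the same pattern, assuming those behaviours):

import subprocess

def run_and_check(tmp_path, extra_args, expect_failure):
    # Run the tool in the temp dir, then inspect the log it is assumed
    # to write there; mirrors the two assertions in the test above.
    subprocess.check_call(['runSVUnit'] + extra_args, cwd=tmp_path)
    log_text = tmp_path.joinpath('run.log').read_text()
    assert ('FAILED' in log_text) == expect_failure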
f2109a486b3459a3fbf4e5e7db92780f1765a5a8
|
test_app/urls.py
|
test_app/urls.py
|
from django.conf.urls.defaults import patterns, url, include
from django.contrib import admin
from django.http import HttpResponseNotFound, HttpResponseServerError
from test_app import views
from waffle.views import wafflejs
handler404 = lambda r: HttpResponseNotFound()
handler500 = lambda r: HttpResponseServerError()
admin.autodiscover()
urlpatterns = patterns('',
url(r'^flag_in_view', views.flag_in_view, name='flag_in_view'),
url(r'^wafflejs$', wafflejs, name='wafflejs'),
url(r'^switch-on', views.switched_view),
url(r'^switch-off', views.switched_off_view),
url(r'^flag-on', views.flagged_view),
url(r'^flag-off', views.flagged_off_view),
(r'^admin/', include(admin.site.urls))
)
|
from django.conf.urls.defaults import patterns, url, include
from django.contrib import admin
from django.http import HttpResponseNotFound, HttpResponseServerError
from test_app import views
handler404 = lambda r: HttpResponseNotFound()
handler500 = lambda r: HttpResponseServerError()
admin.autodiscover()
urlpatterns = patterns('',
url(r'^flag_in_view', views.flag_in_view, name='flag_in_view'),
url(r'^switch-on', views.switched_view),
url(r'^switch-off', views.switched_off_view),
url(r'^flag-on', views.flagged_view),
url(r'^flag-off', views.flagged_off_view),
(r'^', include('waffle.urls')),
(r'^admin/', include(admin.site.urls))
)
|
Use new URLs module in test_app.
|
Use new URLs module in test_app.
|
Python
|
bsd-3-clause
|
mark-adams/django-waffle,festicket/django-waffle,hwkns/django-waffle,JeLoueMonCampingCar/django-waffle,crccheck/django-waffle,ilanbm/django-waffle,mwaaas/django-waffle-session,11craft/django-waffle,VladimirFilonov/django-waffle,willkg/django-waffle,rodgomes/django-waffle,festicket/django-waffle,crccheck/django-waffle,rlr/django-waffle,groovecoder/django-waffle,engagespark/django-waffle,rlr/django-waffle,mwaaas/django-waffle-session,safarijv/django-waffle,rsalmaso/django-waffle,safarijv/django-waffle,rodgomes/django-waffle,ilanbm/django-waffle,engagespark/django-waffle,festicket/django-waffle,styleseat/django-waffle,JeLoueMonCampingCar/django-waffle,engagespark/django-waffle,safarijv/django-waffle,styleseat/django-waffle,JeLoueMonCampingCar/django-waffle,VladimirFilonov/django-waffle,VladimirFilonov/django-waffle,ekohl/django-waffle,TwigWorld/django-waffle,TwigWorld/django-waffle,webus/django-waffle,mark-adams/django-waffle,VladimirFilonov/django-waffle,paulcwatts/django-waffle,ilanbm/django-waffle,rodgomes/django-waffle,crccheck/django-waffle,styleseat/django-waffle,paulcwatts/django-waffle,willkg/django-waffle,mark-adams/django-waffle,rlr/django-waffle,hwkns/django-waffle,isotoma/django-waffle,11craft/django-waffle,groovecoder/django-waffle,rsalmaso/django-waffle,rodgomes/django-waffle,mwaaas/django-waffle-session,rlr/django-waffle,paulcwatts/django-waffle,hwkns/django-waffle,festicket/django-waffle,paulcwatts/django-waffle,mark-adams/django-waffle,groovecoder/django-waffle,styleseat/django-waffle,isotoma/django-waffle,mwaaas/django-waffle-session,hwkns/django-waffle,JeLoueMonCampingCar/django-waffle,crccheck/django-waffle,isotoma/django-waffle,TwigWorld/django-waffle,rsalmaso/django-waffle,webus/django-waffle,safarijv/django-waffle,engagespark/django-waffle,ilanbm/django-waffle,isotoma/django-waffle,groovecoder/django-waffle,webus/django-waffle,webus/django-waffle,ekohl/django-waffle,rsalmaso/django-waffle
|
from django.conf.urls.defaults import patterns, url, include
from django.contrib import admin
from django.http import HttpResponseNotFound, HttpResponseServerError
from test_app import views
from waffle.views import wafflejs
handler404 = lambda r: HttpResponseNotFound()
handler500 = lambda r: HttpResponseServerError()
admin.autodiscover()
urlpatterns = patterns('',
url(r'^flag_in_view', views.flag_in_view, name='flag_in_view'),
url(r'^wafflejs$', wafflejs, name='wafflejs'),
url(r'^switch-on', views.switched_view),
url(r'^switch-off', views.switched_off_view),
url(r'^flag-on', views.flagged_view),
url(r'^flag-off', views.flagged_off_view),
(r'^admin/', include(admin.site.urls))
)
Use new URLs module in test_app.
|
from django.conf.urls.defaults import patterns, url, include
from django.contrib import admin
from django.http import HttpResponseNotFound, HttpResponseServerError
from test_app import views
handler404 = lambda r: HttpResponseNotFound()
handler500 = lambda r: HttpResponseServerError()
admin.autodiscover()
urlpatterns = patterns('',
url(r'^flag_in_view', views.flag_in_view, name='flag_in_view'),
url(r'^switch-on', views.switched_view),
url(r'^switch-off', views.switched_off_view),
url(r'^flag-on', views.flagged_view),
url(r'^flag-off', views.flagged_off_view),
(r'^', include('waffle.urls')),
(r'^admin/', include(admin.site.urls))
)
|
<commit_before>from django.conf.urls.defaults import patterns, url, include
from django.contrib import admin
from django.http import HttpResponseNotFound, HttpResponseServerError
from test_app import views
from waffle.views import wafflejs
handler404 = lambda r: HttpResponseNotFound()
handler500 = lambda r: HttpResponseServerError()
admin.autodiscover()
urlpatterns = patterns('',
url(r'^flag_in_view', views.flag_in_view, name='flag_in_view'),
url(r'^wafflejs$', wafflejs, name='wafflejs'),
url(r'^switch-on', views.switched_view),
url(r'^switch-off', views.switched_off_view),
url(r'^flag-on', views.flagged_view),
url(r'^flag-off', views.flagged_off_view),
(r'^admin/', include(admin.site.urls))
)
<commit_msg>Use new URLs module in test_app.<commit_after>
|
from django.conf.urls.defaults import patterns, url, include
from django.contrib import admin
from django.http import HttpResponseNotFound, HttpResponseServerError
from test_app import views
handler404 = lambda r: HttpResponseNotFound()
handler500 = lambda r: HttpResponseServerError()
admin.autodiscover()
urlpatterns = patterns('',
url(r'^flag_in_view', views.flag_in_view, name='flag_in_view'),
url(r'^switch-on', views.switched_view),
url(r'^switch-off', views.switched_off_view),
url(r'^flag-on', views.flagged_view),
url(r'^flag-off', views.flagged_off_view),
(r'^', include('waffle.urls')),
(r'^admin/', include(admin.site.urls))
)
|
from django.conf.urls.defaults import patterns, url, include
from django.contrib import admin
from django.http import HttpResponseNotFound, HttpResponseServerError
from test_app import views
from waffle.views import wafflejs
handler404 = lambda r: HttpResponseNotFound()
handler500 = lambda r: HttpResponseServerError()
admin.autodiscover()
urlpatterns = patterns('',
url(r'^flag_in_view', views.flag_in_view, name='flag_in_view'),
url(r'^wafflejs$', wafflejs, name='wafflejs'),
url(r'^switch-on', views.switched_view),
url(r'^switch-off', views.switched_off_view),
url(r'^flag-on', views.flagged_view),
url(r'^flag-off', views.flagged_off_view),
(r'^admin/', include(admin.site.urls))
)
Use new URLs module in test_app.from django.conf.urls.defaults import patterns, url, include
from django.contrib import admin
from django.http import HttpResponseNotFound, HttpResponseServerError
from test_app import views
handler404 = lambda r: HttpResponseNotFound()
handler500 = lambda r: HttpResponseServerError()
admin.autodiscover()
urlpatterns = patterns('',
url(r'^flag_in_view', views.flag_in_view, name='flag_in_view'),
url(r'^switch-on', views.switched_view),
url(r'^switch-off', views.switched_off_view),
url(r'^flag-on', views.flagged_view),
url(r'^flag-off', views.flagged_off_view),
(r'^', include('waffle.urls')),
(r'^admin/', include(admin.site.urls))
)
|
<commit_before>from django.conf.urls.defaults import patterns, url, include
from django.contrib import admin
from django.http import HttpResponseNotFound, HttpResponseServerError
from test_app import views
from waffle.views import wafflejs
handler404 = lambda r: HttpResponseNotFound()
handler500 = lambda r: HttpResponseServerError()
admin.autodiscover()
urlpatterns = patterns('',
url(r'^flag_in_view', views.flag_in_view, name='flag_in_view'),
url(r'^wafflejs$', wafflejs, name='wafflejs'),
url(r'^switch-on', views.switched_view),
url(r'^switch-off', views.switched_off_view),
url(r'^flag-on', views.flagged_view),
url(r'^flag-off', views.flagged_off_view),
(r'^admin/', include(admin.site.urls))
)
<commit_msg>Use new URLs module in test_app.<commit_after>from django.conf.urls.defaults import patterns, url, include
from django.contrib import admin
from django.http import HttpResponseNotFound, HttpResponseServerError
from test_app import views
handler404 = lambda r: HttpResponseNotFound()
handler500 = lambda r: HttpResponseServerError()
admin.autodiscover()
urlpatterns = patterns('',
url(r'^flag_in_view', views.flag_in_view, name='flag_in_view'),
url(r'^switch-on', views.switched_view),
url(r'^switch-off', views.switched_off_view),
url(r'^flag-on', views.flagged_view),
url(r'^flag-off', views.flagged_off_view),
(r'^', include('waffle.urls')),
(r'^admin/', include(admin.site.urls))
)
|
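Editor's note: the commit replaces the hard-coded wafflejs route with include('waffle.urls'), so the library's own URLconf owns that mapping and the test app picks up future routes automatically. The included module would have roughly this shape (illustrative only, not the library's exact source):

# waffle/urls.py -- illustrative shape for the era's Django API
from django.conf.urls.defaults import patterns, url

urlpatterns = patterns('',
    url(r'^wafflejs$', 'waffle.views.wafflejs', name='wafflejs'),
)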
88d757fa5ccda207fb29502ca1c8c7b6bda6d785
|
tst/utils.py
|
tst/utils.py
|
from __future__ import print_function
import sys
import string
import json
from colors import *
def is_posix_filename(name, extra_chars=""):
CHARS = string.letters + string.digits + "._-" + extra_chars
return all(c in CHARS for c in name)
def cprint(color, msg, file=sys.stdout, end='\n'):
if type(msg) is unicode:
data = msg
elif type(msg) is str:
data = msg.__str__()
print(color + data + RESET, file=file, end=end)
def _assert(condition, msg):
if condition:
return
cprint(LRED, msg)
sys.exit(1)
def to_unicode(obj, encoding='utf-8'):
assert isinstance(obj, basestring), type(obj)
if isinstance(obj, unicode):
return obj
for encoding in ['utf-8', 'latin1']:
try:
obj = unicode(obj, encoding)
return obj
except UnicodeDecodeError:
pass
assert False, "tst: non-recognized encoding"
def data2json(data):
def date_handler(obj):
if hasattr(obj, 'isoformat'):
return obj.isoformat()
elif hasattr(obj, 'email'):
return obj.email()
return obj
return json.dumps(
data,
default=date_handler,
indent=2,
separators=(',', ': '),
sort_keys=True,
ensure_ascii=False)
|
from __future__ import print_function
import sys
import string
import json
from colors import *
def is_posix_filename(name, extra_chars=""):
CHARS = string.letters + string.digits + "._-" + extra_chars
return all(c in CHARS for c in name)
def cprint(color, msg, file=sys.stderr, end='\n'):
if type(msg) is unicode:
data = msg
elif type(msg) is str:
data = msg.__str__()
print(color + data + RESET, file=file, end=end)
def _assert(condition, msg):
if condition:
return
cprint(LRED, msg)
sys.exit(1)
def to_unicode(obj, encoding='utf-8'):
assert isinstance(obj, basestring), type(obj)
if isinstance(obj, unicode):
return obj
for encoding in ['utf-8', 'latin1']:
try:
obj = unicode(obj, encoding)
return obj
except UnicodeDecodeError:
pass
assert False, "tst: non-recognized encoding"
def data2json(data):
def date_handler(obj):
if hasattr(obj, 'isoformat'):
return obj.isoformat()
elif hasattr(obj, 'email'):
return obj.email()
return obj
return json.dumps(
data,
default=date_handler,
indent=2,
separators=(',', ': '),
sort_keys=True,
ensure_ascii=False)
|
Fix cprint: use stderr not stdout
|
Fix cprint: use stderr not stdout
|
Python
|
agpl-3.0
|
daltonserey/tst,daltonserey/tst
|
from __future__ import print_function
import sys
import string
import json
from colors import *
def is_posix_filename(name, extra_chars=""):
CHARS = string.letters + string.digits + "._-" + extra_chars
return all(c in CHARS for c in name)
def cprint(color, msg, file=sys.stdout, end='\n'):
if type(msg) is unicode:
data = msg
elif type(msg) is str:
data = msg.__str__()
print(color + data + RESET, file=file, end=end)
def _assert(condition, msg):
if condition:
return
cprint(LRED, msg)
sys.exit(1)
def to_unicode(obj, encoding='utf-8'):
assert isinstance(obj, basestring), type(obj)
if isinstance(obj, unicode):
return obj
for encoding in ['utf-8', 'latin1']:
try:
obj = unicode(obj, encoding)
return obj
except UnicodeDecodeError:
pass
assert False, "tst: non-recognized encoding"
def data2json(data):
def date_handler(obj):
if hasattr(obj, 'isoformat'):
return obj.isoformat()
elif hasattr(obj, 'email'):
return obj.email()
return obj
return json.dumps(
data,
default=date_handler,
indent=2,
separators=(',', ': '),
sort_keys=True,
ensure_ascii=False)
Fix cprint: use stderr not stdout
|
from __future__ import print_function
import sys
import string
import json
from colors import *
def is_posix_filename(name, extra_chars=""):
CHARS = string.letters + string.digits + "._-" + extra_chars
return all(c in CHARS for c in name)
def cprint(color, msg, file=sys.stderr, end='\n'):
if type(msg) is unicode:
data = msg
elif type(msg) is str:
data = msg.__str__()
print(color + data + RESET, file=file, end=end)
def _assert(condition, msg):
if condition:
return
cprint(LRED, msg)
sys.exit(1)
def to_unicode(obj, encoding='utf-8'):
assert isinstance(obj, basestring), type(obj)
if isinstance(obj, unicode):
return obj
for encoding in ['utf-8', 'latin1']:
try:
obj = unicode(obj, encoding)
return obj
except UnicodeDecodeError:
pass
assert False, "tst: non-recognized encoding"
def data2json(data):
def date_handler(obj):
if hasattr(obj, 'isoformat'):
return obj.isoformat()
elif hasattr(obj, 'email'):
return obj.email()
return obj
return json.dumps(
data,
default=date_handler,
indent=2,
separators=(',', ': '),
sort_keys=True,
ensure_ascii=False)
|
<commit_before>from __future__ import print_function
import sys
import string
import json
from colors import *
def is_posix_filename(name, extra_chars=""):
CHARS = string.letters + string.digits + "._-" + extra_chars
return all(c in CHARS for c in name)
def cprint(color, msg, file=sys.stdout, end='\n'):
if type(msg) is unicode:
data = msg
elif type(msg) is str:
data = msg.__str__()
print(color + data + RESET, file=file, end=end)
def _assert(condition, msg):
if condition:
return
cprint(LRED, msg)
sys.exit(1)
def to_unicode(obj, encoding='utf-8'):
assert isinstance(obj, basestring), type(obj)
if isinstance(obj, unicode):
return obj
for encoding in ['utf-8', 'latin1']:
try:
obj = unicode(obj, encoding)
return obj
except UnicodeDecodeError:
pass
assert False, "tst: non-recognized encoding"
def data2json(data):
def date_handler(obj):
if hasattr(obj, 'isoformat'):
return obj.isoformat()
elif hasattr(obj, 'email'):
return obj.email()
return obj
return json.dumps(
data,
default=date_handler,
indent=2,
separators=(',', ': '),
sort_keys=True,
ensure_ascii=False)
<commit_msg>Fix cprint: use stderr not stdout<commit_after>
|
from __future__ import print_function
import sys
import string
import json
from colors import *
def is_posix_filename(name, extra_chars=""):
CHARS = string.letters + string.digits + "._-" + extra_chars
return all(c in CHARS for c in name)
def cprint(color, msg, file=sys.stderr, end='\n'):
if type(msg) is unicode:
data = msg
elif type(msg) is str:
data = msg.__str__()
print(color + data + RESET, file=file, end=end)
def _assert(condition, msg):
if condition:
return
cprint(LRED, msg)
sys.exit(1)
def to_unicode(obj, encoding='utf-8'):
assert isinstance(obj, basestring), type(obj)
if isinstance(obj, unicode):
return obj
for encoding in ['utf-8', 'latin1']:
try:
obj = unicode(obj, encoding)
return obj
except UnicodeDecodeError:
pass
assert False, "tst: non-recognized encoding"
def data2json(data):
def date_handler(obj):
if hasattr(obj, 'isoformat'):
return obj.isoformat()
elif hasattr(obj, 'email'):
return obj.email()
return obj
return json.dumps(
data,
default=date_handler,
indent=2,
separators=(',', ': '),
sort_keys=True,
ensure_ascii=False)
|
from __future__ import print_function
import sys
import string
import json
from colors import *
def is_posix_filename(name, extra_chars=""):
CHARS = string.letters + string.digits + "._-" + extra_chars
return all(c in CHARS for c in name)
def cprint(color, msg, file=sys.stdout, end='\n'):
if type(msg) is unicode:
data = msg
elif type(msg) is str:
data = msg.__str__()
print(color + data + RESET, file=file, end=end)
def _assert(condition, msg):
if condition:
return
cprint(LRED, msg)
sys.exit(1)
def to_unicode(obj, encoding='utf-8'):
assert isinstance(obj, basestring), type(obj)
if isinstance(obj, unicode):
return obj
for encoding in ['utf-8', 'latin1']:
try:
obj = unicode(obj, encoding)
return obj
except UnicodeDecodeError:
pass
assert False, "tst: non-recognized encoding"
def data2json(data):
def date_handler(obj):
if hasattr(obj, 'isoformat'):
return obj.isoformat()
elif hasattr(obj, 'email'):
return obj.email()
return obj
return json.dumps(
data,
default=date_handler,
indent=2,
separators=(',', ': '),
sort_keys=True,
ensure_ascii=False)
Fix cprint: use stderr not stdoutfrom __future__ import print_function
import sys
import string
import json
from colors import *
def is_posix_filename(name, extra_chars=""):
CHARS = string.letters + string.digits + "._-" + extra_chars
return all(c in CHARS for c in name)
def cprint(color, msg, file=sys.stderr, end='\n'):
if type(msg) is unicode:
data = msg
elif type(msg) is str:
data = msg.__str__()
print(color + data + RESET, file=file, end=end)
def _assert(condition, msg):
if condition:
return
cprint(LRED, msg)
sys.exit(1)
def to_unicode(obj, encoding='utf-8'):
assert isinstance(obj, basestring), type(obj)
if isinstance(obj, unicode):
return obj
for encoding in ['utf-8', 'latin1']:
try:
obj = unicode(obj, encoding)
return obj
except UnicodeDecodeError:
pass
assert False, "tst: non-recognized encoding"
def data2json(data):
def date_handler(obj):
if hasattr(obj, 'isoformat'):
return obj.isoformat()
elif hasattr(obj, 'email'):
return obj.email()
return obj
return json.dumps(
data,
default=date_handler,
indent=2,
separators=(',', ': '),
sort_keys=True,
ensure_ascii=False)
|
<commit_before>from __future__ import print_function
import sys
import string
import json
from colors import *
def is_posix_filename(name, extra_chars=""):
CHARS = string.letters + string.digits + "._-" + extra_chars
return all(c in CHARS for c in name)
def cprint(color, msg, file=sys.stdout, end='\n'):
if type(msg) is unicode:
data = msg
elif type(msg) is str:
data = msg.__str__()
print(color + data + RESET, file=file, end=end)
def _assert(condition, msg):
if condition:
return
cprint(LRED, msg)
sys.exit(1)
def to_unicode(obj, encoding='utf-8'):
assert isinstance(obj, basestring), type(obj)
if isinstance(obj, unicode):
return obj
for encoding in ['utf-8', 'latin1']:
try:
obj = unicode(obj, encoding)
return obj
except UnicodeDecodeError:
pass
assert False, "tst: non-recognized encoding"
def data2json(data):
def date_handler(obj):
if hasattr(obj, 'isoformat'):
return obj.isoformat()
elif hasattr(obj, 'email'):
return obj.email()
return obj
return json.dumps(
data,
default=date_handler,
indent=2,
separators=(',', ': '),
sort_keys=True,
ensure_ascii=False)
<commit_msg>Fix cprint: use stderr not stdout<commit_after>from __future__ import print_function
import sys
import string
import json
from colors import *
def is_posix_filename(name, extra_chars=""):
CHARS = string.letters + string.digits + "._-" + extra_chars
return all(c in CHARS for c in name)
def cprint(color, msg, file=sys.stderr, end='\n'):
if type(msg) is unicode:
data = msg
elif type(msg) is str:
data = msg.__str__()
print(color + data + RESET, file=file, end=end)
def _assert(condition, msg):
if condition:
return
cprint(LRED, msg)
sys.exit(1)
def to_unicode(obj, encoding='utf-8'):
assert isinstance(obj, basestring), type(obj)
if isinstance(obj, unicode):
return obj
for encoding in ['utf-8', 'latin1']:
try:
obj = unicode(obj, encoding)
return obj
except UnicodeDecodeError:
pass
assert False, "tst: non-recognized encoding"
def data2json(data):
def date_handler(obj):
if hasattr(obj, 'isoformat'):
return obj.isoformat()
elif hasattr(obj, 'email'):
return obj.email()
return obj
return json.dumps(
data,
default=date_handler,
indent=2,
separators=(',', ': '),
sort_keys=True,
ensure_ascii=False)
|
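Editor's note on the record above: routing colored diagnostics to sys.stderr keeps stdout clean for piped, machine-readable output, which is presumably why the default changed. A minimal Python 3 sketch of the same idea; the color constants are illustrative stand-ins for the colors module the original imports:
import sys
RESET = "\033[0m"   # ANSI reset; stands in for colors.RESET
LRED = "\033[91m"   # bright red; stands in for colors.LRED
def cprint(color, msg, file=sys.stderr, end="\n"):
    # Diagnostics go to stderr by default so program output on stdout
    # stays uncontaminated by escape codes and status text.
    print(color + str(msg) + RESET, file=file, end=end)
cprint(LRED, "tst: something went wrong")   # red text on stderr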
666b7b3597b857a3bfc99916354c6fc5bd15d68b
|
scripts/generate-user-email-list.py
|
scripts/generate-user-email-list.py
|
"""
Usage:
scripts/generate-user-email-list.py <data_api_url> <data_api_token>
"""
import csv
import sys
from docopt import docopt
from dmutils.apiclient import DataAPIClient
def generate_user_email_list(data_api_url, data_api_token):
client = DataAPIClient(data_api_url, data_api_token)
writer = csv.writer(sys.stdout, delimiter=',', quotechar='"')
for user in client.find_users_iter():
if user['active'] and user['role'] != 'supplier':
writer.writerow([
user['emailAddress'],
user['name'],
user['supplier']['supplierId'],
user['supplier']['name']])
if __name__ == '__main__':
arguments = docopt(__doc__)
generate_user_email_list(
data_api_url=arguments['<data_api_url>'],
data_api_token=arguments['<data_api_token>'])
|
"""
Usage:
scripts/generate-user-email-list.py <data_api_url> <data_api_token>
"""
import csv
import sys
from docopt import docopt
from dmutils.apiclient import DataAPIClient
def generate_user_email_list(data_api_url, data_api_token):
client = DataAPIClient(data_api_url, data_api_token)
writer = csv.writer(sys.stdout, delimiter=',', quotechar='"')
for user in client.find_users_iter():
if user['active'] and user['role'] == 'supplier':
writer.writerow([
user['emailAddress'],
user['name'],
user['supplier']['supplierId'],
user['supplier']['name']])
if __name__ == '__main__':
arguments = docopt(__doc__)
generate_user_email_list(
data_api_url=arguments['<data_api_url>'],
data_api_token=arguments['<data_api_token>'])
|
Fix typo in role check
|
Fix typo in role check
|
Python
|
mit
|
alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api
|
"""
Usage:
scripts/generate-user-email-list.py <data_api_url> <data_api_token>
"""
import csv
import sys
from docopt import docopt
from dmutils.apiclient import DataAPIClient
def generate_user_email_list(data_api_url, data_api_token):
client = DataAPIClient(data_api_url, data_api_token)
writer = csv.writer(sys.stdout, delimiter=',', quotechar='"')
for user in client.find_users_iter():
if user['active'] and user['role'] != 'supplier':
writer.writerow([
user['emailAddress'],
user['name'],
user['supplier']['supplierId'],
user['supplier']['name']])
if __name__ == '__main__':
arguments = docopt(__doc__)
generate_user_email_list(
data_api_url=arguments['<data_api_url>'],
data_api_token=arguments['<data_api_token>'])
Fix typo in role check
|
"""
Usage:
scripts/generate-user-email-list.py <data_api_url> <data_api_token>
"""
import csv
import sys
from docopt import docopt
from dmutils.apiclient import DataAPIClient
def generate_user_email_list(data_api_url, data_api_token):
client = DataAPIClient(data_api_url, data_api_token)
writer = csv.writer(sys.stdout, delimiter=',', quotechar='"')
for user in client.find_users_iter():
if user['active'] and user['role'] == 'supplier':
writer.writerow([
user['emailAddress'],
user['name'],
user['supplier']['supplierId'],
user['supplier']['name']])
if __name__ == '__main__':
arguments = docopt(__doc__)
generate_user_email_list(
data_api_url=arguments['<data_api_url>'],
data_api_token=arguments['<data_api_token>'])
|
<commit_before>"""
Usage:
scripts/generate-user-email-list.py <data_api_url> <data_api_token>
"""
import csv
import sys
from docopt import docopt
from dmutils.apiclient import DataAPIClient
def generate_user_email_list(data_api_url, data_api_token):
client = DataAPIClient(data_api_url, data_api_token)
writer = csv.writer(sys.stdout, delimiter=',', quotechar='"')
for user in client.find_users_iter():
if user['active'] and user['role'] != 'supplier':
writer.writerow([
user['emailAddress'],
user['name'],
user['supplier']['supplierId'],
user['supplier']['name']])
if __name__ == '__main__':
arguments = docopt(__doc__)
generate_user_email_list(
data_api_url=arguments['<data_api_url>'],
data_api_token=arguments['<data_api_token>'])
<commit_msg>Fix typo in role check<commit_after>
|
"""
Usage:
scripts/generate-user-email-list.py <data_api_url> <data_api_token>
"""
import csv
import sys
from docopt import docopt
from dmutils.apiclient import DataAPIClient
def generate_user_email_list(data_api_url, data_api_token):
client = DataAPIClient(data_api_url, data_api_token)
writer = csv.writer(sys.stdout, delimiter=',', quotechar='"')
for user in client.find_users_iter():
if user['active'] and user['role'] == 'supplier':
writer.writerow([
user['emailAddress'],
user['name'],
user['supplier']['supplierId'],
user['supplier']['name']])
if __name__ == '__main__':
arguments = docopt(__doc__)
generate_user_email_list(
data_api_url=arguments['<data_api_url>'],
data_api_token=arguments['<data_api_token>'])
|
"""
Usage:
scripts/generate-user-email-list.py <data_api_url> <data_api_token>
"""
import csv
import sys
from docopt import docopt
from dmutils.apiclient import DataAPIClient
def generate_user_email_list(data_api_url, data_api_token):
client = DataAPIClient(data_api_url, data_api_token)
writer = csv.writer(sys.stdout, delimiter=',', quotechar='"')
for user in client.find_users_iter():
if user['active'] and user['role'] != 'supplier':
writer.writerow([
user['emailAddress'],
user['name'],
user['supplier']['supplierId'],
user['supplier']['name']])
if __name__ == '__main__':
arguments = docopt(__doc__)
generate_user_email_list(
data_api_url=arguments['<data_api_url>'],
data_api_token=arguments['<data_api_token>'])
Fix typo in role check
"""
Usage:
scripts/generate-user-email-list.py <data_api_url> <data_api_token>
"""
import csv
import sys
from docopt import docopt
from dmutils.apiclient import DataAPIClient
def generate_user_email_list(data_api_url, data_api_token):
client = DataAPIClient(data_api_url, data_api_token)
writer = csv.writer(sys.stdout, delimiter=',', quotechar='"')
for user in client.find_users_iter():
if user['active'] and user['role'] == 'supplier':
writer.writerow([
user['emailAddress'],
user['name'],
user['supplier']['supplierId'],
user['supplier']['name']])
if __name__ == '__main__':
arguments = docopt(__doc__)
generate_user_email_list(
data_api_url=arguments['<data_api_url>'],
data_api_token=arguments['<data_api_token>'])
|
<commit_before>"""
Usage:
scripts/generate-user-email-list.py <data_api_url> <data_api_token>
"""
import csv
import sys
from docopt import docopt
from dmutils.apiclient import DataAPIClient
def generate_user_email_list(data_api_url, data_api_token):
client = DataAPIClient(data_api_url, data_api_token)
writer = csv.writer(sys.stdout, delimiter=',', quotechar='"')
for user in client.find_users_iter():
if user['active'] and user['role'] != 'supplier':
writer.writerow([
user['emailAddress'],
user['name'],
user['supplier']['supplierId'],
user['supplier']['name']])
if __name__ == '__main__':
arguments = docopt(__doc__)
generate_user_email_list(
data_api_url=arguments['<data_api_url>'],
data_api_token=arguments['<data_api_token>'])
<commit_msg>Fix typo in role check<commit_after>"""
Usage:
scripts/generate-user-email-list.py <data_api_url> <data_api_token>
"""
import csv
import sys
from docopt import docopt
from dmutils.apiclient import DataAPIClient
def generate_user_email_list(data_api_url, data_api_token):
client = DataAPIClient(data_api_url, data_api_token)
writer = csv.writer(sys.stdout, delimiter=',', quotechar='"')
for user in client.find_users_iter():
if user['active'] and user['role'] == 'supplier':
writer.writerow([
user['emailAddress'],
user['name'],
user['supplier']['supplierId'],
user['supplier']['name']])
if __name__ == '__main__':
arguments = docopt(__doc__)
generate_user_email_list(
data_api_url=arguments['<data_api_url>'],
data_api_token=arguments['<data_api_token>'])
|
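A side note on the record above: the inverted check did more than select the wrong users, since non-supplier accounts presumably carry no 'supplier' block, so user['supplier']['supplierId'] could raise KeyError for them. A defensive variant of the row filter (a sketch under that assumption, not the project's code):
def supplier_rows(users):
    # Yield CSV rows only for active supplier accounts; .get() guards
    # against records that lack a supplier block entirely.
    for user in users:
        supplier = user.get('supplier') or {}
        if user.get('active') and user.get('role') == 'supplier':
            yield [user['emailAddress'], user['name'],
                   supplier.get('supplierId'), supplier.get('name')]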
3bcec41a2dd9d5a43ce4d51379783d5f398f7571
|
Lib/scipy_version.py
|
Lib/scipy_version.py
|
major = 0
minor = 4
micro = 3
#try:
# from __svn_version__ import version as svn_revision
# scipy_version = '%(major)d.%(minor)d.%(micro)d_%(svn_revision)s'\
# % (locals ())
#except ImportError,msg:
# svn_revision = 0
scipy_version = '%(major)d.%(minor)d.%(micro)d' % (locals ())
|
major = 0
minor = 4
micro = 3
scipy_version = '%(major)d.%(minor)d.%(micro)d' % (locals ())
import os
svn_version_file = os.path.join(os.path.dirname(__file__),
'__svn_version__.py')
if os.path.isfile(svn_version_file):
import imp
svn = imp.load_module('scipy.__svn_version__',
open(svn_version_file),
svn_version_file,
('.py','U',1))
scipy_version += '.'+svn.version
|
Fix the scipy version display.
|
Fix the scipy version display.
|
Python
|
bsd-3-clause
|
fernand/scipy,jonycgn/scipy,andyfaff/scipy,mgaitan/scipy,fernand/scipy,mortada/scipy,pschella/scipy,efiring/scipy,pbrod/scipy,mgaitan/scipy,nonhermitian/scipy,Shaswat27/scipy,mtrbean/scipy,futurulus/scipy,perimosocordiae/scipy,argriffing/scipy,behzadnouri/scipy,nmayorov/scipy,richardotis/scipy,ilayn/scipy,vhaasteren/scipy,aarchiba/scipy,nmayorov/scipy,zxsted/scipy,sonnyhu/scipy,pizzathief/scipy,Shaswat27/scipy,pnedunuri/scipy,josephcslater/scipy,vigna/scipy,befelix/scipy,Newman101/scipy,mortada/scipy,mingwpy/scipy,sargas/scipy,zxsted/scipy,perimosocordiae/scipy,mortonjt/scipy,fernand/scipy,hainm/scipy,minhlongdo/scipy,cpaulik/scipy,kleskjr/scipy,ChanderG/scipy,gef756/scipy,gertingold/scipy,Gillu13/scipy,pbrod/scipy,lhilt/scipy,surhudm/scipy,jjhelmus/scipy,WarrenWeckesser/scipy,kleskjr/scipy,ilayn/scipy,vberaudi/scipy,grlee77/scipy,felipebetancur/scipy,kalvdans/scipy,futurulus/scipy,dominicelse/scipy,aeklant/scipy,anielsen001/scipy,befelix/scipy,ales-erjavec/scipy,witcxc/scipy,Dapid/scipy,minhlongdo/scipy,nvoron23/scipy,Newman101/scipy,pbrod/scipy,maniteja123/scipy,vberaudi/scipy,giorgiop/scipy,gfyoung/scipy,mortada/scipy,anntzer/scipy,andyfaff/scipy,Srisai85/scipy,richardotis/scipy,trankmichael/scipy,arokem/scipy,WillieMaddox/scipy,pyramania/scipy,person142/scipy,Kamp9/scipy,mhogg/scipy,raoulbq/scipy,behzadnouri/scipy,lhilt/scipy,pnedunuri/scipy,ales-erjavec/scipy,gef756/scipy,Newman101/scipy,dch312/scipy,pnedunuri/scipy,piyush0609/scipy,jseabold/scipy,mgaitan/scipy,lukauskas/scipy,vberaudi/scipy,sargas/scipy,larsmans/scipy,vigna/scipy,zerothi/scipy,nmayorov/scipy,andyfaff/scipy,mgaitan/scipy,maniteja123/scipy,anntzer/scipy,Srisai85/scipy,sonnyhu/scipy,zaxliu/scipy,jsilter/scipy,argriffing/scipy,mtrbean/scipy,sriki18/scipy,jonycgn/scipy,fernand/scipy,tylerjereddy/scipy,person142/scipy,WillieMaddox/scipy,raoulbq/scipy,nvoron23/scipy,rgommers/scipy,felipebetancur/scipy,juliantaylor/scipy,mortonjt/scipy,dch312/scipy,lukauskas/scipy,mdhaber/scipy,Stefan-Endres/scipy,pyramania/scipy,niknow/scipy,mikebenfield/scipy,perimosocordiae/scipy,aman-iitj/scipy,matthewalbani/scipy,chatcannon/scipy,gertingold/scipy,WillieMaddox/scipy,jsilter/scipy,Srisai85/scipy,witcxc/scipy,sonnyhu/scipy,surhudm/scipy,futurulus/scipy,mingwpy/scipy,richardotis/scipy,pyramania/scipy,efiring/scipy,aman-iitj/scipy,Shaswat27/scipy,Newman101/scipy,Gillu13/scipy,anntzer/scipy,ortylp/scipy,pyramania/scipy,gdooper/scipy,dch312/scipy,dch312/scipy,e-q/scipy,jor-/scipy,argriffing/scipy,aman-iitj/scipy,kleskjr/scipy,behzadnouri/scipy,ChanderG/scipy,pschella/scipy,mingwpy/scipy,ChanderG/scipy,vanpact/scipy,minhlongdo/scipy,pizzathief/scipy,jonycgn/scipy,rmcgibbo/scipy,fredrikw/scipy,Eric89GXL/scipy,ortylp/scipy,raoulbq/scipy,niknow/scipy,Newman101/scipy,perimosocordiae/scipy,nonhermitian/scipy,cpaulik/scipy,anntzer/scipy,person142/scipy,jsilter/scipy,gef756/scipy,jjhelmus/scipy,mortada/scipy,gertingold/scipy,chatcannon/scipy,dominicelse/scipy,surhudm/scipy,giorgiop/scipy,mortada/scipy,Srisai85/scipy,woodscn/scipy,FRidh/scipy,ortylp/scipy,niknow/scipy,ales-erjavec/scipy,sriki18/scipy,zerothi/scipy,pyramania/scipy,andyfaff/scipy,cpaulik/scipy,apbard/scipy,Dapid/scipy,hainm/scipy,sauliusl/scipy,aarchiba/scipy,petebachant/scipy,mtrbean/scipy,juliantaylor/scipy,matthew-brett/scipy,jakevdp/scipy,zaxliu/scipy,giorgiop/scipy,maniteja123/scipy,jjhelmus/scipy,jamestwebber/scipy,Eric89GXL/scipy,hainm/scipy,piyush0609/scipy,cpaulik/scipy,aeklant/scipy,aman-iitj/scipy,jor-/scipy,mdhaber/scipy,anntzer/scipy,rgommers/scipy,pizzathief/scipy,jseabold/s
cipy,aarchiba/scipy,anielsen001/scipy,larsmans/scipy,hainm/scipy,jakevdp/scipy,mikebenfield/scipy,befelix/scipy,scipy/scipy,arokem/scipy,Stefan-Endres/scipy,Dapid/scipy,cpaulik/scipy,minhlongdo/scipy,jamestwebber/scipy,trankmichael/scipy,Eric89GXL/scipy,maniteja123/scipy,andim/scipy,newemailjdm/scipy,mdhaber/scipy,Srisai85/scipy,felipebetancur/scipy,njwilson23/scipy,aeklant/scipy,sonnyhu/scipy,raoulbq/scipy,haudren/scipy,mingwpy/scipy,zxsted/scipy,nvoron23/scipy,pschella/scipy,nvoron23/scipy,jor-/scipy,fredrikw/scipy,mhogg/scipy,anielsen001/scipy,ogrisel/scipy,Dapid/scipy,pbrod/scipy,endolith/scipy,cpaulik/scipy,Stefan-Endres/scipy,larsmans/scipy,mdhaber/scipy,WarrenWeckesser/scipy,matthewalbani/scipy,sargas/scipy,kalvdans/scipy,tylerjereddy/scipy,vanpact/scipy,witcxc/scipy,lukauskas/scipy,ChanderG/scipy,e-q/scipy,perimosocordiae/scipy,hainm/scipy,lukauskas/scipy,zerothi/scipy,rmcgibbo/scipy,scipy/scipy,rmcgibbo/scipy,dominicelse/scipy,newemailjdm/scipy,mingwpy/scipy,andim/scipy,ortylp/scipy,felipebetancur/scipy,rgommers/scipy,Stefan-Endres/scipy,WarrenWeckesser/scipy,e-q/scipy,sauliusl/scipy,vhaasteren/scipy,matthew-brett/scipy,endolith/scipy,hainm/scipy,richardotis/scipy,aeklant/scipy,vhaasteren/scipy,zxsted/scipy,efiring/scipy,trankmichael/scipy,andyfaff/scipy,WillieMaddox/scipy,chatcannon/scipy,andim/scipy,jonycgn/scipy,lhilt/scipy,lukauskas/scipy,fredrikw/scipy,richardotis/scipy,anielsen001/scipy,mhogg/scipy,arokem/scipy,sauliusl/scipy,ortylp/scipy,pschella/scipy,vberaudi/scipy,vanpact/scipy,WarrenWeckesser/scipy,rmcgibbo/scipy,apbard/scipy,vhaasteren/scipy,behzadnouri/scipy,rgommers/scipy,nonhermitian/scipy,gef756/scipy,scipy/scipy,niknow/scipy,Dapid/scipy,mtrbean/scipy,endolith/scipy,efiring/scipy,kleskjr/scipy,anielsen001/scipy,mtrbean/scipy,larsmans/scipy,pnedunuri/scipy,witcxc/scipy,Newman101/scipy,Gillu13/scipy,arokem/scipy,pizzathief/scipy,chatcannon/scipy,gfyoung/scipy,maciejkula/scipy,nvoron23/scipy,ndchorley/scipy,ales-erjavec/scipy,vanpact/scipy,andyfaff/scipy,mikebenfield/scipy,raoulbq/scipy,jonycgn/scipy,person142/scipy,WillieMaddox/scipy,petebachant/scipy,apbard/scipy,ilayn/scipy,scipy/scipy,ndchorley/scipy,sriki18/scipy,sauliusl/scipy,anntzer/scipy,sriki18/scipy,nonhermitian/scipy,rmcgibbo/scipy,Gillu13/scipy,haudren/scipy,mortonjt/scipy,zaxliu/scipy,person142/scipy,woodscn/scipy,argriffing/scipy,felipebetancur/scipy,ChanderG/scipy,gef756/scipy,njwilson23/scipy,surhudm/scipy,Kamp9/scipy,newemailjdm/scipy,kleskjr/scipy,perimosocordiae/scipy,e-q/scipy,WarrenWeckesser/scipy,newemailjdm/scipy,pschella/scipy,matthewalbani/scipy,sonnyhu/scipy,gdooper/scipy,kleskjr/scipy,jseabold/scipy,Eric89GXL/scipy,jakevdp/scipy,mikebenfield/scipy,vhaasteren/scipy,haudren/scipy,lhilt/scipy,anielsen001/scipy,Dapid/scipy,woodscn/scipy,argriffing/scipy,njwilson23/scipy,mgaitan/scipy,mgaitan/scipy,fredrikw/scipy,vanpact/scipy,jonycgn/scipy,futurulus/scipy,tylerjereddy/scipy,gdooper/scipy,niknow/scipy,ilayn/scipy,josephcslater/scipy,lhilt/scipy,piyush0609/scipy,Shaswat27/scipy,ales-erjavec/scipy,Kamp9/scipy,vanpact/scipy,gdooper/scipy,grlee77/scipy,maciejkula/scipy,aman-iitj/scipy,petebachant/scipy,piyush0609/scipy,maciejkula/scipy,teoliphant/scipy,ChanderG/scipy,minhlongdo/scipy,matthewalbani/scipy,ndchorley/scipy,piyush0609/scipy,vigna/scipy,arokem/scipy,woodscn/scipy,nonhermitian/scipy,dch312/scipy,endolith/scipy,woodscn/scipy,mhogg/scipy,Gillu13/scipy,bkendzior/scipy,mhogg/scipy,trankmichael/scipy,pnedunuri/scipy,nmayorov/scipy,grlee77/scipy,matthewalbani/scipy,grlee77/scipy,Gillu13/scipy,sri
ki18/scipy,haudren/scipy,aarchiba/scipy,grlee77/scipy,trankmichael/scipy,mortonjt/scipy,argriffing/scipy,lukauskas/scipy,petebachant/scipy,apbard/scipy,zerothi/scipy,maniteja123/scipy,juliantaylor/scipy,teoliphant/scipy,tylerjereddy/scipy,Kamp9/scipy,haudren/scipy,pnedunuri/scipy,ilayn/scipy,jjhelmus/scipy,tylerjereddy/scipy,aman-iitj/scipy,ortylp/scipy,matthew-brett/scipy,pbrod/scipy,gfyoung/scipy,jakevdp/scipy,jseabold/scipy,minhlongdo/scipy,vigna/scipy,efiring/scipy,gdooper/scipy,teoliphant/scipy,matthew-brett/scipy,ndchorley/scipy,sauliusl/scipy,andim/scipy,nmayorov/scipy,zaxliu/scipy,surhudm/scipy,trankmichael/scipy,WarrenWeckesser/scipy,piyush0609/scipy,mikebenfield/scipy,sargas/scipy,bkendzior/scipy,mortonjt/scipy,ndchorley/scipy,sonnyhu/scipy,andim/scipy,njwilson23/scipy,jseabold/scipy,Eric89GXL/scipy,Kamp9/scipy,befelix/scipy,felipebetancur/scipy,newemailjdm/scipy,jseabold/scipy,befelix/scipy,mdhaber/scipy,sriki18/scipy,behzadnouri/scipy,apbard/scipy,zerothi/scipy,Stefan-Endres/scipy,mingwpy/scipy,ogrisel/scipy,ogrisel/scipy,behzadnouri/scipy,gertingold/scipy,nvoron23/scipy,e-q/scipy,ndchorley/scipy,josephcslater/scipy,richardotis/scipy,pizzathief/scipy,endolith/scipy,giorgiop/scipy,FRidh/scipy,newemailjdm/scipy,petebachant/scipy,dominicelse/scipy,jor-/scipy,zaxliu/scipy,jsilter/scipy,scipy/scipy,jor-/scipy,giorgiop/scipy,vberaudi/scipy,teoliphant/scipy,raoulbq/scipy,mtrbean/scipy,WillieMaddox/scipy,fredrikw/scipy,giorgiop/scipy,dominicelse/scipy,jamestwebber/scipy,teoliphant/scipy,jamestwebber/scipy,andim/scipy,Eric89GXL/scipy,witcxc/scipy,zaxliu/scipy,vigna/scipy,jakevdp/scipy,josephcslater/scipy,maciejkula/scipy,zxsted/scipy,gfyoung/scipy,njwilson23/scipy,vhaasteren/scipy,gertingold/scipy,endolith/scipy,mdhaber/scipy,FRidh/scipy,gfyoung/scipy,bkendzior/scipy,niknow/scipy,gef756/scipy,kalvdans/scipy,jamestwebber/scipy,chatcannon/scipy,larsmans/scipy,FRidh/scipy,bkendzior/scipy,ilayn/scipy,ogrisel/scipy,mortonjt/scipy,surhudm/scipy,jsilter/scipy,FRidh/scipy,ales-erjavec/scipy,fernand/scipy,kalvdans/scipy,maciejkula/scipy,efiring/scipy,kalvdans/scipy,maniteja123/scipy,Stefan-Endres/scipy,njwilson23/scipy,rgommers/scipy,larsmans/scipy,juliantaylor/scipy,Srisai85/scipy,FRidh/scipy,scipy/scipy,bkendzior/scipy,matthew-brett/scipy,petebachant/scipy,haudren/scipy,fernand/scipy,Shaswat27/scipy,rmcgibbo/scipy,juliantaylor/scipy,aeklant/scipy,sauliusl/scipy,jjhelmus/scipy,mhogg/scipy,chatcannon/scipy,zerothi/scipy,futurulus/scipy,Kamp9/scipy,josephcslater/scipy,vberaudi/scipy,fredrikw/scipy,aarchiba/scipy,pbrod/scipy,futurulus/scipy,sargas/scipy,zxsted/scipy,ogrisel/scipy,Shaswat27/scipy,woodscn/scipy,mortada/scipy
|
major = 0
minor = 4
micro = 3
#try:
# from __svn_version__ import version as svn_revision
# scipy_version = '%(major)d.%(minor)d.%(micro)d_%(svn_revision)s'\
# % (locals ())
#except ImportError,msg:
# svn_revision = 0
scipy_version = '%(major)d.%(minor)d.%(micro)d' % (locals ())
Fix the scipy version display.
|
major = 0
minor = 4
micro = 3
scipy_version = '%(major)d.%(minor)d.%(micro)d' % (locals ())
import os
svn_version_file = os.path.join(os.path.dirname(__file__),
'__svn_version__.py')
if os.path.isfile(svn_version_file):
import imp
svn = imp.load_module('scipy.__svn_version__',
open(svn_version_file),
svn_version_file,
('.py','U',1))
scipy_version += '.'+svn.version
|
<commit_before>major = 0
minor = 4
micro = 3
#try:
# from __svn_version__ import version as svn_revision
# scipy_version = '%(major)d.%(minor)d.%(micro)d_%(svn_revision)s'\
# % (locals ())
#except ImportError,msg:
# svn_revision = 0
scipy_version = '%(major)d.%(minor)d.%(micro)d' % (locals ())
<commit_msg>Fix the scipy version display.<commit_after>
|
major = 0
minor = 4
micro = 3
scipy_version = '%(major)d.%(minor)d.%(micro)d' % (locals ())
import os
svn_version_file = os.path.join(os.path.dirname(__file__),
'__svn_version__.py')
if os.path.isfile(svn_version_file):
import imp
svn = imp.load_module('scipy.__svn_version__',
open(svn_version_file),
svn_version_file,
('.py','U',1))
scipy_version += '.'+svn.version
|
major = 0
minor = 4
micro = 3
#try:
# from __svn_version__ import version as svn_revision
# scipy_version = '%(major)d.%(minor)d.%(micro)d_%(svn_revision)s'\
# % (locals ())
#except ImportError,msg:
# svn_revision = 0
scipy_version = '%(major)d.%(minor)d.%(micro)d' % (locals ())
Fix the scipy version display.
major = 0
minor = 4
micro = 3
scipy_version = '%(major)d.%(minor)d.%(micro)d' % (locals ())
import os
svn_version_file = os.path.join(os.path.dirname(__file__),
'__svn_version__.py')
if os.path.isfile(svn_version_file):
import imp
svn = imp.load_module('scipy.__svn_version__',
open(svn_version_file),
svn_version_file,
('.py','U',1))
scipy_version += '.'+svn.version
|
<commit_before>major = 0
minor = 4
micro = 3
#try:
# from __svn_version__ import version as svn_revision
# scipy_version = '%(major)d.%(minor)d.%(micro)d_%(svn_revision)s'\
# % (locals ())
#except ImportError,msg:
# svn_revision = 0
scipy_version = '%(major)d.%(minor)d.%(micro)d' % (locals ())
<commit_msg>Fix the scipy version display.<commit_after>major = 0
minor = 4
micro = 3
scipy_version = '%(major)d.%(minor)d.%(micro)d' % (locals ())
import os
svn_version_file = os.path.join(os.path.dirname(__file__),
'__svn_version__.py')
if os.path.isfile(svn_version_file):
import imp
svn = imp.load_module('scipy.__svn_version__',
open(svn_version_file),
svn_version_file,
('.py','U',1))
scipy_version += '.'+svn.version
|
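The record above loads __svn_version__.py at runtime with imp.load_module and appends its version attribute. The imp module has long been deprecated and was removed in Python 3.12; a rough modern equivalent with importlib, assuming the same file layout (a sketch, not scipy's actual code):
import os
import importlib.util
major, minor, micro = 0, 4, 3
scipy_version = '%d.%d.%d' % (major, minor, micro)
svn_version_file = os.path.join(os.path.dirname(__file__), '__svn_version__.py')
if os.path.isfile(svn_version_file):
    spec = importlib.util.spec_from_file_location('__svn_version__', svn_version_file)
    svn = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(svn)   # the file is expected to define `version`
    scipy_version += '.' + svn.version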
e49fb537143cd0936b62ef53e294717d6ca4dc6f
|
tests/test_automaton.py
|
tests/test_automaton.py
|
#!/usr/bin/env python3
"""Functions for testing the Automaton abstract base class."""
import nose.tools as nose
from automata.base.automaton import Automaton
def test_abstract_methods_not_implemented():
"""Should raise NotImplementedError when calling abstract methods."""
with nose.assert_raises(NotImplementedError):
Automaton.__init__(Automaton)
with nose.assert_raises(NotImplementedError):
Automaton._init_from_formal_params(Automaton)
with nose.assert_raises(NotImplementedError):
Automaton.validate_self(Automaton)
with nose.assert_raises(NotImplementedError):
Automaton._validate_input_yield(Automaton, None)
|
#!/usr/bin/env python3
"""Functions for testing the Automaton abstract base class."""
import nose.tools as nose
from automata.base.automaton import Automaton
def test_abstract_methods_not_implemented():
"""Should raise NotImplementedError when calling abstract methods."""
abstract_methods = {
'__init__': (Automaton,),
'_init_from_formal_params': (Automaton,),
'validate_self': (Automaton,),
'_validate_input_yield': (Automaton, '')
}
for method_name, method_args in abstract_methods.items():
with nose.assert_raises(NotImplementedError):
getattr(Automaton, method_name)(*method_args)
|
Refactor abstract method test to reduce duplication
|
Refactor abstract method test to reduce duplication
|
Python
|
mit
|
caleb531/automata
|
#!/usr/bin/env python3
"""Functions for testing the Automaton abstract base class."""
import nose.tools as nose
from automata.base.automaton import Automaton
def test_abstract_methods_not_implemented():
"""Should raise NotImplementedError when calling abstract methods."""
with nose.assert_raises(NotImplementedError):
Automaton.__init__(Automaton)
with nose.assert_raises(NotImplementedError):
Automaton._init_from_formal_params(Automaton)
with nose.assert_raises(NotImplementedError):
Automaton.validate_self(Automaton)
with nose.assert_raises(NotImplementedError):
Automaton._validate_input_yield(Automaton, None)
Refactor abstract method test to reduce duplication
|
#!/usr/bin/env python3
"""Functions for testing the Automaton abstract base class."""
import nose.tools as nose
from automata.base.automaton import Automaton
def test_abstract_methods_not_implemented():
"""Should raise NotImplementedError when calling abstract methods."""
abstract_methods = {
'__init__': (Automaton,),
'_init_from_formal_params': (Automaton,),
'validate_self': (Automaton,),
'_validate_input_yield': (Automaton, '')
}
for method_name, method_args in abstract_methods.items():
with nose.assert_raises(NotImplementedError):
getattr(Automaton, method_name)(*method_args)
|
<commit_before>#!/usr/bin/env python3
"""Functions for testing the Automaton abstract base class."""
import nose.tools as nose
from automata.base.automaton import Automaton
def test_abstract_methods_not_implemented():
"""Should raise NotImplementedError when calling abstract methods."""
with nose.assert_raises(NotImplementedError):
Automaton.__init__(Automaton)
with nose.assert_raises(NotImplementedError):
Automaton._init_from_formal_params(Automaton)
with nose.assert_raises(NotImplementedError):
Automaton.validate_self(Automaton)
with nose.assert_raises(NotImplementedError):
Automaton._validate_input_yield(Automaton, None)
<commit_msg>Refactor abstract method test to reduce duplication<commit_after>
|
#!/usr/bin/env python3
"""Functions for testing the Automaton abstract base class."""
import nose.tools as nose
from automata.base.automaton import Automaton
def test_abstract_methods_not_implemented():
"""Should raise NotImplementedError when calling abstract methods."""
abstract_methods = {
'__init__': (Automaton,),
'_init_from_formal_params': (Automaton,),
'validate_self': (Automaton,),
'_validate_input_yield': (Automaton, '')
}
for method_name, method_args in abstract_methods.items():
with nose.assert_raises(NotImplementedError):
getattr(Automaton, method_name)(*method_args)
|
#!/usr/bin/env python3
"""Functions for testing the Automaton abstract base class."""
import nose.tools as nose
from automata.base.automaton import Automaton
def test_abstract_methods_not_implemented():
"""Should raise NotImplementedError when calling abstract methods."""
with nose.assert_raises(NotImplementedError):
Automaton.__init__(Automaton)
with nose.assert_raises(NotImplementedError):
Automaton._init_from_formal_params(Automaton)
with nose.assert_raises(NotImplementedError):
Automaton.validate_self(Automaton)
with nose.assert_raises(NotImplementedError):
Automaton._validate_input_yield(Automaton, None)
Refactor abstract method test to reduce duplication
#!/usr/bin/env python3
"""Functions for testing the Automaton abstract base class."""
import nose.tools as nose
from automata.base.automaton import Automaton
def test_abstract_methods_not_implemented():
"""Should raise NotImplementedError when calling abstract methods."""
abstract_methods = {
'__init__': (Automaton,),
'_init_from_formal_params': (Automaton,),
'validate_self': (Automaton,),
'_validate_input_yield': (Automaton, '')
}
for method_name, method_args in abstract_methods.items():
with nose.assert_raises(NotImplementedError):
getattr(Automaton, method_name)(*method_args)
|
<commit_before>#!/usr/bin/env python3
"""Functions for testing the Automaton abstract base class."""
import nose.tools as nose
from automata.base.automaton import Automaton
def test_abstract_methods_not_implemented():
"""Should raise NotImplementedError when calling abstract methods."""
with nose.assert_raises(NotImplementedError):
Automaton.__init__(Automaton)
with nose.assert_raises(NotImplementedError):
Automaton._init_from_formal_params(Automaton)
with nose.assert_raises(NotImplementedError):
Automaton.validate_self(Automaton)
with nose.assert_raises(NotImplementedError):
Automaton._validate_input_yield(Automaton, None)
<commit_msg>Refactor abstract method test to reduce duplication<commit_after>#!/usr/bin/env python3
"""Functions for testing the Automaton abstract base class."""
import nose.tools as nose
from automata.base.automaton import Automaton
def test_abstract_methods_not_implemented():
"""Should raise NotImplementedError when calling abstract methods."""
abstract_methods = {
'__init__': (Automaton,),
'_init_from_formal_params': (Automaton,),
'validate_self': (Automaton,),
'_validate_input_yield': (Automaton, '')
}
for method_name, method_args in abstract_methods.items():
with nose.assert_raises(NotImplementedError):
getattr(Automaton, method_name)(*method_args)
|
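The refactor above collapses four identical with-blocks into a name-to-arguments table dispatched through getattr. Under pytest the same table can drive parametrized cases, which reports each method as its own test; a sketch assuming the automata package is importable:
import pytest
from automata.base.automaton import Automaton
@pytest.mark.parametrize('method_name, method_args', [
    ('__init__', (Automaton,)),
    ('_init_from_formal_params', (Automaton,)),
    ('validate_self', (Automaton,)),
    ('_validate_input_yield', (Automaton, '')),
])
def test_abstract_method_not_implemented(method_name, method_args):
    # Each abstract method must refuse to run on the base class itself.
    with pytest.raises(NotImplementedError):
        getattr(Automaton, method_name)(*method_args)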
d7e58494e1b35c315ede2b3019a18af0dd1744b4
|
stuff/urls.py
|
stuff/urls.py
|
import settings
from django.conf.urls.defaults import *
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/(.*)', admin.site.root),
(r'^publication/', include('stuff.publications.urls')),
(r'^file/', include('stuff.files.urls')),
(r'^photo/', include('stuff.picasaweb.urls')),
(r'^bookmark/', include('stuff.delicious.urls')),
(r'^project/', include('stuff.projects.urls')),
(r'^multimedia/', include('stuff.multimedia.urls')),
(r'^git/', include('stuff.dit.urls')),
# (r'^db/(.*)', databrowse.site.root),
(r'^$', 'stuff.views.index'),
# Media serving
(r'^%smedia/(?P<path>.*)$',
'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT,
'show_indexes': True}
),
)
|
import settings
from django.conf.urls.defaults import *
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/(.*)', admin.site.root),
(r'^publication/', include('stuff.publications.urls')),
(r'^file/', include('stuff.files.urls')),
(r'^photo/', include('stuff.picasaweb.urls')),
(r'^bookmark/', include('stuff.delicious.urls')),
(r'^project/', include('stuff.projects.urls')),
(r'^multimedia/', include('stuff.multimedia.urls')),
# (r'^db/(.*)', databrowse.site.root),
(r'^$', 'stuff.views.index'),
# Media serving
(r'^media/(?P<path>.*)$',
'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT,
'show_indexes': True}
),
)
|
Remove extra %s from media path
|
Remove extra %s from media path
|
Python
|
bsd-2-clause
|
anjos/website,anjos/website
|
import settings
from django.conf.urls.defaults import *
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/(.*)', admin.site.root),
(r'^publication/', include('stuff.publications.urls')),
(r'^file/', include('stuff.files.urls')),
(r'^photo/', include('stuff.picasaweb.urls')),
(r'^bookmark/', include('stuff.delicious.urls')),
(r'^project/', include('stuff.projects.urls')),
(r'^multimedia/', include('stuff.multimedia.urls')),
(r'^git/', include('stuff.dit.urls')),
# (r'^db/(.*)', databrowse.site.root),
(r'^$', 'stuff.views.index'),
# Media serving
(r'^%smedia/(?P<path>.*)$',
'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT,
'show_indexes': True}
),
)
Remove extra %s from media path
|
import settings
from django.conf.urls.defaults import *
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/(.*)', admin.site.root),
(r'^publication/', include('stuff.publications.urls')),
(r'^file/', include('stuff.files.urls')),
(r'^photo/', include('stuff.picasaweb.urls')),
(r'^bookmark/', include('stuff.delicious.urls')),
(r'^project/', include('stuff.projects.urls')),
(r'^multimedia/', include('stuff.multimedia.urls')),
# (r'^db/(.*)', databrowse.site.root),
(r'^$', 'stuff.views.index'),
# Media serving
(r'^media/(?P<path>.*)$',
'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT,
'show_indexes': True}
),
)
|
<commit_before>import settings
from django.conf.urls.defaults import *
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/(.*)', admin.site.root),
(r'^publication/', include('stuff.publications.urls')),
(r'^file/', include('stuff.files.urls')),
(r'^photo/', include('stuff.picasaweb.urls')),
(r'^bookmark/', include('stuff.delicious.urls')),
(r'^project/', include('stuff.projects.urls')),
(r'^multimedia/', include('stuff.multimedia.urls')),
(r'^git/', include('stuff.dit.urls')),
# (r'^db/(.*)', databrowse.site.root),
(r'^$', 'stuff.views.index'),
# Media serving
(r'^%smedia/(?P<path>.*)$',
'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT,
'show_indexes': True}
),
)
<commit_msg>Remove extra %s from media path<commit_after>
|
import settings
from django.conf.urls.defaults import *
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/(.*)', admin.site.root),
(r'^publication/', include('stuff.publications.urls')),
(r'^file/', include('stuff.files.urls')),
(r'^photo/', include('stuff.picasaweb.urls')),
(r'^bookmark/', include('stuff.delicious.urls')),
(r'^project/', include('stuff.projects.urls')),
(r'^multimedia/', include('stuff.multimedia.urls')),
# (r'^db/(.*)', databrowse.site.root),
(r'^$', 'stuff.views.index'),
# Media serving
(r'^media/(?P<path>.*)$',
'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT,
'show_indexes': True}
),
)
|
import settings
from django.conf.urls.defaults import *
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/(.*)', admin.site.root),
(r'^publication/', include('stuff.publications.urls')),
(r'^file/', include('stuff.files.urls')),
(r'^photo/', include('stuff.picasaweb.urls')),
(r'^bookmark/', include('stuff.delicious.urls')),
(r'^project/', include('stuff.projects.urls')),
(r'^multimedia/', include('stuff.multimedia.urls')),
(r'^git/', include('stuff.dit.urls')),
# (r'^db/(.*)', databrowse.site.root),
(r'^$', 'stuff.views.index'),
# Media serving
(r'^%smedia/(?P<path>.*)$',
'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT,
'show_indexes': True}
),
)
Remove extra %s from media path
import settings
from django.conf.urls.defaults import *
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/(.*)', admin.site.root),
(r'^publication/', include('stuff.publications.urls')),
(r'^file/', include('stuff.files.urls')),
(r'^photo/', include('stuff.picasaweb.urls')),
(r'^bookmark/', include('stuff.delicious.urls')),
(r'^project/', include('stuff.projects.urls')),
(r'^multimedia/', include('stuff.multimedia.urls')),
# (r'^db/(.*)', databrowse.site.root),
(r'^$', 'stuff.views.index'),
# Media serving
(r'^media/(?P<path>.*)$',
'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT,
'show_indexes': True}
),
)
|
<commit_before>import settings
from django.conf.urls.defaults import *
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/(.*)', admin.site.root),
(r'^publication/', include('stuff.publications.urls')),
(r'^file/', include('stuff.files.urls')),
(r'^photo/', include('stuff.picasaweb.urls')),
(r'^bookmark/', include('stuff.delicious.urls')),
(r'^project/', include('stuff.projects.urls')),
(r'^multimedia/', include('stuff.multimedia.urls')),
(r'^git/', include('stuff.dit.urls')),
# (r'^db/(.*)', databrowse.site.root),
(r'^$', 'stuff.views.index'),
# Media serving
(r'^%smedia/(?P<path>.*)$',
'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT,
'show_indexes': True}
),
)
<commit_msg>Remove extra %s from media path<commit_after>import settings
from django.conf.urls.defaults import *
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/(.*)', admin.site.root),
(r'^publication/', include('stuff.publications.urls')),
(r'^file/', include('stuff.files.urls')),
(r'^photo/', include('stuff.picasaweb.urls')),
(r'^bookmark/', include('stuff.delicious.urls')),
(r'^project/', include('stuff.projects.urls')),
(r'^multimedia/', include('stuff.multimedia.urls')),
# (r'^db/(.*)', databrowse.site.root),
(r'^$', 'stuff.views.index'),
# Media serving
(r'^media/(?P<path>.*)$',
'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT,
'show_indexes': True}
),
)
|
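The bug in the record above is easy to miss: the pattern is a raw string, so the '%s' was never interpolated and was matched literally, meaning requests for /media/... never reached the static-serve view. A quick stdlib check of the two patterns:
import re
broken = re.compile(r'^%smedia/(?P<path>.*)$')
fixed = re.compile(r'^media/(?P<path>.*)$')
# Django matches URL patterns against the path with the leading slash stripped.
print(broken.match('media/css/site.css'))               # None: '%s' is literal
print(fixed.match('media/css/site.css').group('path'))  # css/site.css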
174d9862242cecdf89c3fd398b93e805e49dea44
|
tinned_django/manage.py
|
tinned_django/manage.py
|
#!/usr/bin/env python
"""
Default manage.py for django-configurations
"""
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
os.environ.setdefault("DJANGO_CONFIGURATION", "Development")
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
"""
Default manage.py for django-configurations
"""
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
os.environ.setdefault("DJANGO_CONFIGURATION", "Development")
if len(sys.argv) > 1 and sys.argv[1] == 'test':
os.environ['DJANGO_CONFIGURATION'] = 'Testing'
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Set up test environment when launching tests.
|
Set up test environment when launching tests.
|
Python
|
mit
|
futurecolors/tinned-django,futurecolors/tinned-django
|
#!/usr/bin/env python
"""
Default manage.py for django-configurations
"""
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
os.environ.setdefault("DJANGO_CONFIGURATION", "Development")
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
Set up test environment when launching tests.
|
#!/usr/bin/env python
"""
Default manage.py for django-configurations
"""
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
os.environ.setdefault("DJANGO_CONFIGURATION", "Development")
if len(sys.argv) > 1 and sys.argv[1] == 'test':
os.environ['DJANGO_CONFIGURATION'] = 'Testing'
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
<commit_before>#!/usr/bin/env python
"""
Default manage.py for django-configurations
"""
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
os.environ.setdefault("DJANGO_CONFIGURATION", "Development")
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
<commit_msg>Set up test environment when launching tests.<commit_after>
|
#!/usr/bin/env python
"""
Default manage.py for django-configurations
"""
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
os.environ.setdefault("DJANGO_CONFIGURATION", "Development")
if len(sys.argv) > 1 and sys.argv[1] == 'test':
os.environ['DJANGO_CONFIGURATION'] = 'Testing'
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
"""
Default manage.py for django-configurations
"""
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
os.environ.setdefault("DJANGO_CONFIGURATION", "Development")
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
Set up test environment when launching tests.
#!/usr/bin/env python
"""
Default manage.py for django-configurations
"""
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
os.environ.setdefault("DJANGO_CONFIGURATION", "Development")
if len(sys.argv) > 1 and sys.argv[1] == 'test':
os.environ['DJANGO_CONFIGURATION'] = 'Testing'
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
<commit_before>#!/usr/bin/env python
"""
Default manage.py for django-configurations
"""
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
os.environ.setdefault("DJANGO_CONFIGURATION", "Development")
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
<commit_msg>Set up test environment when launching tests.<commit_after>#!/usr/bin/env python
"""
Default manage.py for django-configurations
"""
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
os.environ.setdefault("DJANGO_CONFIGURATION", "Development")
if len(sys.argv) > 1 and sys.argv[1] == 'test':
os.environ['DJANGO_CONFIGURATION'] = 'Testing'
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
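The manage.py change above switches the django-configurations class to Testing whenever the first CLI argument is 'test'. If more subcommands ever need their own configuration, the same dispatch stays readable as a table; a hedged sketch (the mapping name is mine, not the project's):
import os
import sys
# Map subcommands to configuration classes; anything else keeps the default.
CONFIG_BY_COMMAND = {'test': 'Testing'}
command = sys.argv[1] if len(sys.argv) > 1 else None
os.environ.setdefault('DJANGO_CONFIGURATION', 'Development')
if command in CONFIG_BY_COMMAND:
    os.environ['DJANGO_CONFIGURATION'] = CONFIG_BY_COMMAND[command]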
fece65159abe5d581523108dc1fcd0be462a6f36
|
vanth/main.py
|
vanth/main.py
|
import logging
import os
import chryso.connection
import sepiida.config
import sepiida.log
import vanth.config
import vanth.server
import vanth.tables
LOGGER = logging.getLogger(__name__)
def create_application(config):
sepiida.log.setup_logging()
engine = chryso.connection.Engine(config.db, vanth.tables)
chryso.connection.store(engine)
LOGGER.info("Starting up vanth version %s", vanth.version.VERSION)
application = vanth.server.create_app(config)
return application
def main():
logging.getLogger().setLevel(logging.DEBUG)
logging.basicConfig()
config = sepiida.config.load('/etc/vanth.yaml', vanth.config.SPECIFICATION)
application = create_application(config)
try:
host = os.getenv('HOST', 'localhost')
port = int(os.getenv('PORT', 4545))
application.run(host, port)
except KeyboardInterrupt:
LOGGER.info('Shutting down')
|
import logging
import os
import chryso.connection
import sepiida.config
import sepiida.log
import vanth.config
import vanth.server
import vanth.tables
LOGGER = logging.getLogger(__name__)
def create_application(config):
sepiida.log.setup_logging()
engine = chryso.connection.Engine(config.db, vanth.tables)
chryso.connection.store(engine)
LOGGER.info("Starting up vanth version %s", vanth.version.VERSION)
application = vanth.server.create_app(config)
logging.getLogger('vanth.cors').setLevel(logging.WARNING)
return application
def main():
logging.getLogger().setLevel(logging.DEBUG)
logging.basicConfig()
config = sepiida.config.load('/etc/vanth.yaml', vanth.config.SPECIFICATION)
application = create_application(config)
try:
host = os.getenv('HOST', 'localhost')
port = int(os.getenv('PORT', 4545))
application.run(host, port)
except KeyboardInterrupt:
LOGGER.info('Shutting down')
|
Make CORS debug logs go quiet
|
Make CORS debug logs go quiet
Otherwise I'm just inundated with them and I'm rarely debugging CORS
|
Python
|
agpl-3.0
|
EliRibble/vanth,EliRibble/vanth,EliRibble/vanth,EliRibble/vanth
|
import logging
import os
import chryso.connection
import sepiida.config
import sepiida.log
import vanth.config
import vanth.server
import vanth.tables
LOGGER = logging.getLogger(__name__)
def create_application(config):
sepiida.log.setup_logging()
engine = chryso.connection.Engine(config.db, vanth.tables)
chryso.connection.store(engine)
LOGGER.info("Starting up vanth version %s", vanth.version.VERSION)
application = vanth.server.create_app(config)
return application
def main():
logging.getLogger().setLevel(logging.DEBUG)
logging.basicConfig()
config = sepiida.config.load('/etc/vanth.yaml', vanth.config.SPECIFICATION)
application = create_application(config)
try:
host = os.getenv('HOST', 'localhost')
port = int(os.getenv('PORT', 4545))
application.run(host, port)
except KeyboardInterrupt:
LOGGER.info('Shutting down')
Make CORS debug logs go quiet
Otherwise I'm just inundated with them and I'm rarely debugging CORS
|
import logging
import os
import chryso.connection
import sepiida.config
import sepiida.log
import vanth.config
import vanth.server
import vanth.tables
LOGGER = logging.getLogger(__name__)
def create_application(config):
sepiida.log.setup_logging()
engine = chryso.connection.Engine(config.db, vanth.tables)
chryso.connection.store(engine)
LOGGER.info("Starting up vanth version %s", vanth.version.VERSION)
application = vanth.server.create_app(config)
logging.getLogger('vanth.cors').setLevel(logging.WARNING)
return application
def main():
logging.getLogger().setLevel(logging.DEBUG)
logging.basicConfig()
config = sepiida.config.load('/etc/vanth.yaml', vanth.config.SPECIFICATION)
application = create_application(config)
try:
host = os.getenv('HOST', 'localhost')
port = int(os.getenv('PORT', 4545))
application.run(host, port)
except KeyboardInterrupt:
LOGGER.info('Shutting down')
|
<commit_before>import logging
import os
import chryso.connection
import sepiida.config
import sepiida.log
import vanth.config
import vanth.server
import vanth.tables
LOGGER = logging.getLogger(__name__)
def create_application(config):
sepiida.log.setup_logging()
engine = chryso.connection.Engine(config.db, vanth.tables)
chryso.connection.store(engine)
LOGGER.info("Starting up vanth version %s", vanth.version.VERSION)
application = vanth.server.create_app(config)
return application
def main():
logging.getLogger().setLevel(logging.DEBUG)
logging.basicConfig()
config = sepiida.config.load('/etc/vanth.yaml', vanth.config.SPECIFICATION)
application = create_application(config)
try:
host = os.getenv('HOST', 'localhost')
port = int(os.getenv('PORT', 4545))
application.run(host, port)
except KeyboardInterrupt:
LOGGER.info('Shutting down')
<commit_msg>Make CORS debug logs go quiet
Otherwise I'm just inundated with them and I'm rarely debugging CORS<commit_after>
|
import logging
import os
import chryso.connection
import sepiida.config
import sepiida.log
import vanth.config
import vanth.server
import vanth.tables
LOGGER = logging.getLogger(__name__)
def create_application(config):
sepiida.log.setup_logging()
engine = chryso.connection.Engine(config.db, vanth.tables)
chryso.connection.store(engine)
LOGGER.info("Starting up vanth version %s", vanth.version.VERSION)
application = vanth.server.create_app(config)
logging.getLogger('vanth.cors').setLevel(logging.WARNING)
return application
def main():
logging.getLogger().setLevel(logging.DEBUG)
logging.basicConfig()
config = sepiida.config.load('/etc/vanth.yaml', vanth.config.SPECIFICATION)
application = create_application(config)
try:
host = os.getenv('HOST', 'localhost')
port = int(os.getenv('PORT', 4545))
application.run(host, port)
except KeyboardInterrupt:
LOGGER.info('Shutting down')
|
import logging
import os
import chryso.connection
import sepiida.config
import sepiida.log
import vanth.config
import vanth.server
import vanth.tables
LOGGER = logging.getLogger(__name__)
def create_application(config):
sepiida.log.setup_logging()
engine = chryso.connection.Engine(config.db, vanth.tables)
chryso.connection.store(engine)
LOGGER.info("Starting up vanth version %s", vanth.version.VERSION)
application = vanth.server.create_app(config)
return application
def main():
logging.getLogger().setLevel(logging.DEBUG)
logging.basicConfig()
config = sepiida.config.load('/etc/vanth.yaml', vanth.config.SPECIFICATION)
application = create_application(config)
try:
host = os.getenv('HOST', 'localhost')
port = int(os.getenv('PORT', 4545))
application.run(host, port)
except KeyboardInterrupt:
LOGGER.info('Shutting down')
Make CORS debug logs go quiet
Otherwise I'm just inundated with them and I'm rarely debugging CORS
import logging
import os
import chryso.connection
import sepiida.config
import sepiida.log
import vanth.config
import vanth.server
import vanth.tables
LOGGER = logging.getLogger(__name__)
def create_application(config):
sepiida.log.setup_logging()
engine = chryso.connection.Engine(config.db, vanth.tables)
chryso.connection.store(engine)
LOGGER.info("Starting up vanth version %s", vanth.version.VERSION)
application = vanth.server.create_app(config)
logging.getLogger('vanth.cors').setLevel(logging.WARNING)
return application
def main():
logging.getLogger().setLevel(logging.DEBUG)
logging.basicConfig()
config = sepiida.config.load('/etc/vanth.yaml', vanth.config.SPECIFICATION)
application = create_application(config)
try:
host = os.getenv('HOST', 'localhost')
port = int(os.getenv('PORT', 4545))
application.run(host, port)
except KeyboardInterrupt:
LOGGER.info('Shutting down')
|
<commit_before>import logging
import os
import chryso.connection
import sepiida.config
import sepiida.log
import vanth.config
import vanth.server
import vanth.tables
LOGGER = logging.getLogger(__name__)
def create_application(config):
sepiida.log.setup_logging()
engine = chryso.connection.Engine(config.db, vanth.tables)
chryso.connection.store(engine)
LOGGER.info("Starting up vanth version %s", vanth.version.VERSION)
application = vanth.server.create_app(config)
return application
def main():
logging.getLogger().setLevel(logging.DEBUG)
logging.basicConfig()
config = sepiida.config.load('/etc/vanth.yaml', vanth.config.SPECIFICATION)
application = create_application(config)
try:
host = os.getenv('HOST', 'localhost')
port = int(os.getenv('PORT', 4545))
application.run(host, port)
except KeyboardInterrupt:
LOGGER.info('Shutting down')
<commit_msg>Make CORS debug logs go quiet
Otherwise I'm just inundated with them and I'm rarely debugging CORS<commit_after>import logging
import os
import chryso.connection
import sepiida.config
import sepiida.log
import vanth.config
import vanth.server
import vanth.tables
LOGGER = logging.getLogger(__name__)
def create_application(config):
sepiida.log.setup_logging()
engine = chryso.connection.Engine(config.db, vanth.tables)
chryso.connection.store(engine)
LOGGER.info("Starting up vanth version %s", vanth.version.VERSION)
application = vanth.server.create_app(config)
logging.getLogger('vanth.cors').setLevel(logging.WARNING)
return application
def main():
logging.getLogger().setLevel(logging.DEBUG)
logging.basicConfig()
config = sepiida.config.load('/etc/vanth.yaml', vanth.config.SPECIFICATION)
application = create_application(config)
try:
host = os.getenv('HOST', 'localhost')
port = int(os.getenv('PORT', 4545))
application.run(host, port)
except KeyboardInterrupt:
LOGGER.info('Shutting down')
|
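The one-line fix above works because Python loggers form a dot-separated hierarchy: raising only 'vanth.cors' to WARNING silences that chatty child while the root logger stays at DEBUG. A self-contained demonstration:
import logging
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('vanth.cors').setLevel(logging.WARNING)
logging.getLogger('vanth.server').debug('still visible')     # emitted
logging.getLogger('vanth.cors').debug('suppressed')          # dropped by level check
logging.getLogger('vanth.cors').warning('still important')   # WARNING passes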
d924415639e4ac39faa68d3cfd1696dd9ca30ddc
|
views/base.py
|
views/base.py
|
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from feincms.models import Page
def handler(request, path=None):
if path is None:
path = request.path
page = Page.objects.page_for_path_or_404(path)
if page.redirect_to:
return HttpResponseRedirect(page.redirect_to)
return render_to_response(page.template.path, {
'feincms_page': page,
}, context_instance=RequestContext(request))
|
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.utils import translation
from feincms.models import Page
def handler(request, path=None):
if path is None:
path = request.path
page = Page.objects.page_for_path_or_404(path)
if page.redirect_to:
return HttpResponseRedirect(page.redirect_to)
translation.activate(page.language)
request.LANGUAGE_CODE = translation.get_language()
return render_to_response(page.template.path, {
'feincms_page': page,
}, context_instance=RequestContext(request))
|
Set the current language from the page
|
Set the current language from the page
|
Python
|
bsd-3-clause
|
nickburlett/feincms,michaelkuty/feincms,pjdelport/feincms,hgrimelid/feincms,nickburlett/feincms,joshuajonah/feincms,pjdelport/feincms,joshuajonah/feincms,matthiask/django-content-editor,matthiask/django-content-editor,hgrimelid/feincms,michaelkuty/feincms,michaelkuty/feincms,nickburlett/feincms,mjl/feincms,matthiask/django-content-editor,mjl/feincms,matthiask/feincms2-content,hgrimelid/feincms,feincms/feincms,michaelkuty/feincms,mjl/feincms,matthiask/django-content-editor,matthiask/feincms2-content,joshuajonah/feincms,matthiask/feincms2-content,joshuajonah/feincms,nickburlett/feincms,pjdelport/feincms,feincms/feincms,feincms/feincms
|
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from feincms.models import Page
def handler(request, path=None):
if path is None:
path = request.path
page = Page.objects.page_for_path_or_404(path)
if page.redirect_to:
return HttpResponseRedirect(page.redirect_to)
return render_to_response(page.template.path, {
'feincms_page': page,
}, context_instance=RequestContext(request))
Set the current language from the page
|
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.utils import translation
from feincms.models import Page
def handler(request, path=None):
if path is None:
path = request.path
page = Page.objects.page_for_path_or_404(path)
if page.redirect_to:
return HttpResponseRedirect(page.redirect_to)
translation.activate(page.language)
request.LANGUAGE_CODE = translation.get_language()
return render_to_response(page.template.path, {
'feincms_page': page,
}, context_instance=RequestContext(request))
|
<commit_before>from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from feincms.models import Page
def handler(request, path=None):
if path is None:
path = request.path
page = Page.objects.page_for_path_or_404(path)
if page.redirect_to:
return HttpResponseRedirect(page.redirect_to)
return render_to_response(page.template.path, {
'feincms_page': page,
}, context_instance=RequestContext(request))
<commit_msg>Set the current language from the page<commit_after>
|
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.utils import translation
from feincms.models import Page
def handler(request, path=None):
if path is None:
path = request.path
page = Page.objects.page_for_path_or_404(path)
if page.redirect_to:
return HttpResponseRedirect(page.redirect_to)
translation.activate(page.language)
request.LANGUAGE_CODE = translation.get_language()
return render_to_response(page.template.path, {
'feincms_page': page,
}, context_instance=RequestContext(request))
|
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from feincms.models import Page
def handler(request, path=None):
if path is None:
path = request.path
page = Page.objects.page_for_path_or_404(path)
if page.redirect_to:
return HttpResponseRedirect(page.redirect_to)
return render_to_response(page.template.path, {
'feincms_page': page,
}, context_instance=RequestContext(request))
Set the current language from the page
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.utils import translation
from feincms.models import Page
def handler(request, path=None):
if path is None:
path = request.path
page = Page.objects.page_for_path_or_404(path)
if page.redirect_to:
return HttpResponseRedirect(page.redirect_to)
translation.activate(page.language)
request.LANGUAGE_CODE = translation.get_language()
return render_to_response(page.template.path, {
'feincms_page': page,
}, context_instance=RequestContext(request))
|
<commit_before>from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from feincms.models import Page
def handler(request, path=None):
if path is None:
path = request.path
page = Page.objects.page_for_path_or_404(path)
if page.redirect_to:
return HttpResponseRedirect(page.redirect_to)
return render_to_response(page.template.path, {
'feincms_page': page,
}, context_instance=RequestContext(request))
<commit_msg>Set the current language from the page<commit_after>from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.utils import translation
from feincms.models import Page
def handler(request, path=None):
if path is None:
path = request.path
page = Page.objects.page_for_path_or_404(path)
if page.redirect_to:
return HttpResponseRedirect(page.redirect_to)
translation.activate(page.language)
request.LANGUAGE_CODE = translation.get_language()
return render_to_response(page.template.path, {
'feincms_page': page,
}, context_instance=RequestContext(request))
|
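The change above activates the matched page's language for the rest of the request. A minimal standalone sketch of the same pattern, assuming Django is installed (the function name and page object are illustrative, not from the repo):

from django.utils import translation

def activate_page_language(request, page):
    # Switch the active translation to the page's language code
    # (e.g. 'de' or 'pt-br') for the remainder of this request.
    translation.activate(page.language)
    # Store the language Django actually selected, since it may fall
    # back to a supported variant of the requested code.
    request.LANGUAGE_CODE = translation.get_language()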
fecb2f71aa6ded8fe22a926c5dfc4c46024c30b3
|
currencies/templatetags/currency.py
|
currencies/templatetags/currency.py
|
from django import template
from django.template.defaultfilters import stringfilter
from currencies.models import Currency
from currencies.utils import calculate_price
register = template.Library()
@register.filter(name='currency')
@stringfilter
def set_currency(value, arg):
return calculate_price(value, arg)
class ChangeCurrencyNode(template.Node):
def __init__(self, price, currency):
self.price = template.Variable(price)
self.currency = template.Variable(currency)
def render(self, context):
try:
return calculate_price(self.price.resolve(context),
self.currency.resolve(context))
except template.VariableDoesNotExist:
return ''
@register.tag(name='change_currency')
def change_currency(parser, token):
try:
tag_name, current_price, new_currency = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError, \
'%r tag requires exactly two arguments' % token.contents.split()[0]
return ChangeCurrencyNode(current_price, new_currency)
|
from django import template
from django.template.defaultfilters import stringfilter
from currencies.models import Currency
from currencies.utils import calculate_price
register = template.Library()
@register.filter(name='currency')
@stringfilter
def set_currency(value, arg):
return calculate_price(value, arg)
class ChangeCurrencyNode(template.Node):
def __init__(self, price, currency):
self.price = template.Variable(price)
self.currency = template.Variable(currency)
def render(self, context):
try:
return calculate_price(self.price.resolve(context),
self.currency.resolve(context))
except template.VariableDoesNotExist:
return ''
@register.tag(name='change_currency')
def change_currency(parser, token):
try:
tag_name, current_price, new_currency = token.split_contents()
except ValueError:
tag_name = token.contents.split()[0]
raise template.TemplateSyntaxError('%r tag requires exactly two arguments' % (tag_name))
return ChangeCurrencyNode(current_price, new_currency)
|
Use new-style exceptions in a TemplateSyntaxError
|
Use new-style exceptions in a TemplateSyntaxError
|
Python
|
bsd-3-clause
|
pathakamit88/django-currencies,mysociety/django-currencies,panosl/django-currencies,barseghyanartur/django-currencies,mysociety/django-currencies,panosl/django-currencies,racitup/django-currencies,marcosalcazar/django-currencies,marcosalcazar/django-currencies,pathakamit88/django-currencies,ydaniv/django-currencies,racitup/django-currencies,ydaniv/django-currencies,bashu/django-simple-currencies,bashu/django-simple-currencies,jmp0xf/django-currencies
|
from django import template
from django.template.defaultfilters import stringfilter
from currencies.models import Currency
from currencies.utils import calculate_price
register = template.Library()
@register.filter(name='currency')
@stringfilter
def set_currency(value, arg):
return calculate_price(value, arg)
class ChangeCurrencyNode(template.Node):
def __init__(self, price, currency):
self.price = template.Variable(price)
self.currency = template.Variable(currency)
def render(self, context):
try:
return calculate_price(self.price.resolve(context),
self.currency.resolve(context))
except template.VariableDoesNotExist:
return ''
@register.tag(name='change_currency')
def change_currency(parser, token):
try:
tag_name, current_price, new_currency = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError, \
'%r tag requires exactly two arguments' % token.contents.split()[0]
return ChangeCurrencyNode(current_price, new_currency)
Use new-style exceptions in a TemplateSyntaxError
|
from django import template
from django.template.defaultfilters import stringfilter
from currencies.models import Currency
from currencies.utils import calculate_price
register = template.Library()
@register.filter(name='currency')
@stringfilter
def set_currency(value, arg):
return calculate_price(value, arg)
class ChangeCurrencyNode(template.Node):
def __init__(self, price, currency):
self.price = template.Variable(price)
self.currency = template.Variable(currency)
def render(self, context):
try:
return calculate_price(self.price.resolve(context),
self.currency.resolve(context))
except template.VariableDoesNotExist:
return ''
@register.tag(name='change_currency')
def change_currency(parser, token):
try:
tag_name, current_price, new_currency = token.split_contents()
except ValueError:
tag_name = token.contents.split()[0]
raise template.TemplateSyntaxError('%r tag requires exactly two arguments' % (tag_name))
return ChangeCurrencyNode(current_price, new_currency)
|
<commit_before>from django import template
from django.template.defaultfilters import stringfilter
from currencies.models import Currency
from currencies.utils import calculate_price
register = template.Library()
@register.filter(name='currency')
@stringfilter
def set_currency(value, arg):
return calculate_price(value, arg)
class ChangeCurrencyNode(template.Node):
def __init__(self, price, currency):
self.price = template.Variable(price)
self.currency = template.Variable(currency)
def render(self, context):
try:
return calculate_price(self.price.resolve(context),
self.currency.resolve(context))
except template.VariableDoesNotExist:
return ''
@register.tag(name='change_currency')
def change_currency(parser, token):
try:
tag_name, current_price, new_currency = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError, \
'%r tag requires exactly two arguments' % token.contents.split()[0]
return ChangeCurrencyNode(current_price, new_currency)
<commit_msg>Use new-style exceptions in a TemplateSyntaxError<commit_after>
|
from django import template
from django.template.defaultfilters import stringfilter
from currencies.models import Currency
from currencies.utils import calculate_price
register = template.Library()
@register.filter(name='currency')
@stringfilter
def set_currency(value, arg):
return calculate_price(value, arg)
class ChangeCurrencyNode(template.Node):
def __init__(self, price, currency):
self.price = template.Variable(price)
self.currency = template.Variable(currency)
def render(self, context):
try:
return calculate_price(self.price.resolve(context),
self.currency.resolve(context))
except template.VariableDoesNotExist:
return ''
@register.tag(name='change_currency')
def change_currency(parser, token):
try:
tag_name, current_price, new_currency = token.split_contents()
except ValueError:
tag_name = token.contents.split()[0]
raise template.TemplateSyntaxError('%r tag requires exactly two arguments' % (tag_name))
return ChangeCurrencyNode(current_price, new_currency)
|
from django import template
from django.template.defaultfilters import stringfilter
from currencies.models import Currency
from currencies.utils import calculate_price
register = template.Library()
@register.filter(name='currency')
@stringfilter
def set_currency(value, arg):
return calculate_price(value, arg)
class ChangeCurrencyNode(template.Node):
def __init__(self, price, currency):
self.price = template.Variable(price)
self.currency = template.Variable(currency)
def render(self, context):
try:
return calculate_price(self.price.resolve(context),
self.currency.resolve(context))
except template.VariableDoesNotExist:
return ''
@register.tag(name='change_currency')
def change_currency(parser, token):
try:
tag_name, current_price, new_currency = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError, \
'%r tag requires exactly two arguments' % token.contents.split()[0]
return ChangeCurrencyNode(current_price, new_currency)
Use new-style exceptions in a TemplateSyntaxErrorfrom django import template
from django.template.defaultfilters import stringfilter
from currencies.models import Currency
from currencies.utils import calculate_price
register = template.Library()
@register.filter(name='currency')
@stringfilter
def set_currency(value, arg):
return calculate_price(value, arg)
class ChangeCurrencyNode(template.Node):
def __init__(self, price, currency):
self.price = template.Variable(price)
self.currency = template.Variable(currency)
def render(self, context):
try:
return calculate_price(self.price.resolve(context),
self.currency.resolve(context))
except template.VariableDoesNotExist:
return ''
@register.tag(name='change_currency')
def change_currency(parser, token):
try:
tag_name, current_price, new_currency = token.split_contents()
except ValueError:
tag_name = token.contents.split()[0]
raise template.TemplateSyntaxError('%r tag requires exactly two arguments' % (tag_name))
return ChangeCurrencyNode(current_price, new_currency)
|
<commit_before>from django import template
from django.template.defaultfilters import stringfilter
from currencies.models import Currency
from currencies.utils import calculate_price
register = template.Library()
@register.filter(name='currency')
@stringfilter
def set_currency(value, arg):
return calculate_price(value, arg)
class ChangeCurrencyNode(template.Node):
def __init__(self, price, currency):
self.price = template.Variable(price)
self.currency = template.Variable(currency)
def render(self, context):
try:
return calculate_price(self.price.resolve(context),
self.currency.resolve(context))
except template.VariableDoesNotExist:
return ''
@register.tag(name='change_currency')
def change_currency(parser, token):
try:
tag_name, current_price, new_currency = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError, \
'%r tag requires exactly two arguments' % token.contents.split()[0]
return ChangeCurrencyNode(current_price, new_currency)
<commit_msg>Use new-style exceptions in a TemplateSyntaxError<commit_after>from django import template
from django.template.defaultfilters import stringfilter
from currencies.models import Currency
from currencies.utils import calculate_price
register = template.Library()
@register.filter(name='currency')
@stringfilter
def set_currency(value, arg):
return calculate_price(value, arg)
class ChangeCurrencyNode(template.Node):
def __init__(self, price, currency):
self.price = template.Variable(price)
self.currency = template.Variable(currency)
def render(self, context):
try:
return calculate_price(self.price.resolve(context),
self.currency.resolve(context))
except template.VariableDoesNotExist:
return ''
@register.tag(name='change_currency')
def change_currency(parser, token):
try:
tag_name, current_price, new_currency = token.split_contents()
except ValueError:
tag_name = token.contents.split()[0]
raise template.TemplateSyntaxError('%r tag requires exactly two arguments' % (tag_name))
return ChangeCurrencyNode(current_price, new_currency)
|
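The fix swaps the Python 2-only raise statement for the call form, the only spelling Python 3 accepts. A self-contained sketch of the two syntaxes (exception class and message are illustrative):

class TagError(Exception):
    pass

def fail(tag_name):
    # Python 2 statement form, a SyntaxError on Python 3:
    #     raise TagError, '%r tag requires exactly two arguments' % tag_name
    # Call form, valid on both Python 2 and Python 3:
    raise TagError('%r tag requires exactly two arguments' % tag_name)

Splitting token.contents into tag_name first, as the new code does, also avoids repeating the split inside the format expression.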
4ca25c413494d43b9ecebcebca0ea79b213992a3
|
test_suite.py
|
test_suite.py
|
import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
import django # noqa
if django.__version__ >= (1, 7):
django.setup()
from django.core import management # noqa
management.call_command('test', 'tests')
|
import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
import django # noqa
if django.VERSION >= (1, 7):
django.setup()
from django.core import management # noqa
management.call_command('test', 'tests')
|
Fix name of Django version attribute
|
Fix name of Django version attribute
Signed-off-by: Byron Ruth <e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98@devel.io>
|
Python
|
bsd-2-clause
|
bruth/django-preserialize,scottp-dpaw/django-preserialize
|
import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
import django # noqa
if django.__version__ >= (1, 7):
django.setup()
from django.core import management # noqa
management.call_command('test', 'tests')
Fix name of Django version attribute
Signed-off-by: Byron Ruth <e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98@devel.io>
|
import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
import django # noqa
if django.VERSION >= (1, 7):
django.setup()
from django.core import management # noqa
management.call_command('test', 'tests')
|
<commit_before>import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
import django # noqa
if django.__version__ >= (1, 7):
django.setup()
from django.core import management # noqa
management.call_command('test', 'tests')
<commit_msg>Fix name of Django version attribute
Signed-off-by: Byron Ruth <e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98@devel.io><commit_after>
|
import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
import django # noqa
if django.VERSION >= (1, 7):
django.setup()
from django.core import management # noqa
management.call_command('test', 'tests')
|
import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
import django # noqa
if django.__version__ >= (1, 7):
django.setup()
from django.core import management # noqa
management.call_command('test', 'tests')
Fix name of Django version attribute
Signed-off-by: Byron Ruth <e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98@devel.io>import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
import django # noqa
if django.VERSION >= (1, 7):
django.setup()
from django.core import management # noqa
management.call_command('test', 'tests')
|
<commit_before>import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
import django # noqa
if django.__version__ >= (1, 7):
django.setup()
from django.core import management # noqa
management.call_command('test', 'tests')
<commit_msg>Fix name of Django version attribute
Signed-off-by: Byron Ruth <e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98@devel.io><commit_after>import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
import django # noqa
if django.VERSION >= (1, 7):
django.setup()
from django.core import management # noqa
management.call_command('test', 'tests')
|
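The underlying bug is a type mismatch: django.__version__ is the version string (for example '1.7.1'), so comparing it against a tuple is wrong, while django.VERSION is a tuple such as (1, 7, 1, 'final', 0) that compares element-wise. A short sketch, assuming Django is installed:

import django

# django.VERSION is a tuple, so tuple comparison orders versions
# correctly (1.10 sorts after 1.9, unlike string comparison).
if django.VERSION >= (1, 7):
    django.setup()  # required since 1.7 before using app models standalone

# django.__version__ is the string '1.7.1'; on Python 3 the comparison
# '1.7.1' >= (1, 7) raises TypeError rather than failing silently.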
22e1ce2348264ac2774697e5c56523dbd1b85b14
|
bmi_tester/tests_pytest/conftest.py
|
bmi_tester/tests_pytest/conftest.py
|
import os
import pytest
from scripting.contexts import cd
from . import Bmi, INPUT_FILE
from .utils import all_grids, all_names, out_names, strictly_input_names
@pytest.fixture(scope='module')
def new_bmi(infile=None):
try:
with open('.ROOT_DIR', 'r') as fp:
root_dir = fp.read()
except IOError:
root_dir = '.'
bmi = Bmi()
with cd(root_dir):
bmi.initialize(infile or INPUT_FILE)
return bmi
def pytest_runtest_setup(item):
print 'moving folders', item
# os.chdir('/Users/huttone/git/csdms/bmi-tester/_child_run')
def pytest_generate_tests(metafunc):
if 'gid' in metafunc.fixturenames:
metafunc.parametrize('gid', all_grids(new_bmi()))
elif 'var_name' in metafunc.fixturenames:
metafunc.parametrize('var_name', all_names(new_bmi()))
elif 'in_var_name' in metafunc.fixturenames:
metafunc.parametrize('in_var_name', strictly_input_names(new_bmi()))
elif 'out_var_name' in metafunc.fixturenames:
metafunc.parametrize('out_var_name', out_names(new_bmi()))
|
import os
import pytest
from scripting.contexts import cd
from . import Bmi, INPUT_FILE
from .utils import all_grids, all_names, out_names, strictly_input_names
@pytest.fixture
def new_bmi(infile=None):
try:
with open('.ROOT_DIR', 'r') as fp:
root_dir = fp.read()
except IOError:
root_dir = '.'
bmi = Bmi()
with cd(root_dir):
bmi.initialize(infile or INPUT_FILE)
return bmi
def pytest_runtest_setup(item):
print 'moving folders', item
# os.chdir('/Users/huttone/git/csdms/bmi-tester/_child_run')
def pytest_generate_tests(metafunc):
if 'gid' in metafunc.fixturenames:
metafunc.parametrize('gid', all_grids(new_bmi()))
elif 'var_name' in metafunc.fixturenames:
metafunc.parametrize('var_name', all_names(new_bmi()))
elif 'in_var_name' in metafunc.fixturenames:
metafunc.parametrize('in_var_name', strictly_input_names(new_bmi()))
elif 'out_var_name' in metafunc.fixturenames:
metafunc.parametrize('out_var_name', out_names(new_bmi()))
|
Change new_bmi fixture scope to be function level.
|
Change new_bmi fixture scope to be function level.
|
Python
|
mit
|
csdms/bmi-tester
|
import os
import pytest
from scripting.contexts import cd
from . import Bmi, INPUT_FILE
from .utils import all_grids, all_names, out_names, strictly_input_names
@pytest.fixture(scope='module')
def new_bmi(infile=None):
try:
with open('.ROOT_DIR', 'r') as fp:
root_dir = fp.read()
except IOError:
root_dir = '.'
bmi = Bmi()
with cd(root_dir):
bmi.initialize(infile or INPUT_FILE)
return bmi
def pytest_runtest_setup(item):
print 'moving folders', item
# os.chdir('/Users/huttone/git/csdms/bmi-tester/_child_run')
def pytest_generate_tests(metafunc):
if 'gid' in metafunc.fixturenames:
metafunc.parametrize('gid', all_grids(new_bmi()))
elif 'var_name' in metafunc.fixturenames:
metafunc.parametrize('var_name', all_names(new_bmi()))
elif 'in_var_name' in metafunc.fixturenames:
metafunc.parametrize('in_var_name', strictly_input_names(new_bmi()))
elif 'out_var_name' in metafunc.fixturenames:
metafunc.parametrize('out_var_name', out_names(new_bmi()))
Change new_bmi fixture scope to be function level.
|
import os
import pytest
from scripting.contexts import cd
from . import Bmi, INPUT_FILE
from .utils import all_grids, all_names, out_names, strictly_input_names
@pytest.fixture
def new_bmi(infile=None):
try:
with open('.ROOT_DIR', 'r') as fp:
root_dir = fp.read()
except IOError:
root_dir = '.'
bmi = Bmi()
with cd(root_dir):
bmi.initialize(infile or INPUT_FILE)
return bmi
def pytest_runtest_setup(item):
print 'moving folders', item
# os.chdir('/Users/huttone/git/csdms/bmi-tester/_child_run')
def pytest_generate_tests(metafunc):
if 'gid' in metafunc.fixturenames:
metafunc.parametrize('gid', all_grids(new_bmi()))
elif 'var_name' in metafunc.fixturenames:
metafunc.parametrize('var_name', all_names(new_bmi()))
elif 'in_var_name' in metafunc.fixturenames:
metafunc.parametrize('in_var_name', strictly_input_names(new_bmi()))
elif 'out_var_name' in metafunc.fixturenames:
metafunc.parametrize('out_var_name', out_names(new_bmi()))
|
<commit_before>import os
import pytest
from scripting.contexts import cd
from . import Bmi, INPUT_FILE
from .utils import all_grids, all_names, out_names, strictly_input_names
@pytest.fixture(scope='module')
def new_bmi(infile=None):
try:
with open('.ROOT_DIR', 'r') as fp:
root_dir = fp.read()
except IOError:
root_dir = '.'
bmi = Bmi()
with cd(root_dir):
bmi.initialize(infile or INPUT_FILE)
return bmi
def pytest_runtest_setup(item):
print 'moving folders', item
# os.chdir('/Users/huttone/git/csdms/bmi-tester/_child_run')
def pytest_generate_tests(metafunc):
if 'gid' in metafunc.fixturenames:
metafunc.parametrize('gid', all_grids(new_bmi()))
elif 'var_name' in metafunc.fixturenames:
metafunc.parametrize('var_name', all_names(new_bmi()))
elif 'in_var_name' in metafunc.fixturenames:
metafunc.parametrize('in_var_name', strictly_input_names(new_bmi()))
elif 'out_var_name' in metafunc.fixturenames:
metafunc.parametrize('out_var_name', out_names(new_bmi()))
<commit_msg>Change new_bmi fixture scope to be function level.<commit_after>
|
import os
import pytest
from scripting.contexts import cd
from . import Bmi, INPUT_FILE
from .utils import all_grids, all_names, out_names, strictly_input_names
@pytest.fixture
def new_bmi(infile=None):
try:
with open('.ROOT_DIR', 'r') as fp:
root_dir = fp.read()
except IOError:
root_dir = '.'
bmi = Bmi()
with cd(root_dir):
bmi.initialize(infile or INPUT_FILE)
return bmi
def pytest_runtest_setup(item):
print 'moving folders', item
# os.chdir('/Users/huttone/git/csdms/bmi-tester/_child_run')
def pytest_generate_tests(metafunc):
if 'gid' in metafunc.fixturenames:
metafunc.parametrize('gid', all_grids(new_bmi()))
elif 'var_name' in metafunc.fixturenames:
metafunc.parametrize('var_name', all_names(new_bmi()))
elif 'in_var_name' in metafunc.fixturenames:
metafunc.parametrize('in_var_name', strictly_input_names(new_bmi()))
elif 'out_var_name' in metafunc.fixturenames:
metafunc.parametrize('out_var_name', out_names(new_bmi()))
|
import os
import pytest
from scripting.contexts import cd
from . import Bmi, INPUT_FILE
from .utils import all_grids, all_names, out_names, strictly_input_names
@pytest.fixture(scope='module')
def new_bmi(infile=None):
try:
with open('.ROOT_DIR', 'r') as fp:
root_dir = fp.read()
except IOError:
root_dir = '.'
bmi = Bmi()
with cd(root_dir):
bmi.initialize(infile or INPUT_FILE)
return bmi
def pytest_runtest_setup(item):
print 'moving folders', item
# os.chdir('/Users/huttone/git/csdms/bmi-tester/_child_run')
def pytest_generate_tests(metafunc):
if 'gid' in metafunc.fixturenames:
metafunc.parametrize('gid', all_grids(new_bmi()))
elif 'var_name' in metafunc.fixturenames:
metafunc.parametrize('var_name', all_names(new_bmi()))
elif 'in_var_name' in metafunc.fixturenames:
metafunc.parametrize('in_var_name', strictly_input_names(new_bmi()))
elif 'out_var_name' in metafunc.fixturenames:
metafunc.parametrize('out_var_name', out_names(new_bmi()))
Change new_bmi fixture scope to be function level.import os
import pytest
from scripting.contexts import cd
from . import Bmi, INPUT_FILE
from .utils import all_grids, all_names, out_names, strictly_input_names
@pytest.fixture
def new_bmi(infile=None):
try:
with open('.ROOT_DIR', 'r') as fp:
root_dir = fp.read()
except IOError:
root_dir = '.'
bmi = Bmi()
with cd(root_dir):
bmi.initialize(infile or INPUT_FILE)
return bmi
def pytest_runtest_setup(item):
print 'moving folders', item
# os.chdir('/Users/huttone/git/csdms/bmi-tester/_child_run')
def pytest_generate_tests(metafunc):
if 'gid' in metafunc.fixturenames:
metafunc.parametrize('gid', all_grids(new_bmi()))
elif 'var_name' in metafunc.fixturenames:
metafunc.parametrize('var_name', all_names(new_bmi()))
elif 'in_var_name' in metafunc.fixturenames:
metafunc.parametrize('in_var_name', strictly_input_names(new_bmi()))
elif 'out_var_name' in metafunc.fixturenames:
metafunc.parametrize('out_var_name', out_names(new_bmi()))
|
<commit_before>import os
import pytest
from scripting.contexts import cd
from . import Bmi, INPUT_FILE
from .utils import all_grids, all_names, out_names, strictly_input_names
@pytest.fixture(scope='module')
def new_bmi(infile=None):
try:
with open('.ROOT_DIR', 'r') as fp:
root_dir = fp.read()
except IOError:
root_dir = '.'
bmi = Bmi()
with cd(root_dir):
bmi.initialize(infile or INPUT_FILE)
return bmi
def pytest_runtest_setup(item):
print 'moving folders', item
# os.chdir('/Users/huttone/git/csdms/bmi-tester/_child_run')
def pytest_generate_tests(metafunc):
if 'gid' in metafunc.fixturenames:
metafunc.parametrize('gid', all_grids(new_bmi()))
elif 'var_name' in metafunc.fixturenames:
metafunc.parametrize('var_name', all_names(new_bmi()))
elif 'in_var_name' in metafunc.fixturenames:
metafunc.parametrize('in_var_name', strictly_input_names(new_bmi()))
elif 'out_var_name' in metafunc.fixturenames:
metafunc.parametrize('out_var_name', out_names(new_bmi()))
<commit_msg>Change new_bmi fixture scope to be function level.<commit_after>import os
import pytest
from scripting.contexts import cd
from . import Bmi, INPUT_FILE
from .utils import all_grids, all_names, out_names, strictly_input_names
@pytest.fixture
def new_bmi(infile=None):
try:
with open('.ROOT_DIR', 'r') as fp:
root_dir = fp.read()
except IOError:
root_dir = '.'
bmi = Bmi()
with cd(root_dir):
bmi.initialize(infile or INPUT_FILE)
return bmi
def pytest_runtest_setup(item):
print 'moving folders', item
# os.chdir('/Users/huttone/git/csdms/bmi-tester/_child_run')
def pytest_generate_tests(metafunc):
if 'gid' in metafunc.fixturenames:
metafunc.parametrize('gid', all_grids(new_bmi()))
elif 'var_name' in metafunc.fixturenames:
metafunc.parametrize('var_name', all_names(new_bmi()))
elif 'in_var_name' in metafunc.fixturenames:
metafunc.parametrize('in_var_name', strictly_input_names(new_bmi()))
elif 'out_var_name' in metafunc.fixturenames:
metafunc.parametrize('out_var_name', out_names(new_bmi()))
|
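Dropping scope='module' returns the fixture to the default function scope, so each test initializes its own Bmi instead of sharing one per module. A sketch of the behavioural difference with a throwaway fixture:

import pytest

@pytest.fixture  # default scope='function': built fresh for every test
def fresh_list():
    return []

def test_one(fresh_list):
    fresh_list.append(1)
    assert fresh_list == [1]

def test_two(fresh_list):
    # Passes only because the fixture is function-scoped; with
    # scope='module' the append from test_one would still be visible.
    assert fresh_list == []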
c0355c30b6c7fe18a240a52656639a27b86d8528
|
examples/web/basecontrollers.py
|
examples/web/basecontrollers.py
|
#!/usr/bin/env python
from circuits.web import expose, Server
from circuits.web.controllers import BaseController
class Root(BaseController):
@expose("index")
def index(self):
return "Hello World!"
(Server(9000) + Root()).run()
|
#!/usr/bin/env python
from circuits.web import expose, Server
from circuits.web.controllers import BaseController
class Root(BaseController):
@expose("index")
def index(self):
return "Hello World!"
(Server(8000) + Root()).run()
|
Change default port to 8000
|
examples/web/basecontroller: Change default port to 8000
|
Python
|
mit
|
eriol/circuits,treemo/circuits,treemo/circuits,eriol/circuits,eriol/circuits,treemo/circuits,nizox/circuits
|
#!/usr/bin/env python
from circuits.web import expose, Server
from circuits.web.controllers import BaseController
class Root(BaseController):
@expose("index")
def index(self):
return "Hello World!"
(Server(9000) + Root()).run()
examples/web/basecontroller: Change default port to 8000
|
#!/usr/bin/env python
from circuits.web import expose, Server
from circuits.web.controllers import BaseController
class Root(BaseController):
@expose("index")
def index(self):
return "Hello World!"
(Server(8000) + Root()).run()
|
<commit_before>#!/usr/bin/env python
from circuits.web import expose, Server
from circuits.web.controllers import BaseController
class Root(BaseController):
@expose("index")
def index(self):
return "Hello World!"
(Server(9000) + Root()).run()
<commit_msg>examples/web/basecontroller: Change default port to 8000<commit_after>
|
#!/usr/bin/env python
from circuits.web import expose, Server
from circuits.web.controllers import BaseController
class Root(BaseController):
@expose("index")
def index(self):
return "Hello World!"
(Server(8000) + Root()).run()
|
#!/usr/bin/env python
from circuits.web import expose, Server
from circuits.web.controllers import BaseController
class Root(BaseController):
@expose("index")
def index(self):
return "Hello World!"
(Server(9000) + Root()).run()
examples/web/basecontroller: Change default port to 8000#!/usr/bin/env python
from circuits.web import expose, Server
from circuits.web.controllers import BaseController
class Root(BaseController):
@expose("index")
def index(self):
return "Hello World!"
(Server(8000) + Root()).run()
|
<commit_before>#!/usr/bin/env python
from circuits.web import expose, Server
from circuits.web.controllers import BaseController
class Root(BaseController):
@expose("index")
def index(self):
return "Hello World!"
(Server(9000) + Root()).run()
<commit_msg>examples/web/basecontroller: Change default port to 8000<commit_after>#!/usr/bin/env python
from circuits.web import expose, Server
from circuits.web.controllers import BaseController
class Root(BaseController):
@expose("index")
def index(self):
return "Hello World!"
(Server(8000) + Root()).run()
|
96329106d5b35ec9071d7695a13176ccee8e8ef1
|
face_off/settings/production.py
|
face_off/settings/production.py
|
from .base import *
import dj_database_url
if os.environ.get('DEBUG') == 'False':
DEBUG = False
else:
DEBUG = True
try:
from .local import *
except ImportError:
pass
ADMINS = ADMINS + (
)
ALLOWED_HOSTS = ['*']
DATABASES = {'default': dj_database_url.config()}
SOCIAL_AUTH_YAMMER_KEY = os.environ.get('SOCIAL_AUTH_YAMMER_KEY')
SOCIAL_AUTH_YAMMER_SECRET = os.environ.get('SOCIAL_AUTH_YAMMER_SECRET')
AWS_STORAGE_BUCKET_NAME = os.environ['AWS_STORAGE_BUCKET_NAME']
STATICFILES_STORAGE = 'core.storage.S3PipelineManifestStorage'
STATIC_URL = 'http://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
AWS_QUERYSTRING_AUTH = False
AWS_S3_FILE_OVERWRITE = True
PIPELINE_JS_COMPRESSOR = 'pipeline.compressors.yuglify.YuglifyCompressor'
PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.yuglify.YuglifyCompressor'
PIPELINE_YUGLIFY_BINARY = '/app/.heroku/python/bin/yuglify'
|
from .base import *
import dj_database_url
if os.environ.get('DEBUG') == 'False':
DEBUG = False
else:
DEBUG = True
try:
from .local import *
except ImportError:
pass
ADMINS = ADMINS + (
)
ALLOWED_HOSTS = ['*']
DATABASES = {'default': dj_database_url.config()}
SOCIAL_AUTH_YAMMER_KEY = os.environ.get('SOCIAL_AUTH_YAMMER_KEY')
SOCIAL_AUTH_YAMMER_SECRET = os.environ.get('SOCIAL_AUTH_YAMMER_SECRET')
SOCIAL_AUTH_REDIRECT_IS_HTTPS = os.environ.get('SOCIAL_AUTH_REDIRECT_IS_HTTPS', False)
AWS_STORAGE_BUCKET_NAME = os.environ['AWS_STORAGE_BUCKET_NAME']
STATICFILES_STORAGE = 'core.storage.S3PipelineManifestStorage'
STATIC_URL = 'http://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
AWS_QUERYSTRING_AUTH = False
AWS_S3_FILE_OVERWRITE = True
PIPELINE_JS_COMPRESSOR = 'pipeline.compressors.yuglify.YuglifyCompressor'
PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.yuglify.YuglifyCompressor'
PIPELINE_YUGLIFY_BINARY = '/app/.heroku/python/bin/yuglify'
|
Add support for https redirect
|
Add support for https redirect
|
Python
|
cc0-1.0
|
excellalabs/face-off,m3brown/face_it,m3brown/face_it,excellalabs/face-off,m3brown/face_it,excellalabs/face-off,excellalabs/face-off,m3brown/face_it
|
from .base import *
import dj_database_url
if os.environ.get('DEBUG') == 'False':
DEBUG = False
else:
DEBUG = True
try:
from .local import *
except ImportError:
pass
ADMINS = ADMINS + (
)
ALLOWED_HOSTS = ['*']
DATABASES = {'default': dj_database_url.config()}
SOCIAL_AUTH_YAMMER_KEY = os.environ.get('SOCIAL_AUTH_YAMMER_KEY')
SOCIAL_AUTH_YAMMER_SECRET = os.environ.get('SOCIAL_AUTH_YAMMER_SECRET')
AWS_STORAGE_BUCKET_NAME = os.environ['AWS_STORAGE_BUCKET_NAME']
STATICFILES_STORAGE = 'core.storage.S3PipelineManifestStorage'
STATIC_URL = 'http://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
AWS_QUERYSTRING_AUTH = False
AWS_S3_FILE_OVERWRITE = True
PIPELINE_JS_COMPRESSOR = 'pipeline.compressors.yuglify.YuglifyCompressor'
PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.yuglify.YuglifyCompressor'
PIPELINE_YUGLIFY_BINARY = '/app/.heroku/python/bin/yuglify'
Add support for https redirect
|
from .base import *
import dj_database_url
if os.environ.get('DEBUG') == 'False':
DEBUG = False
else:
DEBUG = True
try:
from .local import *
except ImportError:
pass
ADMINS = ADMINS + (
)
ALLOWED_HOSTS = ['*']
DATABASES = {'default': dj_database_url.config()}
SOCIAL_AUTH_YAMMER_KEY = os.environ.get('SOCIAL_AUTH_YAMMER_KEY')
SOCIAL_AUTH_YAMMER_SECRET = os.environ.get('SOCIAL_AUTH_YAMMER_SECRET')
SOCIAL_AUTH_REDIRECT_IS_HTTPS = os.environ.get('SOCIAL_AUTH_REDIRECT_IS_HTTPS', False)
AWS_STORAGE_BUCKET_NAME = os.environ['AWS_STORAGE_BUCKET_NAME']
STATICFILES_STORAGE = 'core.storage.S3PipelineManifestStorage'
STATIC_URL = 'http://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
AWS_QUERYSTRING_AUTH = False
AWS_S3_FILE_OVERWRITE = True
PIPELINE_JS_COMPRESSOR = 'pipeline.compressors.yuglify.YuglifyCompressor'
PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.yuglify.YuglifyCompressor'
PIPELINE_YUGLIFY_BINARY = '/app/.heroku/python/bin/yuglify'
|
<commit_before>from .base import *
import dj_database_url
if os.environ.get('DEBUG') == 'False':
DEBUG = False
else:
DEBUG = True
try:
from .local import *
except ImportError:
pass
ADMINS = ADMINS + (
)
ALLOWED_HOSTS = ['*']
DATABASES = {'default': dj_database_url.config()}
SOCIAL_AUTH_YAMMER_KEY = os.environ.get('SOCIAL_AUTH_YAMMER_KEY')
SOCIAL_AUTH_YAMMER_SECRET = os.environ.get('SOCIAL_AUTH_YAMMER_SECRET')
AWS_STORAGE_BUCKET_NAME = os.environ['AWS_STORAGE_BUCKET_NAME']
STATICFILES_STORAGE = 'core.storage.S3PipelineManifestStorage'
STATIC_URL = 'http://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
AWS_QUERYSTRING_AUTH = False
AWS_S3_FILE_OVERWRITE = True
PIPELINE_JS_COMPRESSOR = 'pipeline.compressors.yuglify.YuglifyCompressor'
PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.yuglify.YuglifyCompressor'
PIPELINE_YUGLIFY_BINARY = '/app/.heroku/python/bin/yuglify'
<commit_msg>Add support for https redirect<commit_after>
|
from .base import *
import dj_database_url
if os.environ.get('DEBUG') == 'False':
DEBUG = False
else:
DEBUG = True
try:
from .local import *
except ImportError:
pass
ADMINS = ADMINS + (
)
ALLOWED_HOSTS = ['*']
DATABASES = {'default': dj_database_url.config()}
SOCIAL_AUTH_YAMMER_KEY = os.environ.get('SOCIAL_AUTH_YAMMER_KEY')
SOCIAL_AUTH_YAMMER_SECRET = os.environ.get('SOCIAL_AUTH_YAMMER_SECRET')
SOCIAL_AUTH_REDIRECT_IS_HTTPS = os.environ.get('SOCIAL_AUTH_REDIRECT_IS_HTTPS', False)
AWS_STORAGE_BUCKET_NAME = os.environ['AWS_STORAGE_BUCKET_NAME']
STATICFILES_STORAGE = 'core.storage.S3PipelineManifestStorage'
STATIC_URL = 'http://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
AWS_QUERYSTRING_AUTH = False
AWS_S3_FILE_OVERWRITE = True
PIPELINE_JS_COMPRESSOR = 'pipeline.compressors.yuglify.YuglifyCompressor'
PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.yuglify.YuglifyCompressor'
PIPELINE_YUGLIFY_BINARY = '/app/.heroku/python/bin/yuglify'
|
from .base import *
import dj_database_url
if os.environ.get('DEBUG') == 'False':
DEBUG = False
else:
DEBUG = True
try:
from .local import *
except ImportError:
pass
ADMINS = ADMINS + (
)
ALLOWED_HOSTS = ['*']
DATABASES = {'default': dj_database_url.config()}
SOCIAL_AUTH_YAMMER_KEY = os.environ.get('SOCIAL_AUTH_YAMMER_KEY')
SOCIAL_AUTH_YAMMER_SECRET = os.environ.get('SOCIAL_AUTH_YAMMER_SECRET')
AWS_STORAGE_BUCKET_NAME = os.environ['AWS_STORAGE_BUCKET_NAME']
STATICFILES_STORAGE = 'core.storage.S3PipelineManifestStorage'
STATIC_URL = 'http://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
AWS_QUERYSTRING_AUTH = False
AWS_S3_FILE_OVERWRITE = True
PIPELINE_JS_COMPRESSOR = 'pipeline.compressors.yuglify.YuglifyCompressor'
PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.yuglify.YuglifyCompressor'
PIPELINE_YUGLIFY_BINARY = '/app/.heroku/python/bin/yuglify'
Add support for https redirectfrom .base import *
import dj_database_url
if os.environ.get('DEBUG') == 'False':
DEBUG = False
else:
DEBUG = True
try:
from .local import *
except ImportError:
pass
ADMINS = ADMINS + (
)
ALLOWED_HOSTS = ['*']
DATABASES = {'default': dj_database_url.config()}
SOCIAL_AUTH_YAMMER_KEY = os.environ.get('SOCIAL_AUTH_YAMMER_KEY')
SOCIAL_AUTH_YAMMER_SECRET = os.environ.get('SOCIAL_AUTH_YAMMER_SECRET')
SOCIAL_AUTH_REDIRECT_IS_HTTPS = os.environ.get('SOCIAL_AUTH_REDIRECT_IS_HTTPS', False)
AWS_STORAGE_BUCKET_NAME = os.environ['AWS_STORAGE_BUCKET_NAME']
STATICFILES_STORAGE = 'core.storage.S3PipelineManifestStorage'
STATIC_URL = 'http://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
AWS_QUERYSTRING_AUTH = False
AWS_S3_FILE_OVERWRITE = True
PIPELINE_JS_COMPRESSOR = 'pipeline.compressors.yuglify.YuglifyCompressor'
PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.yuglify.YuglifyCompressor'
PIPELINE_YUGLIFY_BINARY = '/app/.heroku/python/bin/yuglify'
|
<commit_before>from .base import *
import dj_database_url
if os.environ.get('DEBUG') == 'False':
DEBUG = False
else:
DEBUG = True
try:
from .local import *
except ImportError:
pass
ADMINS = ADMINS + (
)
ALLOWED_HOSTS = ['*']
DATABASES = {'default': dj_database_url.config()}
SOCIAL_AUTH_YAMMER_KEY = os.environ.get('SOCIAL_AUTH_YAMMER_KEY')
SOCIAL_AUTH_YAMMER_SECRET = os.environ.get('SOCIAL_AUTH_YAMMER_SECRET')
AWS_STORAGE_BUCKET_NAME = os.environ['AWS_STORAGE_BUCKET_NAME']
STATICFILES_STORAGE = 'core.storage.S3PipelineManifestStorage'
STATIC_URL = 'http://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
AWS_QUERYSTRING_AUTH = False
AWS_S3_FILE_OVERWRITE = True
PIPELINE_JS_COMPRESSOR = 'pipeline.compressors.yuglify.YuglifyCompressor'
PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.yuglify.YuglifyCompressor'
PIPELINE_YUGLIFY_BINARY = '/app/.heroku/python/bin/yuglify'
<commit_msg>Add support for https redirect<commit_after>from .base import *
import dj_database_url
if os.environ.get('DEBUG') == 'False':
DEBUG = False
else:
DEBUG = True
try:
from .local import *
except ImportError:
pass
ADMINS = ADMINS + (
)
ALLOWED_HOSTS = ['*']
DATABASES = {'default': dj_database_url.config()}
SOCIAL_AUTH_YAMMER_KEY = os.environ.get('SOCIAL_AUTH_YAMMER_KEY')
SOCIAL_AUTH_YAMMER_SECRET = os.environ.get('SOCIAL_AUTH_YAMMER_SECRET')
SOCIAL_AUTH_REDIRECT_IS_HTTPS = os.environ.get('SOCIAL_AUTH_REDIRECT_IS_HTTPS', False)
AWS_STORAGE_BUCKET_NAME = os.environ['AWS_STORAGE_BUCKET_NAME']
STATICFILES_STORAGE = 'core.storage.S3PipelineManifestStorage'
STATIC_URL = 'http://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
AWS_QUERYSTRING_AUTH = False
AWS_S3_FILE_OVERWRITE = True
PIPELINE_JS_COMPRESSOR = 'pipeline.compressors.yuglify.YuglifyCompressor'
PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.yuglify.YuglifyCompressor'
PIPELINE_YUGLIFY_BINARY = '/app/.heroku/python/bin/yuglify'
|
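One caveat with the new setting: os.environ.get always returns a string when the variable is set, so exporting SOCIAL_AUTH_REDIRECT_IS_HTTPS=False yields the truthy string 'False'. A defensive parsing sketch (the env_bool helper is hypothetical, not part of the project):

import os

def env_bool(name, default=False):
    # Accept common spellings of truth; everything else, including
    # 'False', '0' and the empty string, is treated as False.
    value = os.environ.get(name)
    if value is None:
        return default
    return value.strip().lower() in ('1', 'true', 'yes', 'on')

SOCIAL_AUTH_REDIRECT_IS_HTTPS = env_bool('SOCIAL_AUTH_REDIRECT_IS_HTTPS')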
a6cb8d3c2d79b609a6d5d0550af57aa2b9328f7f
|
mopidy_vkontakte/actor.py
|
mopidy_vkontakte/actor.py
|
from __future__ import unicode_literals
import logging
import pykka
from mopidy.backends import base
from .library import VKLibraryProvider
from .playlists import VKPlaylistsProvider
from .session import VKSession
logger = logging.getLogger('mopidy.backends.vkontakte.actor')
class VKBackend(pykka.ThreadingActor, base.Backend):
def __init__(self, config, audio):
super(VKBackend, self).__init__()
self.config = config
self.session = VKSession(config=self.config)
self.library = VKLibraryProvider(backend=self)
self.playback = VKPlaybackProvider(audio=audio, backend=self)
self.playlists = VKPlaylistsProvider(backend=self)
self.uri_schemes = ['vkontakte']
class VKPlaybackProvider(base.BasePlaybackProvider):
def play(self, track):
return super(VKPlaybackProvider, self).play(track)
|
from __future__ import unicode_literals
import logging
import pykka
from mopidy.backends import base
from .library import VKLibraryProvider
from .playlists import VKPlaylistsProvider
from .session import VKSession
logger = logging.getLogger('mopidy.backends.vkontakte.actor')
class VKBackend(pykka.ThreadingActor, base.Backend):
def __init__(self, config, audio):
super(VKBackend, self).__init__()
self.config = config
self.session = VKSession(config=self.config)
self.library = VKLibraryProvider(backend=self)
self.playback = base.BasePlaybackProvider(audio=audio, backend=self)
self.playlists = VKPlaylistsProvider(backend=self)
self.uri_schemes = ['vkontakte']
|
Remove PlaybackProvider that does nothing
|
Remove PlaybackProvider that does nothing
|
Python
|
apache-2.0
|
sibuser/mopidy-vkontakte
|
from __future__ import unicode_literals
import logging
import pykka
from mopidy.backends import base
from .library import VKLibraryProvider
from .playlists import VKPlaylistsProvider
from .session import VKSession
logger = logging.getLogger('mopidy.backends.vkontakte.actor')
class VKBackend(pykka.ThreadingActor, base.Backend):
def __init__(self, config, audio):
super(VKBackend, self).__init__()
self.config = config
self.session = VKSession(config=self.config)
self.library = VKLibraryProvider(backend=self)
self.playback = VKPlaybackProvider(audio=audio, backend=self)
self.playlists = VKPlaylistsProvider(backend=self)
self.uri_schemes = ['vkontakte']
class VKPlaybackProvider(base.BasePlaybackProvider):
def play(self, track):
return super(VKPlaybackProvider, self).play(track)
Remove PlaybackProvider that does nothing
|
from __future__ import unicode_literals
import logging
import pykka
from mopidy.backends import base
from .library import VKLibraryProvider
from .playlists import VKPlaylistsProvider
from .session import VKSession
logger = logging.getLogger('mopidy.backends.vkontakte.actor')
class VKBackend(pykka.ThreadingActor, base.Backend):
def __init__(self, config, audio):
super(VKBackend, self).__init__()
self.config = config
self.session = VKSession(config=self.config)
self.library = VKLibraryProvider(backend=self)
self.playback = base.BasePlaybackProvider(audio=audio, backend=self)
self.playlists = VKPlaylistsProvider(backend=self)
self.uri_schemes = ['vkontakte']
|
<commit_before>from __future__ import unicode_literals
import logging
import pykka
from mopidy.backends import base
from .library import VKLibraryProvider
from .playlists import VKPlaylistsProvider
from .session import VKSession
logger = logging.getLogger('mopidy.backends.vkontakte.actor')
class VKBackend(pykka.ThreadingActor, base.Backend):
def __init__(self, config, audio):
super(VKBackend, self).__init__()
self.config = config
self.session = VKSession(config=self.config)
self.library = VKLibraryProvider(backend=self)
self.playback = VKPlaybackProvider(audio=audio, backend=self)
self.playlists = VKPlaylistsProvider(backend=self)
self.uri_schemes = ['vkontakte']
class VKPlaybackProvider(base.BasePlaybackProvider):
def play(self, track):
return super(VKPlaybackProvider, self).play(track)
<commit_msg>Remove PlaybackProvider that does nothing<commit_after>
|
from __future__ import unicode_literals
import logging
import pykka
from mopidy.backends import base
from .library import VKLibraryProvider
from .playlists import VKPlaylistsProvider
from .session import VKSession
logger = logging.getLogger('mopidy.backends.vkontakte.actor')
class VKBackend(pykka.ThreadingActor, base.Backend):
def __init__(self, config, audio):
super(VKBackend, self).__init__()
self.config = config
self.session = VKSession(config=self.config)
self.library = VKLibraryProvider(backend=self)
self.playback = base.BasePlaybackProvider(audio=audio, backend=self)
self.playlists = VKPlaylistsProvider(backend=self)
self.uri_schemes = ['vkontakte']
|
from __future__ import unicode_literals
import logging
import pykka
from mopidy.backends import base
from .library import VKLibraryProvider
from .playlists import VKPlaylistsProvider
from .session import VKSession
logger = logging.getLogger('mopidy.backends.vkontakte.actor')
class VKBackend(pykka.ThreadingActor, base.Backend):
def __init__(self, config, audio):
super(VKBackend, self).__init__()
self.config = config
self.session = VKSession(config=self.config)
self.library = VKLibraryProvider(backend=self)
self.playback = VKPlaybackProvider(audio=audio, backend=self)
self.playlists = VKPlaylistsProvider(backend=self)
self.uri_schemes = ['vkontakte']
class VKPlaybackProvider(base.BasePlaybackProvider):
def play(self, track):
return super(VKPlaybackProvider, self).play(track)
Remove PlaybackProvider that does nothingfrom __future__ import unicode_literals
import logging
import pykka
from mopidy.backends import base
from .library import VKLibraryProvider
from .playlists import VKPlaylistsProvider
from .session import VKSession
logger = logging.getLogger('mopidy.backends.vkontakte.actor')
class VKBackend(pykka.ThreadingActor, base.Backend):
def __init__(self, config, audio):
super(VKBackend, self).__init__()
self.config = config
self.session = VKSession(config=self.config)
self.library = VKLibraryProvider(backend=self)
self.playback = base.BasePlaybackProvider(audio=audio, backend=self)
self.playlists = VKPlaylistsProvider(backend=self)
self.uri_schemes = ['vkontakte']
|
<commit_before>from __future__ import unicode_literals
import logging
import pykka
from mopidy.backends import base
from .library import VKLibraryProvider
from .playlists import VKPlaylistsProvider
from .session import VKSession
logger = logging.getLogger('mopidy.backends.vkontakte.actor')
class VKBackend(pykka.ThreadingActor, base.Backend):
def __init__(self, config, audio):
super(VKBackend, self).__init__()
self.config = config
self.session = VKSession(config=self.config)
self.library = VKLibraryProvider(backend=self)
self.playback = VKPlaybackProvider(audio=audio, backend=self)
self.playlists = VKPlaylistsProvider(backend=self)
self.uri_schemes = ['vkontakte']
class VKPlaybackProvider(base.BasePlaybackProvider):
def play(self, track):
return super(VKPlaybackProvider, self).play(track)
<commit_msg>Remove PlaybackProvider that does nothing<commit_after>from __future__ import unicode_literals
import logging
import pykka
from mopidy.backends import base
from .library import VKLibraryProvider
from .playlists import VKPlaylistsProvider
from .session import VKSession
logger = logging.getLogger('mopidy.backends.vkontakte.actor')
class VKBackend(pykka.ThreadingActor, base.Backend):
def __init__(self, config, audio):
super(VKBackend, self).__init__()
self.config = config
self.session = VKSession(config=self.config)
self.library = VKLibraryProvider(backend=self)
self.playback = base.BasePlaybackProvider(audio=audio, backend=self)
self.playlists = VKPlaylistsProvider(backend=self)
self.uri_schemes = ['vkontakte']
|
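The deleted subclass overrode play() only to call the parent implementation with the same arguments, which changes nothing, so the base provider can be instantiated directly. A compact illustration with throwaway classes:

class Base(object):
    def play(self, track):
        return 'playing %s' % track

class NoOpOverride(Base):
    def play(self, track):
        # Adds no behaviour: it just forwards to the parent.
        return super(NoOpOverride, self).play(track)

# Both objects behave identically, so the subclass is dead weight.
assert Base().play('t') == NoOpOverride().play('t')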
fc762ed1183e5a6a97e0ed6d823227bf486c951e
|
ovp_organizations/serializers.py
|
ovp_organizations/serializers.py
|
from django.core.exceptions import ValidationError
from ovp_core import validators as core_validators
from ovp_core.serializers import GoogleAddressSerializer, GoogleAddressCityStateSerializer
from ovp_organizations import models
from rest_framework import serializers
from rest_framework import permissions
class OrganizationCreateSerializer(serializers.ModelSerializer):
address = GoogleAddressSerializer(
validators=[core_validators.address_validate]
)
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
def create(self, validated_data):
# Address
address_data = validated_data.pop('address', {})
address_sr = GoogleAddressSerializer(data=address_data)
address = address_sr.create(address_data)
validated_data['address'] = address
# Organization
organization = models.Organization.objects.create(**validated_data)
return organization
#class NonprofitUpdateSerializer(NonprofitCreateSerializer):
# class Meta:
# model = models.Nonprofit
# permission_classes = (permissions.IsAuthenticated,)
# fields = ['name', 'image', 'cover', 'details', 'description', 'website', 'facebook_page', 'google_page', 'twitter_handle']
class OrganizationSearchSerializer(serializers.ModelSerializer):
address = GoogleAddressCityStateSerializer()
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
|
from django.core.exceptions import ValidationError
from ovp_core import validators as core_validators
from ovp_core.serializers import GoogleAddressSerializer, GoogleAddressCityStateSerializer
from ovp_organizations import models
from rest_framework import serializers
from rest_framework import permissions
class OrganizationCreateSerializer(serializers.ModelSerializer):
address = GoogleAddressSerializer(
validators=[core_validators.address_validate],
required=False,
)
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
def create(self, validated_data):
# Address
address_data = validated_data.pop('address', None)
if address_data:
address_sr = GoogleAddressSerializer(data=address_data)
address = address_sr.create(address_data)
validated_data['address'] = address
# Organization
organization = models.Organization.objects.create(**validated_data)
return organization
#class NonprofitUpdateSerializer(NonprofitCreateSerializer):
# class Meta:
# model = models.Nonprofit
# permission_classes = (permissions.IsAuthenticated,)
# fields = ['name', 'image', 'cover', 'details', 'description', 'website', 'facebook_page', 'google_page', 'twitter_handle']
class OrganizationSearchSerializer(serializers.ModelSerializer):
address = GoogleAddressCityStateSerializer()
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
|
Make address not required on OrganizationSerializer
|
Make address not required on OrganizationSerializer
|
Python
|
agpl-3.0
|
OpenVolunteeringPlatform/django-ovp-organizations,OpenVolunteeringPlatform/django-ovp-organizations
|
from django.core.exceptions import ValidationError
from ovp_core import validators as core_validators
from ovp_core.serializers import GoogleAddressSerializer, GoogleAddressCityStateSerializer
from ovp_organizations import models
from rest_framework import serializers
from rest_framework import permissions
class OrganizationCreateSerializer(serializers.ModelSerializer):
address = GoogleAddressSerializer(
validators=[core_validators.address_validate]
)
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
def create(self, validated_data):
# Address
address_data = validated_data.pop('address', {})
address_sr = GoogleAddressSerializer(data=address_data)
address = address_sr.create(address_data)
validated_data['address'] = address
# Organization
organization = models.Organization.objects.create(**validated_data)
return organization
#class NonprofitUpdateSerializer(NonprofitCreateSerializer):
# class Meta:
# model = models.Nonprofit
# permission_classes = (permissions.IsAuthenticated,)
# fields = ['name', 'image', 'cover', 'details', 'description', 'website', 'facebook_page', 'google_page', 'twitter_handle']
class OrganizationSearchSerializer(serializers.ModelSerializer):
address = GoogleAddressCityStateSerializer()
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
Make address not required on OrganizationSerializer
|
from django.core.exceptions import ValidationError
from ovp_core import validators as core_validators
from ovp_core.serializers import GoogleAddressSerializer, GoogleAddressCityStateSerializer
from ovp_organizations import models
from rest_framework import serializers
from rest_framework import permissions
class OrganizationCreateSerializer(serializers.ModelSerializer):
address = GoogleAddressSerializer(
validators=[core_validators.address_validate],
required=False,
)
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
def create(self, validated_data):
# Address
address_data = validated_data.pop('address', None)
if address_data:
address_sr = GoogleAddressSerializer(data=address_data)
address = address_sr.create(address_data)
validated_data['address'] = address
# Organization
organization = models.Organization.objects.create(**validated_data)
return organization
#class NonprofitUpdateSerializer(NonprofitCreateSerializer):
# class Meta:
# model = models.Nonprofit
# permission_classes = (permissions.IsAuthenticated,)
# fields = ['name', 'image', 'cover', 'details', 'description', 'website', 'facebook_page', 'google_page', 'twitter_handle']
class OrganizationSearchSerializer(serializers.ModelSerializer):
address = GoogleAddressCityStateSerializer()
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
|
<commit_before>from django.core.exceptions import ValidationError
from ovp_core import validators as core_validators
from ovp_core.serializers import GoogleAddressSerializer, GoogleAddressCityStateSerializer
from ovp_organizations import models
from rest_framework import serializers
from rest_framework import permissions
class OrganizationCreateSerializer(serializers.ModelSerializer):
address = GoogleAddressSerializer(
validators=[core_validators.address_validate]
)
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
def create(self, validated_data):
# Address
address_data = validated_data.pop('address', {})
address_sr = GoogleAddressSerializer(data=address_data)
address = address_sr.create(address_data)
validated_data['address'] = address
# Organization
organization = models.Organization.objects.create(**validated_data)
return organization
#class NonprofitUpdateSerializer(NonprofitCreateSerializer):
# class Meta:
# model = models.Nonprofit
# permission_classes = (permissions.IsAuthenticated,)
# fields = ['name', 'image', 'cover', 'details', 'description', 'website', 'facebook_page', 'google_page', 'twitter_handle']
class OrganizationSearchSerializer(serializers.ModelSerializer):
address = GoogleAddressCityStateSerializer()
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
<commit_msg>Make address not required on OrganizationSerializer<commit_after>
|
from django.core.exceptions import ValidationError
from ovp_core import validators as core_validators
from ovp_core.serializers import GoogleAddressSerializer, GoogleAddressCityStateSerializer
from ovp_organizations import models
from rest_framework import serializers
from rest_framework import permissions
class OrganizationCreateSerializer(serializers.ModelSerializer):
address = GoogleAddressSerializer(
validators=[core_validators.address_validate],
required=False,
)
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
def create(self, validated_data):
# Address
address_data = validated_data.pop('address', None)
if address_data:
address_sr = GoogleAddressSerializer(data=address_data)
address = address_sr.create(address_data)
validated_data['address'] = address
# Organization
organization = models.Organization.objects.create(**validated_data)
return organization
#class NonprofitUpdateSerializer(NonprofitCreateSerializer):
# class Meta:
# model = models.Nonprofit
# permission_classes = (permissions.IsAuthenticated,)
# fields = ['name', 'image', 'cover', 'details', 'description', 'website', 'facebook_page', 'google_page', 'twitter_handle']
class OrganizationSearchSerializer(serializers.ModelSerializer):
address = GoogleAddressCityStateSerializer()
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
|
from django.core.exceptions import ValidationError
from ovp_core import validators as core_validators
from ovp_core.serializers import GoogleAddressSerializer, GoogleAddressCityStateSerializer
from ovp_organizations import models
from rest_framework import serializers
from rest_framework import permissions
class OrganizationCreateSerializer(serializers.ModelSerializer):
address = GoogleAddressSerializer(
validators=[core_validators.address_validate]
)
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
def create(self, validated_data):
# Address
address_data = validated_data.pop('address', {})
address_sr = GoogleAddressSerializer(data=address_data)
address = address_sr.create(address_data)
validated_data['address'] = address
# Organization
organization = models.Organization.objects.create(**validated_data)
return organization
#class NonprofitUpdateSerializer(NonprofitCreateSerializer):
# class Meta:
# model = models.Nonprofit
# permission_classes = (permissions.IsAuthenticated,)
# fields = ['name', 'image', 'cover', 'details', 'description', 'website', 'facebook_page', 'google_page', 'twitter_handle']
class OrganizationSearchSerializer(serializers.ModelSerializer):
address = GoogleAddressCityStateSerializer()
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
Make address not required on OrganizationSerializerfrom django.core.exceptions import ValidationError
from ovp_core import validators as core_validators
from ovp_core.serializers import GoogleAddressSerializer, GoogleAddressCityStateSerializer
from ovp_organizations import models
from rest_framework import serializers
from rest_framework import permissions
class OrganizationCreateSerializer(serializers.ModelSerializer):
address = GoogleAddressSerializer(
validators=[core_validators.address_validate],
required=False,
)
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
def create(self, validated_data):
# Address
address_data = validated_data.pop('address', None)
if address_data:
address_sr = GoogleAddressSerializer(data=address_data)
address = address_sr.create(address_data)
validated_data['address'] = address
# Organization
organization = models.Organization.objects.create(**validated_data)
return organization
#class NonprofitUpdateSerializer(NonprofitCreateSerializer):
# class Meta:
# model = models.Nonprofit
# permission_classes = (permissions.IsAuthenticated,)
# fields = ['name', 'image', 'cover', 'details', 'description', 'website', 'facebook_page', 'google_page', 'twitter_handle']
class OrganizationSearchSerializer(serializers.ModelSerializer):
address = GoogleAddressCityStateSerializer()
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
|
<commit_before>from django.core.exceptions import ValidationError
from ovp_core import validators as core_validators
from ovp_core.serializers import GoogleAddressSerializer, GoogleAddressCityStateSerializer
from ovp_organizations import models
from rest_framework import serializers
from rest_framework import permissions
class OrganizationCreateSerializer(serializers.ModelSerializer):
address = GoogleAddressSerializer(
validators=[core_validators.address_validate]
)
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
def create(self, validated_data):
# Address
address_data = validated_data.pop('address', {})
address_sr = GoogleAddressSerializer(data=address_data)
address = address_sr.create(address_data)
validated_data['address'] = address
# Organization
organization = models.Organization.objects.create(**validated_data)
return organization
#class NonprofitUpdateSerializer(NonprofitCreateSerializer):
# class Meta:
# model = models.Nonprofit
# permission_classes = (permissions.IsAuthenticated,)
# fields = ['name', 'image', 'cover', 'details', 'description', 'websitefacebook_page', 'google_page', 'twitter_handle']
class OrganizationSearchSerializer(serializers.ModelSerializer):
address = GoogleAddressCityStateSerializer()
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
<commit_msg>Make address not required on OrganizationSerializer<commit_after>from django.core.exceptions import ValidationError
from ovp_core import validators as core_validators
from ovp_core.serializers import GoogleAddressSerializer, GoogleAddressCityStateSerializer
from ovp_organizations import models
from rest_framework import serializers
from rest_framework import permissions
class OrganizationCreateSerializer(serializers.ModelSerializer):
address = GoogleAddressSerializer(
validators=[core_validators.address_validate],
required=False,
)
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
def create(self, validated_data):
# Address
address_data = validated_data.pop('address', None)
if address_data:
address_sr = GoogleAddressSerializer(data=address_data)
address = address_sr.create(address_data)
validated_data['address'] = address
# Organization
organization = models.Organization.objects.create(**validated_data)
return organization
#class NonprofitUpdateSerializer(NonprofitCreateSerializer):
# class Meta:
# model = models.Nonprofit
# permission_classes = (permissions.IsAuthenticated,)
# fields = ['name', 'image', 'cover', 'details', 'description', 'websitefacebook_page', 'google_page', 'twitter_handle']
class OrganizationSearchSerializer(serializers.ModelSerializer):
address = GoogleAddressCityStateSerializer()
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
|
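A note on the record above: with required=False on the nested address field, a payload may omit the address entirely, and create() then skips address creation because pop('address', None) returns None. A minimal usage sketch under that assumption; the import path and field values below are illustrative, not taken from the source, and other required model fields (e.g. owner) may still apply in a real project:

from ovp_organizations.serializers import OrganizationCreateSerializer  # assumed import path

data = {
    'name': 'Example Org',              # illustrative values only
    'details': 'short details',
    'description': 'longer description',
    'type': 0,
}
serializer = OrganizationCreateSerializer(data=data)   # no 'address' key at all
if serializer.is_valid():
    organization = serializer.save()    # create() runs without building an address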
5d5098db8e5a3b60cbba77aa04035bc35e3f1726
|
db_logger.py
|
db_logger.py
|
import threading
import time
import accounts
import args
import config
MAX_TEXT_LENGTH = 1024
enabled = bool(args.args['database'])
if enabled:
import MySQLdb
connected = False
conn = None
cur = None
db_lock = threading.RLock()
def _connect():
global conn, cur, connected
if not connected:
conn = MySQLdb.connect(host=config.get('db_logger.host'), user=config.get('db_logger.username'), password=config.get('db_logger.password'),
database=config.get('db_logger.database'), charset='utf8mb4')
cur = conn.cursor()
connected = True
def log(message, kind, text_msg=None):
global connected, enabled
if enabled:
if not config.get('db_logger.host') or not config.get('db_logger.database'):
print('Incorrect database configuration!')
enabled = False
return
with db_lock:
try:
_connect()
if text_msg is None:
text_msg = message
text_msg = text_msg[:MAX_TEXT_LENGTH]
cur.execute('INSERT INTO vkbot_logmessage VALUES (NULL, %s, %s, NOW(), %s, %s)', (message, kind, text_msg, accounts.current_account))
conn.commit()
except MySQLdb.Error as e:
print(e, flush=True)
time.sleep(5)
connected = False
log(message, kind, text_msg)
|
import threading
import time
import accounts
import args
import config
import log as _log
MAX_TEXT_LENGTH = 1024
enabled = bool(args.args['database'])
if enabled:
import MySQLdb
connected = False
conn = None
cur = None
db_lock = threading.RLock()
def _connect():
global conn, cur, connected
if not connected:
conn = MySQLdb.connect(host=config.get('db_logger.host'), user=config.get('db_logger.username'), password=config.get('db_logger.password'),
database=config.get('db_logger.database'), charset='utf8mb4')
cur = conn.cursor()
connected = True
def log(message, kind, text_msg=None):
global connected, enabled
if enabled:
if not config.get('db_logger.host') or not config.get('db_logger.database'):
print('Incorrect database configuration!')
enabled = False
return
with db_lock:
try:
_connect()
if text_msg is None:
text_msg = message
text_msg = text_msg[:MAX_TEXT_LENGTH]
cur.execute('INSERT INTO vkbot_logmessage VALUES (NULL, %s, %s, NOW(), %s, %s)', (message, kind, text_msg, accounts.current_account))
conn.commit()
except MySQLdb.Error as e:
print(e, flush=True)
_log.write('error', 'MySQL error: ' + str(e))
time.sleep(5)
connected = False
log(message, kind, text_msg)
|
Write db errors to error.log
|
Write db errors to error.log
|
Python
|
mit
|
kalinochkind/vkbot,kalinochkind/vkbot,kalinochkind/vkbot
|
import threading
import time
import accounts
import args
import config
MAX_TEXT_LENGTH = 1024
enabled = bool(args.args['database'])
if enabled:
import MySQLdb
connected = False
conn = None
cur = None
db_lock = threading.RLock()
def _connect():
global conn, cur, connected
if not connected:
conn = MySQLdb.connect(host=config.get('db_logger.host'), user=config.get('db_logger.username'), password=config.get('db_logger.password'),
database=config.get('db_logger.database'), charset='utf8mb4')
cur = conn.cursor()
connected = True
def log(message, kind, text_msg=None):
global connected, enabled
if enabled:
if not config.get('db_logger.host') or not config.get('db_logger.database'):
print('Incorrect database configuration!')
enabled = False
return
with db_lock:
try:
_connect()
if text_msg is None:
text_msg = message
text_msg = text_msg[:MAX_TEXT_LENGTH]
cur.execute('INSERT INTO vkbot_logmessage VALUES (NULL, %s, %s, NOW(), %s, %s)', (message, kind, text_msg, accounts.current_account))
conn.commit()
except MySQLdb.Error as e:
print(e, flush=True)
time.sleep(5)
connected = False
log(message, kind, text_msg)
Write db errors to error.log
|
import threading
import time
import accounts
import args
import config
import log as _log
MAX_TEXT_LENGTH = 1024
enabled = bool(args.args['database'])
if enabled:
import MySQLdb
connected = False
conn = None
cur = None
db_lock = threading.RLock()
def _connect():
global conn, cur, connected
if not connected:
conn = MySQLdb.connect(host=config.get('db_logger.host'), user=config.get('db_logger.username'), password=config.get('db_logger.password'),
database=config.get('db_logger.database'), charset='utf8mb4')
cur = conn.cursor()
connected = True
def log(message, kind, text_msg=None):
global connected, enabled
if enabled:
if not config.get('db_logger.host') or not config.get('db_logger.database'):
print('Incorrect database configuration!')
enabled = False
return
with db_lock:
try:
_connect()
if text_msg is None:
text_msg = message
text_msg = text_msg[:MAX_TEXT_LENGTH]
cur.execute('INSERT INTO vkbot_logmessage VALUES (NULL, %s, %s, NOW(), %s, %s)', (message, kind, text_msg, accounts.current_account))
conn.commit()
except MySQLdb.Error as e:
print(e, flush=True)
_log.write('error', 'MySQL error: ' + str(e))
time.sleep(5)
connected = False
log(message, kind, text_msg)
|
<commit_before>import threading
import time
import accounts
import args
import config
MAX_TEXT_LENGTH = 1024
enabled = bool(args.args['database'])
if enabled:
import MySQLdb
connected = False
conn = None
cur = None
db_lock = threading.RLock()
def _connect():
global conn, cur, connected
if not connected:
conn = MySQLdb.connect(host=config.get('db_logger.host'), user=config.get('db_logger.username'), password=config.get('db_logger.password'),
database=config.get('db_logger.database'), charset='utf8mb4')
cur = conn.cursor()
connected = True
def log(message, kind, text_msg=None):
global connected, enabled
if enabled:
if not config.get('db_logger.host') or not config.get('db_logger.database'):
print('Incorrect database configuration!')
enabled = False
return
with db_lock:
try:
_connect()
if text_msg is None:
text_msg = message
text_msg = text_msg[:MAX_TEXT_LENGTH]
cur.execute('INSERT INTO vkbot_logmessage VALUES (NULL, %s, %s, NOW(), %s, %s)', (message, kind, text_msg, accounts.current_account))
conn.commit()
except MySQLdb.Error as e:
print(e, flush=True)
time.sleep(5)
connected = False
log(message, kind, text_msg)
<commit_msg>Write db errors to error.log<commit_after>
|
import threading
import time
import accounts
import args
import config
import log as _log
MAX_TEXT_LENGTH = 1024
enabled = bool(args.args['database'])
if enabled:
import MySQLdb
connected = False
conn = None
cur = None
db_lock = threading.RLock()
def _connect():
global conn, cur, connected
if not connected:
conn = MySQLdb.connect(host=config.get('db_logger.host'), user=config.get('db_logger.username'), password=config.get('db_logger.password'),
database=config.get('db_logger.database'), charset='utf8mb4')
cur = conn.cursor()
connected = True
def log(message, kind, text_msg=None):
global connected, enabled
if enabled:
if not config.get('db_logger.host') or not config.get('db_logger.database'):
print('Incorrect database configuration!')
enabled = False
return
with db_lock:
try:
_connect()
if text_msg is None:
text_msg = message
text_msg = text_msg[:MAX_TEXT_LENGTH]
cur.execute('INSERT INTO vkbot_logmessage VALUES (NULL, %s, %s, NOW(), %s, %s)', (message, kind, text_msg, accounts.current_account))
conn.commit()
except MySQLdb.Error as e:
print(e, flush=True)
_log.write('error', 'MySQL error: ' + str(e))
time.sleep(5)
connected = False
log(message, kind, text_msg)
|
import threading
import time
import accounts
import args
import config
MAX_TEXT_LENGTH = 1024
enabled = bool(args.args['database'])
if enabled:
import MySQLdb
connected = False
conn = None
cur = None
db_lock = threading.RLock()
def _connect():
global conn, cur, connected
if not connected:
conn = MySQLdb.connect(host=config.get('db_logger.host'), user=config.get('db_logger.username'), password=config.get('db_logger.password'),
database=config.get('db_logger.database'), charset='utf8mb4')
cur = conn.cursor()
connected = True
def log(message, kind, text_msg=None):
global connected, enabled
if enabled:
if not config.get('db_logger.host') or not config.get('db_logger.database'):
print('Incorrect database configuration!')
enabled = False
return
with db_lock:
try:
_connect()
if text_msg is None:
text_msg = message
text_msg = text_msg[:MAX_TEXT_LENGTH]
cur.execute('INSERT INTO vkbot_logmessage VALUES (NULL, %s, %s, NOW(), %s, %s)', (message, kind, text_msg, accounts.current_account))
conn.commit()
except MySQLdb.Error as e:
print(e, flush=True)
time.sleep(5)
connected = False
log(message, kind, text_msg)
Write db errors to error.logimport threading
import time
import accounts
import args
import config
import log as _log
MAX_TEXT_LENGTH = 1024
enabled = bool(args.args['database'])
if enabled:
import MySQLdb
connected = False
conn = None
cur = None
db_lock = threading.RLock()
def _connect():
global conn, cur, connected
if not connected:
conn = MySQLdb.connect(host=config.get('db_logger.host'), user=config.get('db_logger.username'), password=config.get('db_logger.password'),
database=config.get('db_logger.database'), charset='utf8mb4')
cur = conn.cursor()
connected = True
def log(message, kind, text_msg=None):
global connected, enabled
if enabled:
if not config.get('db_logger.host') or not config.get('db_logger.database'):
print('Incorrect database configuration!')
enabled = False
return
with db_lock:
try:
_connect()
if text_msg is None:
text_msg = message
text_msg = text_msg[:MAX_TEXT_LENGTH]
cur.execute('INSERT INTO vkbot_logmessage VALUES (NULL, %s, %s, NOW(), %s, %s)', (message, kind, text_msg, accounts.current_account))
conn.commit()
except MySQLdb.Error as e:
print(e, flush=True)
_log.write('error', 'MySQL error: ' + str(e))
time.sleep(5)
connected = False
log(message, kind, text_msg)
|
<commit_before>import threading
import time
import accounts
import args
import config
MAX_TEXT_LENGTH = 1024
enabled = bool(args.args['database'])
if enabled:
import MySQLdb
connected = False
conn = None
cur = None
db_lock = threading.RLock()
def _connect():
global conn, cur, connected
if not connected:
conn = MySQLdb.connect(host=config.get('db_logger.host'), user=config.get('db_logger.username'), password=config.get('db_logger.password'),
database=config.get('db_logger.database'), charset='utf8mb4')
cur = conn.cursor()
connected = True
def log(message, kind, text_msg=None):
global connected, enabled
if enabled:
if not config.get('db_logger.host') or not config.get('db_logger.database'):
print('Incorrect database configuration!')
enabled = False
return
with db_lock:
try:
_connect()
if text_msg is None:
text_msg = message
text_msg = text_msg[:MAX_TEXT_LENGTH]
cur.execute('INSERT INTO vkbot_logmessage VALUES (NULL, %s, %s, NOW(), %s, %s)', (message, kind, text_msg, accounts.current_account))
conn.commit()
except MySQLdb.Error as e:
print(e, flush=True)
time.sleep(5)
connected = False
log(message, kind, text_msg)
<commit_msg>Write db errors to error.log<commit_after>import threading
import time
import accounts
import args
import config
import log as _log
MAX_TEXT_LENGTH = 1024
enabled = bool(args.args['database'])
if enabled:
import MySQLdb
connected = False
conn = None
cur = None
db_lock = threading.RLock()
def _connect():
global conn, cur, connected
if not connected:
conn = MySQLdb.connect(host=config.get('db_logger.host'), user=config.get('db_logger.username'), password=config.get('db_logger.password'),
database=config.get('db_logger.database'), charset='utf8mb4')
cur = conn.cursor()
connected = True
def log(message, kind, text_msg=None):
global connected, enabled
if enabled:
if not config.get('db_logger.host') or not config.get('db_logger.database'):
print('Incorrect database configuration!')
enabled = False
return
with db_lock:
try:
_connect()
if text_msg is None:
text_msg = message
text_msg = text_msg[:MAX_TEXT_LENGTH]
cur.execute('INSERT INTO vkbot_logmessage VALUES (NULL, %s, %s, NOW(), %s, %s)', (message, kind, text_msg, accounts.current_account))
conn.commit()
except MySQLdb.Error as e:
print(e, flush=True)
_log.write('error', 'MySQL error: ' + str(e))
time.sleep(5)
connected = False
log(message, kind, text_msg)
|
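One caveat about the pattern in the record above: log() retries by calling itself after a failure, so a prolonged MySQL outage keeps recursing. A hedged sketch of a bounded, iterative variant of the same idea; log_with_retry, write_row, and the retry count are inventions for illustration, not the repo's API:

import time

def log_with_retry(write_row, message, kind, text_msg=None, retries=3):
    # write_row is any callable that performs the INSERT; it may raise.
    text_msg = (text_msg if text_msg is not None else message)[:1024]
    for _ in range(retries):
        try:
            write_row(message, kind, text_msg)
            return True
        except Exception as exc:        # MySQLdb.Error in the real module
            print(exc, flush=True)
            time.sleep(5)               # back off, then try again
    return False                        # gave up; the caller decides what to do next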
8f162be2d682ca00a5301f0ebecfbbd6038e657a
|
manage.py
|
manage.py
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "schoolidolapi.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "schoolidolapi.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Fix shebang and coding comment lines order
|
Fix shebang and coding comment lines order
|
Python
|
apache-2.0
|
laurenor/SchoolIdolAPI,dburr/SchoolIdolAPI,dburr/SchoolIdolAPI,rdsathene/SchoolIdolAPI,rdsathene/SchoolIdolAPI,SchoolIdolTomodachi/SchoolIdolAPI,laurenor/SchoolIdolAPI,dburr/SchoolIdolAPI,laurenor/SchoolIdolAPI,rdsathene/SchoolIdolAPI,SchoolIdolTomodachi/SchoolIdolAPI,SchoolIdolTomodachi/SchoolIdolAPI
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "schoolidolapi.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
Fix shebang and coding comment lines order
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "schoolidolapi.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
<commit_before># -*- coding: utf-8 -*-
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "schoolidolapi.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
<commit_msg>Fix shebang and coding comment lines order<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "schoolidolapi.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "schoolidolapi.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
Fix shebang and coding comment lines order#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "schoolidolapi.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
<commit_before># -*- coding: utf-8 -*-
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "schoolidolapi.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
<commit_msg>Fix shebang and coding comment lines order<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "schoolidolapi.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
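For context on why the ordering in this record matters: the kernel only honors a '#!' interpreter line when it occupies the very first bytes of the file, while PEP 263 allows the coding declaration on line one or two, so shebang-first satisfies both. The corrected header shape, for reference:

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# the shebang must be line 1; the coding cookie may be line 1 or 2 (PEP 263)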
a817afa1580aeb59fcbe837893c9ec8c5e7e0667
|
anygit/clisetup.py
|
anygit/clisetup.py
|
import logging.config
import os
from paste.deploy import loadapp
import sys
DIR = os.path.abspath(os.path.dirname(__file__))
conf = os.path.join(DIR, '../conf/anygit.ini')
application = loadapp('config:%s' % conf, relative_to='/')
app = loadapp('config:%s' % conf,relative_to=os.getcwd())
logging.config.fileConfig(conf)
|
import logging.config
import os
from paste.deploy import loadapp
import sys
DIR = os.path.abspath(os.path.dirname(__file__))
conf = os.path.join(DIR, '../conf/anygit.ini')
logging.config.fileConfig(conf)
application = loadapp('config:%s' % conf, relative_to='/')
app = loadapp('config:%s' % conf,relative_to=os.getcwd())
|
Load the logging config right away so it actually works
|
Load the logging config right away so it actually works
|
Python
|
mit
|
ebroder/anygit,ebroder/anygit
|
import logging.config
import os
from paste.deploy import loadapp
import sys
DIR = os.path.abspath(os.path.dirname(__file__))
conf = os.path.join(DIR, '../conf/anygit.ini')
application = loadapp('config:%s' % conf, relative_to='/')
app = loadapp('config:%s' % conf,relative_to=os.getcwd())
logging.config.fileConfig(conf)
Load the logging config right away so it actually works
|
import logging.config
import os
from paste.deploy import loadapp
import sys
DIR = os.path.abspath(os.path.dirname(__file__))
conf = os.path.join(DIR, '../conf/anygit.ini')
logging.config.fileConfig(conf)
application = loadapp('config:%s' % conf, relative_to='/')
app = loadapp('config:%s' % conf,relative_to=os.getcwd())
|
<commit_before>import logging.config
import os
from paste.deploy import loadapp
import sys
DIR = os.path.abspath(os.path.dirname(__file__))
conf = os.path.join(DIR, '../conf/anygit.ini')
application = loadapp('config:%s' % conf, relative_to='/')
app = loadapp('config:%s' % conf,relative_to=os.getcwd())
logging.config.fileConfig(conf)
<commit_msg>Load the logging config right away so it actually works<commit_after>
|
import logging.config
import os
from paste.deploy import loadapp
import sys
DIR = os.path.abspath(os.path.dirname(__file__))
conf = os.path.join(DIR, '../conf/anygit.ini')
logging.config.fileConfig(conf)
application = loadapp('config:%s' % conf, relative_to='/')
app = loadapp('config:%s' % conf,relative_to=os.getcwd())
|
import logging.config
import os
from paste.deploy import loadapp
import sys
DIR = os.path.abspath(os.path.dirname(__file__))
conf = os.path.join(DIR, '../conf/anygit.ini')
application = loadapp('config:%s' % conf, relative_to='/')
app = loadapp('config:%s' % conf,relative_to=os.getcwd())
logging.config.fileConfig(conf)
Load the logging config right away so it actually worksimport logging.config
import os
from paste.deploy import loadapp
import sys
DIR = os.path.abspath(os.path.dirname(__file__))
conf = os.path.join(DIR, '../conf/anygit.ini')
logging.config.fileConfig(conf)
application = loadapp('config:%s' % conf, relative_to='/')
app = loadapp('config:%s' % conf,relative_to=os.getcwd())
|
<commit_before>import logging.config
import os
from paste.deploy import loadapp
import sys
DIR = os.path.abspath(os.path.dirname(__file__))
conf = os.path.join(DIR, '../conf/anygit.ini')
application = loadapp('config:%s' % conf, relative_to='/')
app = loadapp('config:%s' % conf,relative_to=os.getcwd())
logging.config.fileConfig(conf)
<commit_msg>Load the logging config right away so it actually works<commit_after>import logging.config
import os
from paste.deploy import loadapp
import sys
DIR = os.path.abspath(os.path.dirname(__file__))
conf = os.path.join(DIR, '../conf/anygit.ini')
logging.config.fileConfig(conf)
application = loadapp('config:%s' % conf, relative_to='/')
app = loadapp('config:%s' % conf,relative_to=os.getcwd())
|
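The ordering fix in this record matters because logging.config.fileConfig() disables already-existing loggers by default, and loadapp() imports application modules that typically call logging.getLogger() at import time; configuring logging first is what lets those loggers pick up the configured handlers. A minimal sketch of the general pattern, with an illustrative config path:

import logging
import logging.config

logging.config.fileConfig('conf/app.ini')   # illustrative path; run this first

log = logging.getLogger(__name__)
log.info('logging is configured before any app modules are imported')

# only now load the WSGI app, e.g.:
# application = loadapp('config:conf/app.ini', relative_to='/')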
8b33cfa3e7fc39446f634d6ab45585e589a3cc85
|
marrow/mongo/core/document.py
|
marrow/mongo/core/document.py
|
# encoding: utf-8
from collections import OrderedDict as odict, MutableMapping
from itertools import chain
from marrow.schema import Container, Attribute
class Document(Container):
__foreign__ = 'object'
|
# encoding: utf-8
from collections import OrderedDict as odict, MutableMapping
from itertools import chain
from marrow.schema import Container, Attribute
from .util import py2, SENTINEL, adjust_attribute_sequence
class Document(Container):
__foreign__ = 'object'
# Mapping Protocol
def __getitem__(self, name):
return self.__data__[name]
def __setitem__(self, name, value):
self.__data__[name] = value
def __delitem__(self, name):
del self.__data__[name]
def __iter__(self):
return iter(self.__data__.keys())
def __len__(self):
return len(self.__data__)
if py2:
def keys(self):
return self.__data__.iterkeys()
def items(self):
return self.__data__.iteritems()
def values(self):
return self.__data__.itervalues()
else:
def keys(self):
return self.__data__.keys()
def items(self):
return self.__data__.items()
def values(self):
return self.__data__.values()
def __contains__(self, key):
return key in self.__data__
def __eq__(self, other):
return self.__data__ == other
def __ne__(self, other):
return self.__data__ != other
def get(self, key, default=None):
return self.__data__.get(key, default)
def clear(self):
self.__data__.clear()
def pop(self, name, default=SENTINEL):
if default is SENTINEL:
return self.__data__.pop(name)
return self.__data__.pop(name, default)
def popitem(self):
return self.__data__.popitem()
def update(self, *args, **kw):
self.__data__.update(*args, **kw)
def setdefault(self, key, value=None):
return self.__data__.setdefault(key, value)
MutableMapping.register(Document) # Metaclass conflict if we subclass.
|
Make compatible with direct use by pymongo.
|
Make compatible with direct use by pymongo.
I.e. for direct passing to collection.insert()
|
Python
|
mit
|
marrow/mongo,djdduty/mongo,djdduty/mongo
|
# encoding: utf-8
from collections import OrderedDict as odict, MutableMapping
from itertools import chain
from marrow.schema import Container, Attribute
class Document(Container):
__foreign__ = 'object'
Make compatible with direct use by pymongo.
I.e. for direct passing to collection.insert()
|
# encoding: utf-8
from collections import OrderedDict as odict, MutableMapping
from itertools import chain
from marrow.schema import Container, Attribute
from .util import py2, SENTINEL, adjust_attribute_sequence
class Document(Container):
__foreign__ = 'object'
# Mapping Protocol
def __getitem__(self, name):
return self.__data__[name]
def __setitem__(self, name, value):
self.__data__[name] = value
def __delitem__(self, name):
del self.__data__[name]
def __iter__(self):
return iter(self.__data__.keys())
def __len__(self):
return len(self.__data__)
if py2:
def keys(self):
return self.__data__.iterkeys()
def items(self):
return self.__data__.iteritems()
def values(self):
return self.__data__.itervalues()
else:
def keys(self):
return self.__data__.keys()
def items(self):
return self.__data__.items()
def values(self):
return self.__data__.values()
def __contains__(self, key):
return key in self.__data__
def __eq__(self, other):
return self.__data__ == other
def __ne__(self, other):
return self.__data__ != other
def get(self, key, default=None):
return self.__data__.get(key, default)
def clear(self):
self.__data__.clear()
def pop(self, name, default=SENTINEL):
if default is SENTINEL:
return self.__data__.pop(name)
return self.__data__.pop(name, default)
def popitem(self):
return self.__data__.popitem()
def update(self, *args, **kw):
self.__data__.update(*args, **kw)
def setdefault(self, key, value=None):
return self.__data__.setdefault(key, value)
MutableMapping.register(Document) # Metaclass conflict if we subclass.
|
<commit_before># encoding: utf-8
from collections import OrderedDict as odict, MutableMapping
from itertools import chain
from marrow.schema import Container, Attribute
class Document(Container):
__foreign__ = 'object'
<commit_msg>Make compatible with direct use by pymongo.
I.e. for direct passing to collection.insert()<commit_after>
|
# encoding: utf-8
from collections import OrderedDict as odict, MutableMapping
from itertools import chain
from marrow.schema import Container, Attribute
from .util import py2, SENTINEL, adjust_attribute_sequence
class Document(Container):
__foreign__ = 'object'
# Mapping Protocol
def __getitem__(self, name):
return self.__data__[name]
def __setitem__(self, name, value):
self.__data__[name] = value
def __delitem__(self, name):
del self.__data__[name]
def __iter__(self):
return iter(self.__data__.keys())
def __len__(self):
return len(self.__data__)
if py2:
def keys(self):
return self.__data__.iterkeys()
def items(self):
return self.__data__.iteritems()
def values(self):
return self.__data__.itervalues()
else:
def keys(self):
return self.__data__.keys()
def items(self):
return self.__data__.items()
def values(self):
return self.__data__.values()
def __contains__(self, key):
return key in self.__data__
def __eq__(self, other):
return self.__data__ == other
def __ne__(self, other):
return self.__data__ != other
def get(self, key, default=None):
return self.__data__.get(key, default)
def clear(self):
self.__data__.clear()
def pop(self, name, default=SENTINEL):
if default is SENTINEL:
return self.__data__.pop(name)
return self.__data__.pop(name, default)
def popitem(self):
return self.__data__.popitem()
def update(self, *args, **kw):
self.__data__.update(*args, **kw)
def setdefault(self, key, value=None):
return self.__data__.setdefault(key, value)
MutableMapping.register(Document) # Metaclass conflict if we subclass.
|
# encoding: utf-8
from collections import OrderedDict as odict, MutableMapping
from itertools import chain
from marrow.schema import Container, Attribute
class Document(Container):
__foreign__ = 'object'
Make compatible with direct use by pymongo.
I.e. for direct passing to collection.insert()# encoding: utf-8
from collections import OrderedDict as odict, MutableMapping
from itertools import chain
from marrow.schema import Container, Attribute
from .util import py2, SENTINEL, adjust_attribute_sequence
class Document(Container):
__foreign__ = 'object'
# Mapping Protocol
def __getitem__(self, name):
return self.__data__[name]
def __setitem__(self, name, value):
self.__data__[name] = value
def __delitem__(self, name):
del self.__data__[name]
def __iter__(self):
return iter(self.__data__.keys())
def __len__(self):
return len(self.__data__)
if py2:
def keys(self):
return self.__data__.iterkeys()
def items(self):
return self.__data__.iteritems()
def values(self):
return self.__data__.itervalues()
else:
def keys(self):
return self.__data__.keys()
def items(self):
return self.__data__.items()
def values(self):
return self.__data__.values()
def __contains__(self, key):
return key in self.__data__
def __eq__(self, other):
return self.__data__ == other
def __ne__(self, other):
return self.__data__ != other
def get(self, key, default=None):
return self.__data__.get(key, default)
def clear(self):
self.__data__.clear()
def pop(self, name, default=SENTINEL):
if default is SENTINEL:
return self.__data__.pop(name)
return self.__data__.pop(name, default)
def popitem(self):
return self.__data__.popitem()
def update(self, *args, **kw):
self.__data__.update(*args, **kw)
def setdefault(self, key, value=None):
return self.__data__.setdefault(key, value)
MutableMapping.register(Document) # Metaclass conflict if we subclass.
|
<commit_before># encoding: utf-8
from collections import OrderedDict as odict, MutableMapping
from itertools import chain
from marrow.schema import Container, Attribute
class Document(Container):
__foreign__ = 'object'
<commit_msg>Make compatible with direct use by pymongo.
I.e. for direct passing to collection.insert()<commit_after># encoding: utf-8
from collections import OrderedDict as odict, MutableMapping
from itertools import chain
from marrow.schema import Container, Attribute
from .util import py2, SENTINEL, adjust_attribute_sequence
class Document(Container):
__foreign__ = 'object'
# Mapping Protocol
def __getitem__(self, name):
return self.__data__[name]
def __setitem__(self, name, value):
self.__data__[name] = value
def __delitem__(self, name):
del self.__data__[name]
def __iter__(self):
return iter(self.__data__.keys())
def __len__(self):
return len(self.__data__)
if py2:
def keys(self):
return self.__data__.iterkeys()
def items(self):
return self.__data__.iteritems()
def values(self):
return self.__data__.itervalues()
else:
def keys(self):
return self.__data__.keys()
def items(self):
return self.__data__.items()
def values(self):
return self.__data__.values()
def __contains__(self, key):
return key in self.__data__
def __eq__(self, other):
return self.__data__ == other
def __ne__(self, other):
return self.__data__ != other
def get(self, key, default=None):
return self.__data__.get(key, default)
def clear(self):
self.__data__.clear()
def pop(self, name, default=SENTINEL):
if default is SENTINEL:
return self.__data__.pop(name)
return self.__data__.pop(name, default)
def popitem(self):
return self.__data__.popitem()
def update(self, *args, **kw):
self.__data__.update(*args, **kw)
def setdefault(self, key, value=None):
return self.__data__.setdefault(key, value)
MutableMapping.register(Document) # Metaclass conflict if we subclass.
|
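The point of the mapping protocol plus the MutableMapping registration in this record is that pymongo will encode any mapping-like object, so a Document can be handed straight to insert calls. A self-contained sketch of the same idea using a toy class; the modern collections.abc import path is shown, and nothing below is marrow's API:

from collections.abc import MutableMapping

class Record(MutableMapping):
    def __init__(self, **data):
        self._d = dict(data)
    def __getitem__(self, key):
        return self._d[key]
    def __setitem__(self, key, value):
        self._d[key] = value
    def __delitem__(self, key):
        del self._d[key]
    def __iter__(self):
        return iter(self._d)
    def __len__(self):
        return len(self._d)

# with a live server: MongoClient().db.things.insert_one(Record(x=1))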
0166d699096aa506e37b6a2df8e51f94895c0b4f
|
fireplace/cards/wog/neutral_rare.py
|
fireplace/cards/wog/neutral_rare.py
|
from ..utils import *
##
# Minions
class OG_034:
"Silithid Swarmer"
update = (NUM_ATTACKS_THIS_TURN(FRIENDLY_HERO) == 0) & (
Refresh(SELF, {GameTag.CANT_ATTACK: True})
)
class OG_254:
"Eater of Secrets"
play = (
Buff(SELF, "OG_254e") * Count(ENEMY_SECRETS),
Destroy(ENEMY_SECRETS)
)
OG_254e = buff(+1, +1)
class OG_322:
"Blackwater Pirate"
update = Refresh(FRIENDLY_HAND + WEAPON, {GameTag.COST: -2})
|
from ..utils import *
##
# Minions
class OG_034:
"Silithid Swarmer"
update = (NUM_ATTACKS_THIS_TURN(FRIENDLY_HERO) == 0) & (
Refresh(SELF, {GameTag.CANT_ATTACK: True})
)
class OG_147:
"Corrupted Healbot"
deathrattle = Heal(ENEMY_HERO, 8)
class OG_161:
"Corrupted Seer"
play = Hit(ALL_MINIONS - MURLOC, 2)
class OG_254:
"Eater of Secrets"
play = (
Buff(SELF, "OG_254e") * Count(ENEMY_SECRETS),
Destroy(ENEMY_SECRETS)
)
OG_254e = buff(+1, +1)
class OG_320:
"Midnight Drake"
play = Buff(SELF, "OG_320e") * Count(FRIENDLY_HAND)
OG_320e = buff(atk=1)
class OG_322:
"Blackwater Pirate"
update = Refresh(FRIENDLY_HAND + WEAPON, {GameTag.COST: -2})
|
Implement Corrupted Healbot, Corrupted Seer, and Midnight Drake
|
Implement Corrupted Healbot, Corrupted Seer, and Midnight Drake
|
Python
|
agpl-3.0
|
NightKev/fireplace,beheh/fireplace,jleclanche/fireplace
|
from ..utils import *
##
# Minions
class OG_034:
"Silithid Swarmer"
update = (NUM_ATTACKS_THIS_TURN(FRIENDLY_HERO) == 0) & (
Refresh(SELF, {GameTag.CANT_ATTACK: True})
)
class OG_254:
"Eater of Secrets"
play = (
Buff(SELF, "OG_254e") * Count(ENEMY_SECRETS),
Destroy(ENEMY_SECRETS)
)
OG_254e = buff(+1, +1)
class OG_322:
"Blackwater Pirate"
update = Refresh(FRIENDLY_HAND + WEAPON, {GameTag.COST: -2})
Implement Corrupted Healbot, Corrupted Seer, and Midnight Drake
|
from ..utils import *
##
# Minions
class OG_034:
"Silithid Swarmer"
update = (NUM_ATTACKS_THIS_TURN(FRIENDLY_HERO) == 0) & (
Refresh(SELF, {GameTag.CANT_ATTACK: True})
)
class OG_147:
"Corrupted Healbot"
deathrattle = Heal(ENEMY_HERO, 8)
class OG_161:
"Corrupted Seer"
play = Hit(ALL_MINIONS - MURLOC, 2)
class OG_254:
"Eater of Secrets"
play = (
Buff(SELF, "OG_254e") * Count(ENEMY_SECRETS),
Destroy(ENEMY_SECRETS)
)
OG_254e = buff(+1, +1)
class OG_320:
"Midnight Drake"
play = Buff(SELF, "OG_320e") * Count(FRIENDLY_HAND)
OG_320e = buff(atk=1)
class OG_322:
"Blackwater Pirate"
update = Refresh(FRIENDLY_HAND + WEAPON, {GameTag.COST: -2})
|
<commit_before>from ..utils import *
##
# Minions
class OG_034:
"Silithid Swarmer"
update = (NUM_ATTACKS_THIS_TURN(FRIENDLY_HERO) == 0) & (
Refresh(SELF, {GameTag.CANT_ATTACK: True})
)
class OG_254:
"Eater of Secrets"
play = (
Buff(SELF, "OG_254e") * Count(ENEMY_SECRETS),
Destroy(ENEMY_SECRETS)
)
OG_254e = buff(+1, +1)
class OG_322:
"Blackwater Pirate"
update = Refresh(FRIENDLY_HAND + WEAPON, {GameTag.COST: -2})
<commit_msg>Implement Corrupted Healbot, Corrupted Seer, and Midnight Drake<commit_after>
|
from ..utils import *
##
# Minions
class OG_034:
"Silithid Swarmer"
update = (NUM_ATTACKS_THIS_TURN(FRIENDLY_HERO) == 0) & (
Refresh(SELF, {GameTag.CANT_ATTACK: True})
)
class OG_147:
"Corrupted Healbot"
deathrattle = Heal(ENEMY_HERO, 8)
class OG_161:
"Corrupted Seer"
play = Hit(ALL_MINIONS - MURLOC, 2)
class OG_254:
"Eater of Secrets"
play = (
Buff(SELF, "OG_254e") * Count(ENEMY_SECRETS),
Destroy(ENEMY_SECRETS)
)
OG_254e = buff(+1, +1)
class OG_320:
"Midnight Drake"
play = Buff(SELF, "OG_320e") * Count(FRIENDLY_HAND)
OG_320e = buff(atk=1)
class OG_322:
"Blackwater Pirate"
update = Refresh(FRIENDLY_HAND + WEAPON, {GameTag.COST: -2})
|
from ..utils import *
##
# Minions
class OG_034:
"Silithid Swarmer"
update = (NUM_ATTACKS_THIS_TURN(FRIENDLY_HERO) == 0) & (
Refresh(SELF, {GameTag.CANT_ATTACK: True})
)
class OG_254:
"Eater of Secrets"
play = (
Buff(SELF, "OG_254e") * Count(ENEMY_SECRETS),
Destroy(ENEMY_SECRETS)
)
OG_254e = buff(+1, +1)
class OG_322:
"Blackwater Pirate"
update = Refresh(FRIENDLY_HAND + WEAPON, {GameTag.COST: -2})
Implement Corrupted Healbot, Corrupted Seer, and Midnight Drakefrom ..utils import *
##
# Minions
class OG_034:
"Silithid Swarmer"
update = (NUM_ATTACKS_THIS_TURN(FRIENDLY_HERO) == 0) & (
Refresh(SELF, {GameTag.CANT_ATTACK: True})
)
class OG_147:
"Corrupted Healbot"
deathrattle = Heal(ENEMY_HERO, 8)
class OG_161:
"Corrupted Seer"
play = Hit(ALL_MINIONS - MURLOC, 2)
class OG_254:
"Eater of Secrets"
play = (
Buff(SELF, "OG_254e") * Count(ENEMY_SECRETS),
Destroy(ENEMY_SECRETS)
)
OG_254e = buff(+1, +1)
class OG_320:
"Midnight Drake"
play = Buff(SELF, "OG_320e") * Count(FRIENDLY_HAND)
OG_320e = buff(atk=1)
class OG_322:
"Blackwater Pirate"
update = Refresh(FRIENDLY_HAND + WEAPON, {GameTag.COST: -2})
|
<commit_before>from ..utils import *
##
# Minions
class OG_034:
"Silithid Swarmer"
update = (NUM_ATTACKS_THIS_TURN(FRIENDLY_HERO) == 0) & (
Refresh(SELF, {GameTag.CANT_ATTACK: True})
)
class OG_254:
"Eater of Secrets"
play = (
Buff(SELF, "OG_254e") * Count(ENEMY_SECRETS),
Destroy(ENEMY_SECRETS)
)
OG_254e = buff(+1, +1)
class OG_322:
"Blackwater Pirate"
update = Refresh(FRIENDLY_HAND + WEAPON, {GameTag.COST: -2})
<commit_msg>Implement Corrupted Healbot, Corrupted Seer, and Midnight Drake<commit_after>from ..utils import *
##
# Minions
class OG_034:
"Silithid Swarmer"
update = (NUM_ATTACKS_THIS_TURN(FRIENDLY_HERO) == 0) & (
Refresh(SELF, {GameTag.CANT_ATTACK: True})
)
class OG_147:
"Corrupted Healbot"
deathrattle = Heal(ENEMY_HERO, 8)
class OG_161:
"Corrupted Seer"
play = Hit(ALL_MINIONS - MURLOC, 2)
class OG_254:
"Eater of Secrets"
play = (
Buff(SELF, "OG_254e") * Count(ENEMY_SECRETS),
Destroy(ENEMY_SECRETS)
)
OG_254e = buff(+1, +1)
class OG_320:
"Midnight Drake"
play = Buff(SELF, "OG_320e") * Count(FRIENDLY_HAND)
OG_320e = buff(atk=1)
class OG_322:
"Blackwater Pirate"
update = Refresh(FRIENDLY_HAND + WEAPON, {GameTag.COST: -2})
|
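The convention in the record above is that card behavior lives in classes named after card IDs, which an engine can resolve by name at play time. A rough, engine-agnostic sketch of that lookup pattern; this is not fireplace's actual loader, and the action tuples are placeholders:

import sys

class OG_147:
    play = None
    deathrattle = ('Heal', 'ENEMY_HERO', 8)   # placeholder action data

def card_event(card_id, event):
    # resolve the card class by its ID string, then fetch the event attribute
    cls = getattr(sys.modules[__name__], card_id, None)
    return getattr(cls, event, None) if cls is not None else None

print(card_event('OG_147', 'deathrattle'))    # -> ('Heal', 'ENEMY_HERO', 8)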
35111353ab8d8cae320b49520fe693114fed160f
|
bin/parsers/DeploysServiceLookup.py
|
bin/parsers/DeploysServiceLookup.py
|
if 'r2' in alert['resource'].lower():
alert['service'] = [ 'R2' ]
elif 'content-api' in alert['resource'].lower():
alert['service'] = [ 'ContentAPI' ]
elif 'flexible' in alert['resource'].lower():
alert['service'] = [ 'FlexibleContent' ]
elif 'frontend' in alert['resource'].lower():
alert['service'] = [ 'Frontend' ]
elif 'mobile' in alert['resource'].lower():
alert['service'] = [ 'Mobile' ]
elif 'android' in alert['resource'].lower():
alert['service'] = [ 'Mobile' ]
elif 'ios' in alert['resource'].lower():
alert['service'] = [ 'Mobile' ]
elif 'identity' in alert['resource'].lower():
alert['service'] = [ 'Identity' ]
elif 'microapps' in alert['resource'].lower():
alert['service'] = [ 'MicroApp' ]
else:
alert['service'] = [ 'Unknown' ]
|
if alert['resource'].startswith('R1'):
alert['service'] = [ 'R1' ]
elif alert['resource'].startswith('R2'):
alert['service'] = [ 'R2' ]
elif 'content-api' in alert['resource'].lower():
alert['service'] = [ 'ContentAPI' ]
elif alert['resource'].startswith('frontend'):
alert['service'] = [ 'Frontend' ]
elif 'flexible' in alert['resource'].lower():
alert['service'] = [ 'FlexibleContent' ]
elif alert['resource'].startswith('Identity'):
alert['service'] = [ 'Identity' ]
elif alert['resource'].startswith('Mobile'):
alert['service'] = [ 'Mobile' ]
elif alert['resource'].startswith('Android'):
alert['service'] = [ 'Mobile' ]
elif alert['resource'].startswith('iOS'):
alert['service'] = [ 'Mobile' ]
elif alert['resource'].startswith('Soulmates'):
alert['service'] = [ 'Soulmates' ]
elif alert['resource'].startswith('Microapps'):
alert['service'] = [ 'MicroApp' ]
elif alert['resource'].startswith('Mutualisation'):
alert['service'] = [ 'Mutualisation' ]
elif alert['resource'].startswith('Ophan'):
alert['service'] = [ 'Ophan' ]
else:
alert['service'] = [ 'Unknown' ]
|
Add more service lookups for Deploys
|
Add more service lookups for Deploys
|
Python
|
apache-2.0
|
guardian/alerta,0312birdzhang/alerta,skob/alerta,mrkeng/alerta,guardian/alerta,mrkeng/alerta,guardian/alerta,0312birdzhang/alerta,mrkeng/alerta,0312birdzhang/alerta,skob/alerta,skob/alerta,mrkeng/alerta,guardian/alerta,skob/alerta
|
if 'r2' in alert['resource'].lower():
alert['service'] = [ 'R2' ]
elif 'content-api' in alert['resource'].lower():
alert['service'] = [ 'ContentAPI' ]
elif 'flexible' in alert['resource'].lower():
alert['service'] = [ 'FlexibleContent' ]
elif 'frontend' in alert['resource'].lower():
alert['service'] = [ 'Frontend' ]
elif 'mobile' in alert['resource'].lower():
alert['service'] = [ 'Mobile' ]
elif 'android' in alert['resource'].lower():
alert['service'] = [ 'Mobile' ]
elif 'ios' in alert['resource'].lower():
alert['service'] = [ 'Mobile' ]
elif 'identity' in alert['resource'].lower():
alert['service'] = [ 'Identity' ]
elif 'microapps' in alert['resource'].lower():
alert['service'] = [ 'MicroApp' ]
else:
alert['service'] = [ 'Unknown' ]
Add more service lookups for Deploys
|
if alert['resource'].startswith('R1'):
alert['service'] = [ 'R1' ]
elif alert['resource'].startswith('R2'):
alert['service'] = [ 'R2' ]
elif 'content-api' in alert['resource'].lower():
alert['service'] = [ 'ContentAPI' ]
elif alert['resource'].startswith('frontend'):
alert['service'] = [ 'Frontend' ]
elif 'flexible' in alert['resource'].lower():
alert['service'] = [ 'FlexibleContent' ]
elif alert['resource'].startswith('Identity'):
alert['service'] = [ 'Identity' ]
elif alert['resource'].startswith('Mobile'):
alert['service'] = [ 'Mobile' ]
elif alert['resource'].startswith('Android'):
alert['service'] = [ 'Mobile' ]
elif alert['resource'].startswith('iOS'):
alert['service'] = [ 'Mobile' ]
elif alert['resource'].startswith('Soulmates'):
alert['service'] = [ 'Soulmates' ]
elif alert['resource'].startswith('Microapps'):
alert['service'] = [ 'MicroApp' ]
elif alert['resource'].startswith('Mutualisation'):
alert['service'] = [ 'Mutualisation' ]
elif alert['resource'].startswith('Ophan'):
alert['service'] = [ 'Ophan' ]
else:
alert['service'] = [ 'Unknown' ]
|
<commit_before>
if 'r2' in alert['resource'].lower():
alert['service'] = [ 'R2' ]
elif 'content-api' in alert['resource'].lower():
alert['service'] = [ 'ContentAPI' ]
elif 'flexible' in alert['resource'].lower():
alert['service'] = [ 'FlexibleContent' ]
elif 'frontend' in alert['resource'].lower():
alert['service'] = [ 'Frontend' ]
elif 'mobile' in alert['resource'].lower():
alert['service'] = [ 'Mobile' ]
elif 'android' in alert['resource'].lower():
alert['service'] = [ 'Mobile' ]
elif 'ios' in alert['resource'].lower():
alert['service'] = [ 'Mobile' ]
elif 'identity' in alert['resource'].lower():
alert['service'] = [ 'Identity' ]
elif 'microapps' in alert['resource'].lower():
alert['service'] = [ 'MicroApp' ]
else:
alert['service'] = [ 'Unknown' ]
<commit_msg>Add more service lookups for Deploys<commit_after>
|
if alert['resource'].startswith('R1'):
alert['service'] = [ 'R1' ]
elif alert['resource'].startswith('R2'):
alert['service'] = [ 'R2' ]
elif 'content-api' in alert['resource'].lower():
alert['service'] = [ 'ContentAPI' ]
elif alert['resource'].startswith('frontend'):
alert['service'] = [ 'Frontend' ]
elif 'flexible' in alert['resource'].lower():
alert['service'] = [ 'FlexibleContent' ]
elif alert['resource'].startswith('Identity'):
alert['service'] = [ 'Identity' ]
elif alert['resource'].startswith('Mobile'):
alert['service'] = [ 'Mobile' ]
elif alert['resource'].startswith('Android'):
alert['service'] = [ 'Mobile' ]
elif alert['resource'].startswith('iOS'):
alert['service'] = [ 'Mobile' ]
elif alert['resource'].startswith('Soulmates'):
alert['service'] = [ 'Soulmates' ]
elif alert['resource'].startswith('Microapps'):
alert['service'] = [ 'MicroApp' ]
elif alert['resource'].startswith('Mutualisation'):
alert['service'] = [ 'Mutualisation' ]
elif alert['resource'].startswith('Ophan'):
alert['service'] = [ 'Ophan' ]
else:
alert['service'] = [ 'Unknown' ]
|
if 'r2' in alert['resource'].lower():
alert['service'] = [ 'R2' ]
elif 'content-api' in alert['resource'].lower():
alert['service'] = [ 'ContentAPI' ]
elif 'flexible' in alert['resource'].lower():
alert['service'] = [ 'FlexibleContent' ]
elif 'frontend' in alert['resource'].lower():
alert['service'] = [ 'Frontend' ]
elif 'mobile' in alert['resource'].lower():
alert['service'] = [ 'Mobile' ]
elif 'android' in alert['resource'].lower():
alert['service'] = [ 'Mobile' ]
elif 'ios' in alert['resource'].lower():
alert['service'] = [ 'Mobile' ]
elif 'identity' in alert['resource'].lower():
alert['service'] = [ 'Identity' ]
elif 'microapps' in alert['resource'].lower():
alert['service'] = [ 'MicroApp' ]
else:
alert['service'] = [ 'Unknown' ]
Add more service lookups for Deploys
if alert['resource'].startswith('R1'):
alert['service'] = [ 'R1' ]
elif alert['resource'].startswith('R2'):
alert['service'] = [ 'R2' ]
elif 'content-api' in alert['resource'].lower():
alert['service'] = [ 'ContentAPI' ]
elif alert['resource'].startswith('frontend'):
alert['service'] = [ 'Frontend' ]
elif 'flexible' in alert['resource'].lower():
alert['service'] = [ 'FlexibleContent' ]
elif alert['resource'].startswith('Identity'):
alert['service'] = [ 'Identity' ]
elif alert['resource'].startswith('Mobile'):
alert['service'] = [ 'Mobile' ]
elif alert['resource'].startswith('Android'):
alert['service'] = [ 'Mobile' ]
elif alert['resource'].startswith('iOS'):
alert['service'] = [ 'Mobile' ]
elif alert['resource'].startswith('Soulmates'):
alert['service'] = [ 'Soulmates' ]
elif alert['resource'].startswith('Microapps'):
alert['service'] = [ 'MicroApp' ]
elif alert['resource'].startswith('Mutualisation'):
alert['service'] = [ 'Mutualisation' ]
elif alert['resource'].startswith('Ophan'):
alert['service'] = [ 'Ophan' ]
else:
alert['service'] = [ 'Unknown' ]
|
<commit_before>
if 'r2' in alert['resource'].lower():
alert['service'] = [ 'R2' ]
elif 'content-api' in alert['resource'].lower():
alert['service'] = [ 'ContentAPI' ]
elif 'flexible' in alert['resource'].lower():
alert['service'] = [ 'FlexibleContent' ]
elif 'frontend' in alert['resource'].lower():
alert['service'] = [ 'Frontend' ]
elif 'mobile' in alert['resource'].lower():
alert['service'] = [ 'Mobile' ]
elif 'android' in alert['resource'].lower():
alert['service'] = [ 'Mobile' ]
elif 'ios' in alert['resource'].lower():
alert['service'] = [ 'Mobile' ]
elif 'identity' in alert['resource'].lower():
alert['service'] = [ 'Identity' ]
elif 'microapps' in alert['resource'].lower():
alert['service'] = [ 'MicroApp' ]
else:
alert['service'] = [ 'Unknown' ]
<commit_msg>Add more service lookups for Deploys<commit_after>
if alert['resource'].startswith('R1'):
alert['service'] = [ 'R1' ]
elif alert['resource'].startswith('R2'):
alert['service'] = [ 'R2' ]
elif 'content-api' in alert['resource'].lower():
alert['service'] = [ 'ContentAPI' ]
elif alert['resource'].startswith('frontend'):
alert['service'] = [ 'Frontend' ]
elif 'flexible' in alert['resource'].lower():
alert['service'] = [ 'FlexibleContent' ]
elif alert['resource'].startswith('Identity'):
alert['service'] = [ 'Identity' ]
elif alert['resource'].startswith('Mobile'):
alert['service'] = [ 'Mobile' ]
elif alert['resource'].startswith('Android'):
alert['service'] = [ 'Mobile' ]
elif alert['resource'].startswith('iOS'):
alert['service'] = [ 'Mobile' ]
elif alert['resource'].startswith('Soulmates'):
alert['service'] = [ 'Soulmates' ]
elif alert['resource'].startswith('Microapps'):
alert['service'] = [ 'MicroApp' ]
elif alert['resource'].startswith('Mutualisation'):
alert['service'] = [ 'Mutualisation' ]
elif alert['resource'].startswith('Ophan'):
alert['service'] = [ 'Ophan' ]
else:
alert['service'] = [ 'Unknown' ]
|
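The if/elif ladder in this record grows with every new service; a common alternative keeps the mapping in data with first-match-wins semantics. A hedged sketch of that table-driven shape (lookup_service is an invented helper, and checking all prefixes before substrings can differ at the margins from the original chain's interleaved ordering):

PREFIX_SERVICES = [
    ('R1', 'R1'), ('R2', 'R2'), ('frontend', 'Frontend'),
    ('Identity', 'Identity'), ('Mobile', 'Mobile'), ('Android', 'Mobile'),
    ('iOS', 'Mobile'), ('Soulmates', 'Soulmates'),
    ('Microapps', 'MicroApp'), ('Mutualisation', 'Mutualisation'),
    ('Ophan', 'Ophan'),
]
SUBSTRING_SERVICES = [('content-api', 'ContentAPI'), ('flexible', 'FlexibleContent')]

def lookup_service(resource):
    for prefix, service in PREFIX_SERVICES:
        if resource.startswith(prefix):
            return [service]
    lowered = resource.lower()
    for needle, service in SUBSTRING_SERVICES:
        if needle in lowered:
            return [service]
    return ['Unknown']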
373e4628f248b9ce2cc9e5cb271dc2640208ff05
|
bluebottle/notifications/signals.py
|
bluebottle/notifications/signals.py
|
from django.core.exceptions import ValidationError
from django.dispatch import receiver
from django.forms.models import model_to_dict
from django_fsm import pre_transition, post_transition
@receiver(pre_transition)
def validate_transition_form(sender, instance, name, method_kwargs, **kwargs):
transition = method_kwargs['transition']
if transition.form:
form = transition.form(data=model_to_dict(instance))
if form.errors:
raise ValidationError(
dict(
(form.fields[field].label, errors)
for field, errors in form.errors.items()
)
)
@receiver(post_transition)
def transition_messages(sender, instance, name, method_kwargs, **kwargs):
# Only try to send messages if 'send_messages' is not False.
transition = method_kwargs['transition']
if method_kwargs.get('send_messages'):
for message in getattr(transition, 'messages', []):
message(instance).compose_and_send()
|
from django.core.exceptions import ValidationError
from django.dispatch import receiver
from django.forms.models import model_to_dict
from django_fsm import pre_transition, post_transition
@receiver(pre_transition)
def validate_transition_form(sender, instance, name=None, method_kwargs=None, **kwargs):
if not method_kwargs:
return
transition = method_kwargs['transition']
if transition.form:
form = transition.form(data=model_to_dict(instance))
if form.errors:
raise ValidationError(
dict(
(form.fields[field].label, errors)
for field, errors in form.errors.items()
)
)
@receiver(post_transition)
def transition_messages(sender, instance, name=None, method_kwargs=None, **kwargs):
if not method_kwargs:
return
# Only try to send messages if 'send_messages' is not False.
transition = method_kwargs['transition']
if method_kwargs.get('send_messages'):
for message in getattr(transition, 'messages', []):
message(instance).compose_and_send()
|
Fix for weird signal we send ourselves
|
Fix for weird signal we send ourselves
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
from django.core.exceptions import ValidationError
from django.dispatch import receiver
from django.forms.models import model_to_dict
from django_fsm import pre_transition, post_transition
@receiver(pre_transition)
def validate_transition_form(sender, instance, name, method_kwargs, **kwargs):
transition = method_kwargs['transition']
if transition.form:
form = transition.form(data=model_to_dict(instance))
if form.errors:
raise ValidationError(
dict(
(form.fields[field].label, errors)
for field, errors in form.errors.items()
)
)
@receiver(post_transition)
def transition_messages(sender, instance, name, method_kwargs, **kwargs):
# Only try to send messages if 'send_messages' is not False.
transition = method_kwargs['transition']
if method_kwargs.get('send_messages'):
for message in getattr(transition, 'messages', []):
message(instance).compose_and_send()
Fix for weird signal we send ourselves
|
from django.core.exceptions import ValidationError
from django.dispatch import receiver
from django.forms.models import model_to_dict
from django_fsm import pre_transition, post_transition
@receiver(pre_transition)
def validate_transition_form(sender, instance, name=None, method_kwargs=None, **kwargs):
if not method_kwargs:
return
transition = method_kwargs['transition']
if transition.form:
form = transition.form(data=model_to_dict(instance))
if form.errors:
raise ValidationError(
dict(
(form.fields[field].label, errors)
for field, errors in form.errors.items()
)
)
@receiver(post_transition)
def transition_messages(sender, instance, name=None, method_kwargs=None, **kwargs):
if not method_kwargs:
return
# Only try to send messages if 'send_messages' is not False.
transition = method_kwargs['transition']
if method_kwargs.get('send_messages'):
for message in getattr(transition, 'messages', []):
message(instance).compose_and_send()
|
<commit_before>from django.core.exceptions import ValidationError
from django.dispatch import receiver
from django.forms.models import model_to_dict
from django_fsm import pre_transition, post_transition
@receiver(pre_transition)
def validate_transition_form(sender, instance, name, method_kwargs, **kwargs):
transition = method_kwargs['transition']
if transition.form:
form = transition.form(data=model_to_dict(instance))
if form.errors:
raise ValidationError(
dict(
(form.fields[field].label, errors)
for field, errors in form.errors.items()
)
)
@receiver(post_transition)
def transition_messages(sender, instance, name, method_kwargs, **kwargs):
# Only try to send messages if 'send_messages' is not False.
transition = method_kwargs['transition']
if method_kwargs.get('send_messages'):
for message in getattr(transition, 'messages', []):
message(instance).compose_and_send()
<commit_msg>Fix for weird signal we send ourselves<commit_after>
|
from django.core.exceptions import ValidationError
from django.dispatch import receiver
from django.forms.models import model_to_dict
from django_fsm import pre_transition, post_transition
@receiver(pre_transition)
def validate_transition_form(sender, instance, name=None, method_kwargs=None, **kwargs):
if not method_kwargs:
return
transition = method_kwargs['transition']
if transition.form:
form = transition.form(data=model_to_dict(instance))
if form.errors:
raise ValidationError(
dict(
(form.fields[field].label, errors)
for field, errors in form.errors.items()
)
)
@receiver(post_transition)
def transition_messages(sender, instance, name=None, method_kwargs=None, **kwargs):
if not method_kwargs:
return
# Only try to send messages if 'send_messages' is not False.
transition = method_kwargs['transition']
if method_kwargs.get('send_messages'):
for message in getattr(transition, 'messages', []):
message(instance).compose_and_send()
|
from django.core.exceptions import ValidationError
from django.dispatch import receiver
from django.forms.models import model_to_dict
from django_fsm import pre_transition, post_transition
@receiver(pre_transition)
def validate_transition_form(sender, instance, name, method_kwargs, **kwargs):
transition = method_kwargs['transition']
if transition.form:
form = transition.form(data=model_to_dict(instance))
if form.errors:
raise ValidationError(
dict(
(form.fields[field].label, errors)
for field, errors in form.errors.items()
)
)
@receiver(post_transition)
def transition_messages(sender, instance, name, method_kwargs, **kwargs):
# Only try to send messages if 'send_messages' is not False.
transition = method_kwargs['transition']
if method_kwargs.get('send_messages'):
for message in getattr(transition, 'messages', []):
message(instance).compose_and_send()
Fix for weird signal we send ourselvesfrom django.core.exceptions import ValidationError
from django.dispatch import receiver
from django.forms.models import model_to_dict
from django_fsm import pre_transition, post_transition
@receiver(pre_transition)
def validate_transition_form(sender, instance, name=None, method_kwargs=None, **kwargs):
if not method_kwargs:
return
transition = method_kwargs['transition']
if transition.form:
form = transition.form(data=model_to_dict(instance))
if form.errors:
raise ValidationError(
dict(
(form.fields[field].label, errors)
for field, errors in form.errors.items()
)
)
@receiver(post_transition)
def transition_messages(sender, instance, name=None, method_kwargs=None, **kwargs):
if not method_kwargs:
return
# Only try to send messages if 'send_messages' is not False.
transition = method_kwargs['transition']
if method_kwargs.get('send_messages'):
for message in getattr(transition, 'messages', []):
message(instance).compose_and_send()
|
<commit_before>from django.core.exceptions import ValidationError
from django.dispatch import receiver
from django.forms.models import model_to_dict
from django_fsm import pre_transition, post_transition
@receiver(pre_transition)
def validate_transition_form(sender, instance, name, method_kwargs, **kwargs):
transition = method_kwargs['transition']
if transition.form:
form = transition.form(data=model_to_dict(instance))
if form.errors:
raise ValidationError(
dict(
(form.fields[field].label, errors)
for field, errors in form.errors.items()
)
)
@receiver(post_transition)
def transition_messages(sender, instance, name, method_kwargs, **kwargs):
# Only try to send messages if 'send_messages' is not False.
transition = method_kwargs['transition']
if method_kwargs.get('send_messages'):
for message in getattr(transition, 'messages', []):
message(instance).compose_and_send()
<commit_msg>Fix for weird signal we send ourselves<commit_after>from django.core.exceptions import ValidationError
from django.dispatch import receiver
from django.forms.models import model_to_dict
from django_fsm import pre_transition, post_transition
@receiver(pre_transition)
def validate_transition_form(sender, instance, name=None, method_kwargs=None, **kwargs):
if not method_kwargs:
return
transition = method_kwargs['transition']
if transition.form:
form = transition.form(data=model_to_dict(instance))
if form.errors:
raise ValidationError(
dict(
(form.fields[field].label, errors)
for field, errors in form.errors.items()
)
)
@receiver(post_transition)
def transition_messages(sender, instance, name=None, method_kwargs=None, **kwargs):
if not method_kwargs:
return
# Only try to send messages if 'send_messages' is not False.
transition = method_kwargs['transition']
if method_kwargs.get('send_messages'):
for message in getattr(transition, 'messages', []):
message(instance).compose_and_send()
|
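Background on the guard added in this record: Django's Signal.send() forwards only the keyword arguments the sender supplies, so a receiver that requires name/method_kwargs positionally breaks when the project dispatches the same signal itself without them — hence the None defaults and early return. A standalone sketch of the defensive receiver shape; demo_signal is illustrative, not from the source:

import django.dispatch

demo_signal = django.dispatch.Signal()           # illustrative signal

def handler(sender, method_kwargs=None, **kwargs):
    if not method_kwargs:
        return                                   # fired without transition info
    print('transition:', method_kwargs['transition'])

demo_signal.connect(handler)
demo_signal.send(sender=object)                  # no method_kwargs -> handler no-ops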
ef72ce81c2d51cf99e44041a871a82c512badb8c
|
people/serializers.py
|
people/serializers.py
|
from rest_framework import serializers
from people.models import Customer
from people.models import InternalUser
class CustomerSerializer(serializers.ModelSerializer):
class Meta:
model = Customer
fields = '__all__'
class InternalUserSerializer(serializers.ModelSerializer):
class Meta:
model = InternalUser
fields = '__all__'
|
from rest_framework import serializers
from people.models import Customer
from people.models import InternalUser
class CustomerSerializer(serializers.ModelSerializer):
phone_number = serializers.IntegerField(validators=[lambda x: len(str(x)) == 10])
class Meta:
model = Customer
fields = '__all__'
class InternalUserSerializer(serializers.ModelSerializer):
class Meta:
model = InternalUser
fields = '__all__'
|
Make the phone number an int
|
Make the phone number an int
|
Python
|
apache-2.0
|
rameshgopalakrishnan/v_excel_inventory,rameshgopalakrishnan/v_excel_inventory,rameshgopalakrishnan/v_excel_inventory
|
from rest_framework import serializers
from people.models import Customer
from people.models import InternalUser
class CustomerSerializer(serializers.ModelSerializer):
class Meta:
model = Customer
fields = '__all__'
class InternalUserSerializer(serializers.ModelSerializer):
class Meta:
model = InternalUser
fields = '__all__'
Make the phone number an int
|
from rest_framework import serializers
from people.models import Customer
from people.models import InternalUser
class CustomerSerializer(serializers.ModelSerializer):
phone_number = serializers.IntegerField(validators=[lambda x: len(str(x)) == 10])
class Meta:
model = Customer
fields = '__all__'
class InternalUserSerializer(serializers.ModelSerializer):
class Meta:
model = InternalUser
fields = '__all__'
|
<commit_before>from rest_framework import serializers
from people.models import Customer
from people.models import InternalUser
class CustomerSerializer(serializers.ModelSerializer):
class Meta:
model = Customer
fields = '__all__'
class InternalUserSerializer(serializers.ModelSerializer):
class Meta:
model = InternalUser
fields = '__all__'
<commit_msg>Make the phone number an int<commit_after>
|
from rest_framework import serializers
from people.models import Customer
from people.models import InternalUser
class CustomerSerializer(serializers.ModelSerializer):
phone_number = serializers.IntegerField(validators=[lambda x: len(str(x)) == 10])
class Meta:
model = Customer
fields = '__all__'
class InternalUserSerializer(serializers.ModelSerializer):
class Meta:
model = InternalUser
fields = '__all__'
|
from rest_framework import serializers
from people.models import Customer
from people.models import InternalUser
class CustomerSerializer(serializers.ModelSerializer):
class Meta:
model = Customer
fields = '__all__'
class InternalUserSerializer(serializers.ModelSerializer):
class Meta:
model = InternalUser
fields = '__all__'
Make the phone number an intfrom rest_framework import serializers
from people.models import Customer
from people.models import InternalUser
class CustomerSerializer(serializers.ModelSerializer):
phone_number = serializers.IntegerField(validators=[lambda x: len(str(x)) == 10])
class Meta:
model = Customer
fields = '__all__'
class InternalUserSerializer(serializers.ModelSerializer):
class Meta:
model = InternalUser
fields = '__all__'
|
<commit_before>from rest_framework import serializers
from people.models import Customer
from people.models import InternalUser
class CustomerSerializer(serializers.ModelSerializer):
class Meta:
model = Customer
fields = '__all__'
class InternalUserSerializer(serializers.ModelSerializer):
class Meta:
model = InternalUser
fields = '__all__'
<commit_msg>Make the phone number an int<commit_after>from rest_framework import serializers
from people.models import Customer
from people.models import InternalUser
class CustomerSerializer(serializers.ModelSerializer):
phone_number = serializers.IntegerField(validators=[lambda x: len(str(x)) == 10])
class Meta:
model = Customer
fields = '__all__'
class InternalUserSerializer(serializers.ModelSerializer):
class Meta:
model = InternalUser
fields = '__all__'
|
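One caveat on the validator in this record: DRF calls each entry in validators and ignores its return value, so a lambda returning a bool never actually rejects input; validators report failure by raising. A sketch of a check that enforces the length (the function name and message text are illustrative):

from rest_framework import serializers

def ten_digits(value):
    # DRF validators must raise to reject a value; returning False is a no-op.
    if len(str(value)) != 10:
        raise serializers.ValidationError('Phone number must be exactly 10 digits.')

phone_number = serializers.IntegerField(validators=[ten_digits])

Integer storage also drops leading zeros, which is why phone numbers are commonly modelled as strings despite commits like this one.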
9a4cf8d594708ef57e41113bad4c76f26f3adc13
|
apps/accounts/managers.py
|
apps/accounts/managers.py
|
"""
Objects managers for the user accounts app.
"""
from django.db import models
class UserProfileManager(models.Manager):
"""
Manager class for the ``UserProfile`` data model.
"""
def get_subscribers_for_newsletter(self):
"""
Return a queryset of all users accepting to receive the newsletter.
"""
return self.filter(accept_newsletter=True)
|
"""
Objects managers for the user accounts app.
"""
import datetime
from django.db import models
from django.utils import timezone
from .settings import ONLINE_USER_TIME_WINDOW_SECONDS
class UserProfileManager(models.Manager):
"""
Manager class for the ``UserProfile`` data model.
"""
def get_subscribers_for_newsletter(self):
"""
Return a queryset of all user accounts who accept to receive the newsletter.
"""
return self.filter(accept_newsletter=True)
def get_online_users_accounts(self):
"""
Return a queryset of all user accounts currently online.
"""
offline_threshold = timezone.now() - datetime.timedelta(seconds=ONLINE_USER_TIME_WINDOW_SECONDS)
return self.filter(online_status_public=True,
last_activity_date__isnull=False,
last_activity_date__gt=offline_threshold)
def get_active_users_accounts(self):
"""
Return a queryset of all active users.
"""
return self.filter(user__is_active=True)
|
Move the get_online_users and get_active_users methods to the manager class.
|
Move the get_online_users and get_active_users methods to the manager class.
|
Python
|
agpl-3.0
|
TamiaLab/carnetdumaker,TamiaLab/carnetdumaker,TamiaLab/carnetdumaker,TamiaLab/carnetdumaker
|
"""
Objects managers for the user accounts app.
"""
from django.db import models
class UserProfileManager(models.Manager):
"""
Manager class for the ``UserProfile`` data model.
"""
def get_subscribers_for_newsletter(self):
"""
Return a queryset of all users accepting to receive the newsletter.
"""
return self.filter(accept_newsletter=True)
Move the get_online_users and get_active_users methods to the manager class.
|
"""
Objects managers for the user accounts app.
"""
import datetime
from django.db import models
from django.utils import timezone
from .settings import ONLINE_USER_TIME_WINDOW_SECONDS
class UserProfileManager(models.Manager):
"""
Manager class for the ``UserProfile`` data model.
"""
def get_subscribers_for_newsletter(self):
"""
Return a queryset of all user accounts who accept to receive the newsletter.
"""
return self.filter(accept_newsletter=True)
def get_online_users_accounts(self):
"""
Return a queryset of all user accounts currently online.
"""
offline_threshold = timezone.now() - datetime.timedelta(seconds=ONLINE_USER_TIME_WINDOW_SECONDS)
return self.filter(online_status_public=True,
last_activity_date__isnull=False,
last_activity_date__gt=offline_threshold)
def get_active_users_accounts(self):
"""
Return a queryset of all active users.
"""
return self.filter(user__is_active=True)
|
<commit_before>"""
Objects managers for the user accounts app.
"""
from django.db import models
class UserProfileManager(models.Manager):
"""
Manager class for the ``UserProfile`` data model.
"""
def get_subscribers_for_newsletter(self):
"""
Return a queryset of all users accepting to receive the newsletter.
"""
return self.filter(accept_newsletter=True)
<commit_msg>Move the get_online_users and get_active_users methods to the manager class.<commit_after>
|
"""
Objects managers for the user accounts app.
"""
import datetime
from django.db import models
from django.utils import timezone
from .settings import ONLINE_USER_TIME_WINDOW_SECONDS
class UserProfileManager(models.Manager):
"""
Manager class for the ``UserProfile`` data model.
"""
def get_subscribers_for_newsletter(self):
"""
Return a queryset of all user accounts who accept to receive the newsletter.
"""
return self.filter(accept_newsletter=True)
def get_online_users_accounts(self):
"""
Return a queryset of all user accounts currently online.
"""
offline_threshold = timezone.now() - datetime.timedelta(seconds=ONLINE_USER_TIME_WINDOW_SECONDS)
return self.filter(online_status_public=True,
last_activity_date__isnull=False,
last_activity_date__gt=offline_threshold)
def get_active_users_accounts(self):
"""
Return a queryset of all active users.
"""
return self.filter(user__is_active=True)
|
"""
Objects managers for the user accounts app.
"""
from django.db import models
class UserProfileManager(models.Manager):
"""
Manager class for the ``UserProfile`` data model.
"""
def get_subscribers_for_newsletter(self):
"""
Return a queryset of all users accepting to receive the newsletter.
"""
return self.filter(accept_newsletter=True)
Move the get_online_users and get_active_users methods to the manager class."""
Objects managers for the user accounts app.
"""
import datetime
from django.db import models
from django.utils import timezone
from .settings import ONLINE_USER_TIME_WINDOW_SECONDS
class UserProfileManager(models.Manager):
"""
Manager class for the ``UserProfile`` data model.
"""
def get_subscribers_for_newsletter(self):
"""
Return a queryset of all user accounts who accept to receive the newsletter.
"""
return self.filter(accept_newsletter=True)
def get_online_users_accounts(self):
"""
Return a queryset of all user accounts currently online.
"""
offline_threshold = timezone.now() - datetime.timedelta(seconds=ONLINE_USER_TIME_WINDOW_SECONDS)
return self.filter(online_status_public=True,
last_activity_date__isnull=False,
last_activity_date__gt=offline_threshold)
def get_active_users_accounts(self):
"""
Return a queryset of all active users.
"""
return self.filter(user__is_active=True)
|
<commit_before>"""
Objects managers for the user accounts app.
"""
from django.db import models
class UserProfileManager(models.Manager):
"""
Manager class for the ``UserProfile`` data model.
"""
def get_subscribers_for_newsletter(self):
"""
Return a queryset of all users accepting to receive the newsletter.
"""
return self.filter(accept_newsletter=True)
<commit_msg>Move the get_online_users and get_active_users methods to the manager class.<commit_after>"""
Objects managers for the user accounts app.
"""
import datetime
from django.db import models
from django.utils import timezone
from .settings import ONLINE_USER_TIME_WINDOW_SECONDS
class UserProfileManager(models.Manager):
"""
Manager class for the ``UserProfile`` data model.
"""
def get_subscribers_for_newsletter(self):
"""
Return a queryset of all user accounts who accept to receive the newsletter.
"""
return self.filter(accept_newsletter=True)
def get_online_users_accounts(self):
"""
Return a queryset of all user accounts currently online.
"""
offline_threshold = timezone.now() - datetime.timedelta(seconds=ONLINE_USER_TIME_WINDOW_SECONDS)
return self.filter(online_status_public=True,
last_activity_date__isnull=False,
last_activity_date__gt=offline_threshold)
def get_active_users_accounts(self):
"""
Return a queryset of all active users.
"""
return self.filter(user__is_active=True)
|
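All three manager methods in this record return querysets, so they compose with further filtering. A hypothetical usage sketch (the import path and the assumption that UserProfile.objects is a UserProfileManager follow the app layout shown; ONLINE_USER_TIME_WINDOW_SECONDS is assumed to live in the app's settings module):

from apps.accounts.models import UserProfile  # assumed model location

online_now = UserProfile.objects.get_online_users_accounts()
active_subscribers = (UserProfile.objects
                      .get_active_users_accounts()
                      .filter(accept_newsletter=True))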
38d1631872d9987b8241a020934560e053aa23b0
|
api/middleware.py
|
api/middleware.py
|
class AddResponseHeader:
def process_response(self, req, resp, resource):
resp.set_header('Access-Control-Allow-Origin', 'http://localhost:8000')
resp.set_header('Access-Control-Allow-Methods', 'GET, POST, PUT')
resp.set_header('Access-Control-Allow-Headers', 'Content-Type')
components = [AddResponseHeader()]
|
class AddResponseHeader:
def process_response(self, req, resp, resource):
resp.set_header('Access-Control-Allow-Origin', 'http://localhost:8000')
resp.set_header('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE')
resp.set_header('Access-Control-Allow-Headers', 'Content-Type')
components = [AddResponseHeader()]
|
Add DELETE to CORS header
|
Add DELETE to CORS header
|
Python
|
mit
|
thepoly/Pipeline,thepoly/Pipeline,thepoly/Pipeline,thepoly/Pipeline,thepoly/Pipeline
|
class AddResponseHeader:
def process_response(self, req, resp, resource):
resp.set_header('Access-Control-Allow-Origin', 'http://localhost:8000')
resp.set_header('Access-Control-Allow-Methods', 'GET, POST, PUT')
resp.set_header('Access-Control-Allow-Headers', 'Content-Type')
components = [AddResponseHeader()]
Add DELETE to CORS header
|
class AddResponseHeader:
def process_response(self, req, resp, resource):
resp.set_header('Access-Control-Allow-Origin', 'http://localhost:8000')
resp.set_header('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE')
resp.set_header('Access-Control-Allow-Headers', 'Content-Type')
components = [AddResponseHeader()]
|
<commit_before>class AddResponseHeader:
def process_response(self, req, resp, resource):
resp.set_header('Access-Control-Allow-Origin', 'http://localhost:8000')
resp.set_header('Access-Control-Allow-Methods', 'GET, POST, PUT')
resp.set_header('Access-Control-Allow-Headers', 'Content-Type')
components = [AddResponseHeader()]
<commit_msg>Add DELETE to CORS header<commit_after>
|
class AddResponseHeader:
def process_response(self, req, resp, resource):
resp.set_header('Access-Control-Allow-Origin', 'http://localhost:8000')
resp.set_header('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE')
resp.set_header('Access-Control-Allow-Headers', 'Content-Type')
components = [AddResponseHeader()]
|
class AddResponseHeader:
def process_response(self, req, resp, resource):
resp.set_header('Access-Control-Allow-Origin', 'http://localhost:8000')
resp.set_header('Access-Control-Allow-Methods', 'GET, POST, PUT')
resp.set_header('Access-Control-Allow-Headers', 'Content-Type')
components = [AddResponseHeader()]
Add DELETE to CORS headerclass AddResponseHeader:
def process_response(self, req, resp, resource):
resp.set_header('Access-Control-Allow-Origin', 'http://localhost:8000')
resp.set_header('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE')
resp.set_header('Access-Control-Allow-Headers', 'Content-Type')
components = [AddResponseHeader()]
|
<commit_before>class AddResponseHeader:
def process_response(self, req, resp, resource):
resp.set_header('Access-Control-Allow-Origin', 'http://localhost:8000')
resp.set_header('Access-Control-Allow-Methods', 'GET, POST, PUT')
resp.set_header('Access-Control-Allow-Headers', 'Content-Type')
components = [AddResponseHeader()]
<commit_msg>Add DELETE to CORS header<commit_after>class AddResponseHeader:
def process_response(self, req, resp, resource):
resp.set_header('Access-Control-Allow-Origin', 'http://localhost:8000')
resp.set_header('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE')
resp.set_header('Access-Control-Allow-Headers', 'Content-Type')
components = [AddResponseHeader()]
|
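Browsers only send a cross-origin DELETE after a successful OPTIONS preflight, which is why the method must be listed in Access-Control-Allow-Methods. A quick check against a running instance (the endpoint path and port are illustrative):

import requests

resp = requests.options(
    'http://localhost:8080/api/articles/1',
    headers={
        'Origin': 'http://localhost:8000',
        'Access-Control-Request-Method': 'DELETE',
    },
)
print(resp.headers.get('Access-Control-Allow-Methods'))  # should include DELETE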
83e7a59b0571560b387ef5f175e02d9f64f3cf7e
|
azulejo/azulejo.py
|
azulejo/azulejo.py
|
import gtk
import keybinder
import configuration
import logging
from azulejo_controller import AzulejoController
from configuration import AzulejoConfiguration
def dispatcher(dis_param):
(func, azulejo_obj, param) = dis_param
azulejo_obj.force_update()
func(azulejo_obj, param)
def run():
azulejo_obj = AzulejoController()
logging.basicConfig(level=logging.DEBUG)
for action in AzulejoConfiguration(True).get_config_data():
keybinder.bind(
action['keybind'],
dispatcher,
(
azulejo_obj.get_action_function(action['function']),
azulejo_obj,
action['parameters']
)
)
gtk.main()
|
import gtk
import keybinder
import configuration
import logging
from azulejo_controller import AzulejoController
from configuration import AzulejoConfiguration
def dispatcher(dis_param):
(func, azulejo_obj, param) = dis_param
azulejo_obj.force_update()
func(azulejo_obj, param)
def run(main=True):
azulejo_obj = AzulejoController()
logging.basicConfig(level=logging.DEBUG)
for action in AzulejoConfiguration(True).get_config_data():
keybinder.bind(
action['keybind'],
dispatcher,
(
azulejo_obj.get_action_function(action['function']),
azulejo_obj,
action['parameters']
)
)
if main:
gtk.main()
|
Make running gtk.main optional for testing
|
Make running gtk.main optional for testing
|
Python
|
mit
|
johnteslade/azulejo,johnteslade/azulejo
|
import gtk
import keybinder
import configuration
import logging
from azulejo_controller import AzulejoController
from configuration import AzulejoConfiguration
def dispatcher(dis_param):
(func, azulejo_obj, param) = dis_param
azulejo_obj.force_update()
func(azulejo_obj, param)
def run():
azulejo_obj = AzulejoController()
logging.basicConfig(level=logging.DEBUG)
for action in AzulejoConfiguration(True).get_config_data():
keybinder.bind(
action['keybind'],
dispatcher,
(
azulejo_obj.get_action_function(action['function']),
azulejo_obj,
action['parameters']
)
)
gtk.main()
Make running gtk.main optional for testing
|
import gtk
import keybinder
import configuration
import logging
from azulejo_controller import AzulejoController
from configuration import AzulejoConfiguration
def dispatcher(dis_param):
(func, azulejo_obj, param) = dis_param
azulejo_obj.force_update()
func(azulejo_obj, param)
def run(main=True):
azulejo_obj = AzulejoController()
logging.basicConfig(level=logging.DEBUG)
for action in AzulejoConfiguration(True).get_config_data():
keybinder.bind(
action['keybind'],
dispatcher,
(
azulejo_obj.get_action_function(action['function']),
azulejo_obj,
action['parameters']
)
)
if main:
gtk.main()
|
<commit_before>import gtk
import keybinder
import configuration
import logging
from azulejo_controller import AzulejoController
from configuration import AzulejoConfiguration
def dispatcher(dis_param):
(func, azulejo_obj, param) = dis_param
azulejo_obj.force_update()
func(azulejo_obj, param)
def run():
azulejo_obj = AzulejoController()
logging.basicConfig(level=logging.DEBUG)
for action in AzulejoConfiguration(True).get_config_data():
keybinder.bind(
action['keybind'],
dispatcher,
(
azulejo_obj.get_action_function(action['function']),
azulejo_obj,
action['parameters']
)
)
gtk.main()
<commit_msg>Make running gtk.main optional for testing<commit_after>
|
import gtk
import keybinder
import configuration
import logging
from azulejo_controller import AzulejoController
from configuration import AzulejoConfiguration
def dispatcher(dis_param):
(func, azulejo_obj, param) = dis_param
azulejo_obj.force_update()
func(azulejo_obj, param)
def run(main=True):
azulejo_obj = AzulejoController()
logging.basicConfig(level=logging.DEBUG)
for action in AzulejoConfiguration(True).get_config_data():
keybinder.bind(
action['keybind'],
dispatcher,
(
azulejo_obj.get_action_function(action['function']),
azulejo_obj,
action['parameters']
)
)
if main:
gtk.main()
|
import gtk
import keybinder
import configuration
import logging
from azulejo_controller import AzulejoController
from configuration import AzulejoConfiguration
def dispatcher(dis_param):
(func, azulejo_obj, param) = dis_param
azulejo_obj.force_update()
func(azulejo_obj, param)
def run():
azulejo_obj = AzulejoController()
logging.basicConfig(level=logging.DEBUG)
for action in AzulejoConfiguration(True).get_config_data():
keybinder.bind(
action['keybind'],
dispatcher,
(
azulejo_obj.get_action_function(action['function']),
azulejo_obj,
action['parameters']
)
)
gtk.main()
Make running gtk.main optional for testingimport gtk
import keybinder
import configuration
import logging
from azulejo_controller import AzulejoController
from configuration import AzulejoConfiguration
def dispatcher(dis_param):
(func, azulejo_obj, param) = dis_param
azulejo_obj.force_update()
func(azulejo_obj, param)
def run(main=True):
azulejo_obj = AzulejoController()
logging.basicConfig(level=logging.DEBUG)
for action in AzulejoConfiguration(True).get_config_data():
keybinder.bind(
action['keybind'],
dispatcher,
(
azulejo_obj.get_action_function(action['function']),
azulejo_obj,
action['parameters']
)
)
if main:
gtk.main()
|
<commit_before>import gtk
import keybinder
import configuration
import logging
from azulejo_controller import AzulejoController
from configuration import AzulejoConfiguration
def dispatcher(dis_param):
(func, azulejo_obj, param) = dis_param
azulejo_obj.force_update()
func(azulejo_obj, param)
def run():
azulejo_obj = AzulejoController()
logging.basicConfig(level=logging.DEBUG)
for action in AzulejoConfiguration(True).get_config_data():
keybinder.bind(
action['keybind'],
dispatcher,
(
azulejo_obj.get_action_function(action['function']),
azulejo_obj,
action['parameters']
)
)
gtk.main()
<commit_msg>Make running gtk.main optional for testing<commit_after>import gtk
import keybinder
import configuration
import logging
from azulejo_controller import AzulejoController
from configuration import AzulejoConfiguration
def dispatcher(dis_param):
(func, azulejo_obj, param) = dis_param
azulejo_obj.force_update()
func(azulejo_obj, param)
def run(main=True):
azulejo_obj = AzulejoController()
logging.basicConfig(level=logging.DEBUG)
for action in AzulejoConfiguration(True).get_config_data():
keybinder.bind(
action['keybind'],
dispatcher,
(
azulejo_obj.get_action_function(action['function']),
azulejo_obj,
action['parameters']
)
)
if main:
gtk.main()
|
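The main flag introduced here lets a test perform all the keybinder setup and return instead of blocking in gtk.main(). A sketch of such a test (the import path is assumed from the file layout; keybinder still needs a live X session, so this is integration-level rather than a pure unit test):

from azulejo.azulejo import run  # assumed import path

def test_run_returns_without_main_loop():
    run(main=False)  # binds the keys, then returns instead of looping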
9ba70ea12ce1dd1bb6363a4f86703b5bfce34732
|
app/settings/prod.py
|
app/settings/prod.py
|
import dj_database_url
from .default import *
DEBUG = False
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', None)
ALLOWED_HOSTS = ['agendaodonto.herokuapp.com']
DATABASES = {'default': dj_database_url.config()}
CORS_ORIGIN_WHITELIST = (
'agendaodonto.com',
'backend.agendaodonto.com',
)
DJOSER['DOMAIN'] = 'agendaodonto.com'
# Celery Settings
CELERY_BROKER_URL = os.getenv('RABBITMQ_URL', None)
CELERY_BROKER_HEARTBEAT = None
|
import dj_database_url
from .default import *
DEBUG = False
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', None)
ALLOWED_HOSTS = [
'backend.agendaodonto.com'
]
DATABASES = {'default': dj_database_url.config()}
CORS_ORIGIN_WHITELIST = (
'agendaodonto.com',
'backend.agendaodonto.com',
)
DJOSER['DOMAIN'] = 'agendaodonto.com'
# Celery Settings
CELERY_BROKER_URL = os.getenv('RABBITMQ_URL', None)
CELERY_BROKER_HEARTBEAT = None
|
Update allowed hosts to the new domain
|
fix: Update allowed hosts to the new domain
|
Python
|
agpl-3.0
|
agendaodonto/server,agendaodonto/server
|
import dj_database_url
from .default import *
DEBUG = False
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', None)
ALLOWED_HOSTS = ['agendaodonto.herokuapp.com']
DATABASES = {'default': dj_database_url.config()}
CORS_ORIGIN_WHITELIST = (
'agendaodonto.com',
'backend.agendaodonto.com',
)
DJOSER['DOMAIN'] = 'agendaodonto.com'
# Celery Settings
CELERY_BROKER_URL = os.getenv('RABBITMQ_URL', None)
CELERY_BROKER_HEARTBEAT = None
fix: Update allowed hosts to the new domain
|
import dj_database_url
from .default import *
DEBUG = False
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', None)
ALLOWED_HOSTS = [
'backend.agendaodonto.com'
]
DATABASES = {'default': dj_database_url.config()}
CORS_ORIGIN_WHITELIST = (
'agendaodonto.com',
'backend.agendaodonto.com',
)
DJOSER['DOMAIN'] = 'agendaodonto.com'
# Celery Settings
CELERY_BROKER_URL = os.getenv('RABBITMQ_URL', None)
CELERY_BROKER_HEARTBEAT = None
|
<commit_before>import dj_database_url
from .default import *
DEBUG = False
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', None)
ALLOWED_HOSTS = ['agendaodonto.herokuapp.com']
DATABASES = {'default': dj_database_url.config()}
CORS_ORIGIN_WHITELIST = (
'agendaodonto.com',
'backend.agendaodonto.com',
)
DJOSER['DOMAIN'] = 'agendaodonto.com'
# Celery Settings
CELERY_BROKER_URL = os.getenv('RABBITMQ_URL', None)
CELERY_BROKER_HEARTBEAT = None
<commit_msg>fix: Update allowed hosts to the new domain<commit_after>
|
import dj_database_url
from .default import *
DEBUG = False
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', None)
ALLOWED_HOSTS = [
'backend.agendaodonto.com'
]
DATABASES = {'default': dj_database_url.config()}
CORS_ORIGIN_WHITELIST = (
'agendaodonto.com',
'backend.agendaodonto.com',
)
DJOSER['DOMAIN'] = 'agendaodonto.com'
# Celery Settings
CELERY_BROKER_URL = os.getenv('RABBITMQ_URL', None)
CELERY_BROKER_HEARTBEAT = None
|
import dj_database_url
from .default import *
DEBUG = False
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', None)
ALLOWED_HOSTS = ['agendaodonto.herokuapp.com']
DATABASES = {'default': dj_database_url.config()}
CORS_ORIGIN_WHITELIST = (
'agendaodonto.com',
'backend.agendaodonto.com',
)
DJOSER['DOMAIN'] = 'agendaodonto.com'
# Celery Settings
CELERY_BROKER_URL = os.getenv('RABBITMQ_URL', None)
CELERY_BROKER_HEARTBEAT = None
fix: Update allowed hosts to the new domainimport dj_database_url
from .default import *
DEBUG = False
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', None)
ALLOWED_HOSTS = [
'backend.agendaodonto.com'
]
DATABASES = {'default': dj_database_url.config()}
CORS_ORIGIN_WHITELIST = (
'agendaodonto.com',
'backend.agendaodonto.com',
)
DJOSER['DOMAIN'] = 'agendaodonto.com'
# Celery Settings
CELERY_BROKER_URL = os.getenv('RABBITMQ_URL', None)
CELERY_BROKER_HEARTBEAT = None
|
<commit_before>import dj_database_url
from .default import *
DEBUG = False
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', None)
ALLOWED_HOSTS = ['agendaodonto.herokuapp.com']
DATABASES = {'default': dj_database_url.config()}
CORS_ORIGIN_WHITELIST = (
'agendaodonto.com',
'backend.agendaodonto.com',
)
DJOSER['DOMAIN'] = 'agendaodonto.com'
# Celery Settings
CELERY_BROKER_URL = os.getenv('RABBITMQ_URL', None)
CELERY_BROKER_HEARTBEAT = None
<commit_msg>fix: Update allowed hosts to the new domain<commit_after>import dj_database_url
from .default import *
DEBUG = False
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', None)
ALLOWED_HOSTS = [
'backend.agendaodonto.com'
]
DATABASES = {'default': dj_database_url.config()}
CORS_ORIGIN_WHITELIST = (
'agendaodonto.com',
'backend.agendaodonto.com',
)
DJOSER['DOMAIN'] = 'agendaodonto.com'
# Celery Settings
CELERY_BROKER_URL = os.getenv('RABBITMQ_URL', None)
CELERY_BROKER_HEARTBEAT = None
|
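Since the secret key and broker URL in this settings file already come from the environment, the host list can follow the same pattern and avoid a redeploy when domains change; a sketch (the variable name is illustrative):

import os

ALLOWED_HOSTS = os.getenv('DJANGO_ALLOWED_HOSTS',
                          'backend.agendaodonto.com').split(',')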
1a2cd182ec709e0f7c64626a4467abf98f2951c2
|
analyzer/darwin/modules/packages/bash.py
|
analyzer/darwin/modules/packages/bash.py
|
#!/usr/bin/env python
# Copyright (C) 2015 Dmitry Rodionov
# This file is part of my GSoC'15 project for Cuckoo Sandbox:
# http://www.cuckoosandbox.org
# This software may be modified and distributed under the terms
# of the MIT license. See the LICENSE file for details.
from lib.core.packages import Package
class Bash(Package):
""" Bash shell script analysys package. """
def start(self):
# Some scripts are not executable, so we have to use /bin/bash to
# invoke them
self.args = [self.target] + self.args
self.target = "/bin/bash"
if "method" in self.options:
method = self.options["method"]
else: # fallback to dtruss
method = "dtruss"
if "dtruss" in method:
for x in self._start_dtruss():
yield x
else:
yield "Invalid analysis method \"%S\" for package \"Bash\"" % method
|
#!/usr/bin/env python
# Copyright (C) 2015 Dmitry Rodionov
# This file is part of my GSoC'15 project for Cuckoo Sandbox:
# http://www.cuckoosandbox.org
# This software may be modified and distributed under the terms
# of the MIT license. See the LICENSE file for details.
from lib.core.packages import Package
class Bash(Package):
""" Bash shell script analysys package. """
def start(self):
# Some scripts are not executable, so we have to use /bin/bash to
# invoke them
self.args = [self.target] + self.args
self.target = "/bin/bash"
if "method" in self.options:
method = self.options["method"]
else: # fallback to dtruss
method = "dtruss"
if "dtruss" in method:
for x in self._start_dtruss():
yield x
elif "apicalls" in method:
for x in self._start_apicalls():
yield x
else:
yield "Invalid analysis method \"%S\" for package \"Bash\"" % method
|
Add support for apicalls in Bash package
|
Add support for apicalls in Bash package
|
Python
|
mit
|
cuckoobox/cuckoo,rodionovd/cuckoo-osx-analyzer,cuckoobox/cuckoo,cuckoobox/cuckoo,rodionovd/cuckoo-osx-analyzer,cuckoobox/cuckoo,cuckoobox/cuckoo,rodionovd/cuckoo-osx-analyzer
|
#!/usr/bin/env python
# Copyright (C) 2015 Dmitry Rodionov
# This file is part of my GSoC'15 project for Cuckoo Sandbox:
# http://www.cuckoosandbox.org
# This software may be modified and distributed under the terms
# of the MIT license. See the LICENSE file for details.
from lib.core.packages import Package
class Bash(Package):
""" Bash shell script analysys package. """
def start(self):
# Some scripts are not executable, so we have to use /bin/bash to
# invoke them
self.args = [self.target] + self.args
self.target = "/bin/bash"
if "method" in self.options:
method = self.options["method"]
else: # fallback to dtruss
method = "dtruss"
if "dtruss" in method:
for x in self._start_dtruss():
yield x
else:
yield "Invalid analysis method \"%S\" for package \"Bash\"" % method
Add support for apicalls in Bash package
|
#!/usr/bin/env python
# Copyright (C) 2015 Dmitry Rodionov
# This file is part of my GSoC'15 project for Cuckoo Sandbox:
# http://www.cuckoosandbox.org
# This software may be modified and distributed under the terms
# of the MIT license. See the LICENSE file for details.
from lib.core.packages import Package
class Bash(Package):
""" Bash shell script analysys package. """
def start(self):
# Some scripts are not executable, so we have to use /bin/bash to
# invoke them
self.args = [self.target] + self.args
self.target = "/bin/bash"
if "method" in self.options:
method = self.options["method"]
else: # fallback to dtruss
method = "dtruss"
if "dtruss" in method:
for x in self._start_dtruss():
yield x
elif "apicalls" in method:
for x in self._start_apicalls():
yield x
else:
yield "Invalid analysis method \"%S\" for package \"Bash\"" % method
|
<commit_before>#!/usr/bin/env python
# Copyright (C) 2015 Dmitry Rodionov
# This file is part of my GSoC'15 project for Cuckoo Sandbox:
# http://www.cuckoosandbox.org
# This software may be modified and distributed under the terms
# of the MIT license. See the LICENSE file for details.
from lib.core.packages import Package
class Bash(Package):
""" Bash shell script analysys package. """
def start(self):
# Some scripts are not executable, so we have to use /bin/bash to
# invoke them
self.args = [self.target] + self.args
self.target = "/bin/bash"
if "method" in self.options:
method = self.options["method"]
else: # fallback to dtruss
method = "dtruss"
if "dtruss" in method:
for x in self._start_dtruss():
yield x
else:
yield "Invalid analysis method \"%S\" for package \"Bash\"" % method
<commit_msg>Add support for apicalls in Bash package<commit_after>
|
#!/usr/bin/env python
# Copyright (C) 2015 Dmitry Rodionov
# This file is part of my GSoC'15 project for Cuckoo Sandbox:
# http://www.cuckoosandbox.org
# This software may be modified and distributed under the terms
# of the MIT license. See the LICENSE file for details.
from lib.core.packages import Package
class Bash(Package):
""" Bash shell script analysys package. """
def start(self):
# Some scripts are not executable, so we have to use /bin/bash to
# invoke them
self.args = [self.target] + self.args
self.target = "/bin/bash"
if "method" in self.options:
method = self.options["method"]
else: # fallback to dtruss
method = "dtruss"
if "dtruss" in method:
for x in self._start_dtruss():
yield x
elif "apicalls" in method:
for x in self._start_apicalls():
yield x
else:
yield "Invalid analysis method \"%S\" for package \"Bash\"" % method
|
#!/usr/bin/env python
# Copyright (C) 2015 Dmitry Rodionov
# This file is part of my GSoC'15 project for Cuckoo Sandbox:
# http://www.cuckoosandbox.org
# This software may be modified and distributed under the terms
# of the MIT license. See the LICENSE file for details.
from lib.core.packages import Package
class Bash(Package):
""" Bash shell script analysys package. """
def start(self):
# Some scripts are not executable, so we have to use /bin/bash to
# invoke them
self.args = [self.target] + self.args
self.target = "/bin/bash"
if "method" in self.options:
method = self.options["method"]
else: # fallback to dtruss
method = "dtruss"
if "dtruss" in method:
for x in self._start_dtruss():
yield x
else:
yield "Invalid analysis method \"%S\" for package \"Bash\"" % method
Add support for apicalls in Bash package#!/usr/bin/env python
# Copyright (C) 2015 Dmitry Rodionov
# This file is part of my GSoC'15 project for Cuckoo Sandbox:
# http://www.cuckoosandbox.org
# This software may be modified and distributed under the terms
# of the MIT license. See the LICENSE file for details.
from lib.core.packages import Package
class Bash(Package):
""" Bash shell script analysys package. """
def start(self):
# Some scripts are not executable, so we have to use /bin/bash to
# invoke them
self.args = [self.target] + self.args
self.target = "/bin/bash"
if "method" in self.options:
method = self.options["method"]
else: # fallback to dtruss
method = "dtruss"
if "dtruss" in method:
for x in self._start_dtruss():
yield x
elif "apicalls" in method:
for x in self._start_apicalls():
yield x
else:
yield "Invalid analysis method \"%S\" for package \"Bash\"" % method
|
<commit_before>#!/usr/bin/env python
# Copyright (C) 2015 Dmitry Rodionov
# This file is part of my GSoC'15 project for Cuckoo Sandbox:
# http://www.cuckoosandbox.org
# This software may be modified and distributed under the terms
# of the MIT license. See the LICENSE file for details.
from lib.core.packages import Package
class Bash(Package):
""" Bash shell script analysys package. """
def start(self):
# Some scripts are not executable, so we have to use /bin/bash to
# invoke them
self.args = [self.target] + self.args
self.target = "/bin/bash"
if "method" in self.options:
method = self.options["method"]
else: # fallback to dtruss
method = "dtruss"
if "dtruss" in method:
for x in self._start_dtruss():
yield x
else:
yield "Invalid analysis method \"%S\" for package \"Bash\"" % method
<commit_msg>Add support for apicalls in Bash package<commit_after>#!/usr/bin/env python
# Copyright (C) 2015 Dmitry Rodionov
# This file is part of my GSoC'15 project for Cuckoo Sandbox:
# http://www.cuckoosandbox.org
# This software may be modified and distributed under the terms
# of the MIT license. See the LICENSE file for details.
from lib.core.packages import Package
class Bash(Package):
""" Bash shell script analysys package. """
def start(self):
# Some scripts are not executable, so we have to use /bin/bash to
# invoke them
self.args = [self.target] + self.args
self.target = "/bin/bash"
if "method" in self.options:
method = self.options["method"]
else: # fallback to dtruss
method = "dtruss"
if "dtruss" in method:
for x in self._start_dtruss():
yield x
elif "apicalls" in method:
for x in self._start_apicalls():
yield x
else:
yield "Invalid analysis method \"%S\" for package \"Bash\"" % method
|
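As analysis methods accumulate, the if/elif chain in start() can be flattened into a table lookup. A sketch of the same body with exact-match dispatch (note the original uses a substring test, "dtruss" in method, which this version tightens):

    def start(self):
        self.args = [self.target] + self.args
        self.target = "/bin/bash"
        method = self.options.get("method", "dtruss")
        # Map method names onto the Package helpers used above.
        runners = {
            "dtruss": self._start_dtruss,
            "apicalls": self._start_apicalls,
        }
        runner = runners.get(method)
        if runner is None:
            yield "Invalid analysis method \"%s\" for package \"Bash\"" % method
            return
        for x in runner():
            yield x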
2752af0f94ba477ac95b00a05243719d1a01c354
|
src/checker/pluginManager.py
|
src/checker/pluginManager.py
|
""" Plugin manager is Checker's main module.
Plugin Manager uses Yapsy to find plugins in
a directory and loads them via PluginRunner.
"""
from yapsy.PluginManager import PluginManager
from pluginRunner import PluginRunner
from configLoader import ConfigLoader
from down import Scraper
import logging
import sys
def main():
""" Load configuration, find plugins, run plugin runner.
"""
if len(sys.argv) == 2:
# load configuration
cl = ConfigLoader()
cl.load(sys.argv[1])
# download initial transactions
s = Scraper(cl.getDbconf())
s.scrap(cl.getEntryPoints())
logging.getLogger("yapsy").addHandler(logging.StreamHandler())
# load plugins
manager = PluginManager()
manager.setPluginPlaces(["checker/plugin"])
manager.collectPlugins()
plugins = []
for pluginInfo in manager.getAllPlugins():
print(pluginInfo.name)
plugins.append(pluginInfo.plugin_object)
if len(plugins) == 0:
print("No plugins found")
runner = PluginRunner(cl.getDbconf(), cl.getUriAcceptor(),
cl.getTypeAcceptor(), cl.getMaxDepth())
# verify
runner.run(plugins)
else:
print("Usage: "+sys.argv[0]+" <configuration XML file>")
if __name__ == "__main__":
main()
|
""" Plugin manager is Checker's main module.
Plugin Manager uses Yapsy to find plugins in
a directory and loads them via PluginRunner.
"""
from yapsy.PluginManager import PluginManager
from pluginRunner import PluginRunner
from configLoader import ConfigLoader
from down import Scraper
import logging
import sys
def main():
""" Load configuration, find plugins, run plugin runner.
"""
if len(sys.argv) == 2:
# load configuration
cl = ConfigLoader()
cl.load(sys.argv[1])
# download initial transactions
s = Scraper(cl.getDbconf())
s.scrap(cl.getEntryPoints())
logging.getLogger("yapsy").addHandler(logging.StreamHandler())
# load plugins
manager = PluginManager()
manager.setPluginPlaces(["checker/plugin"])
manager.collectPlugins()
plugins = []
for pluginInfo in manager.getAllPlugins():
print(pluginInfo.name)
plugins.append(pluginInfo.plugin_object)
if len(plugins) == 0:
print("No plugins found")
runner = PluginRunner(cl.getDbconf(), cl.getUriAcceptor(),
cl.getTypeAcceptor(), cl.getMaxDepth())
# verify
runner.run(plugins)
else:
print("Usage: "+sys.argv[0]+" <configuration YAML file>")
if __name__ == "__main__":
main()
|
Fix typo - XML<->YAML configuration
|
Fix typo - XML<->YAML configuration
|
Python
|
mit
|
eghuro/crawlcheck
|
""" Plugin manager is Checker's main module.
Plugin Manager uses Yapsy to find plugins in
a directory and loads them via PluginRunner.
"""
from yapsy.PluginManager import PluginManager
from pluginRunner import PluginRunner
from configLoader import ConfigLoader
from down import Scraper
import logging
import sys
def main():
""" Load configuration, find plugins, run plugin runner.
"""
if len(sys.argv) == 2:
# load configuration
cl = ConfigLoader()
cl.load(sys.argv[1])
# download initial transactions
s = Scraper(cl.getDbconf())
s.scrap(cl.getEntryPoints())
logging.getLogger("yapsy").addHandler(logging.StreamHandler())
# load plugins
manager = PluginManager()
manager.setPluginPlaces(["checker/plugin"])
manager.collectPlugins()
plugins = []
for pluginInfo in manager.getAllPlugins():
print(pluginInfo.name)
plugins.append(pluginInfo.plugin_object)
if len(plugins) == 0:
print("No plugins found")
runner = PluginRunner(cl.getDbconf(), cl.getUriAcceptor(),
cl.getTypeAcceptor(), cl.getMaxDepth())
# verify
runner.run(plugins)
else:
print("Usage: "+sys.argv[0]+" <configuration XML file>")
if __name__ == "__main__":
main()
Fix typo - XML<->YAML configuration
|
""" Plugin manager is Checker's main module.
Plugin Manager uses Yapsy to find plugins in
a directory and loads them via PluginRunner.
"""
from yapsy.PluginManager import PluginManager
from pluginRunner import PluginRunner
from configLoader import ConfigLoader
from down import Scraper
import logging
import sys
def main():
""" Load configuration, find plugins, run plugin runner.
"""
if len(sys.argv) == 2:
# load configuration
cl = ConfigLoader()
cl.load(sys.argv[1])
# download initial transactions
s = Scraper(cl.getDbconf())
s.scrap(cl.getEntryPoints())
logging.getLogger("yapsy").addHandler(logging.StreamHandler())
# load plugins
manager = PluginManager()
manager.setPluginPlaces(["checker/plugin"])
manager.collectPlugins()
plugins = []
for pluginInfo in manager.getAllPlugins():
print(pluginInfo.name)
plugins.append(pluginInfo.plugin_object)
if len(plugins) == 0:
print("No plugins found")
runner = PluginRunner(cl.getDbconf(), cl.getUriAcceptor(),
cl.getTypeAcceptor(), cl.getMaxDepth())
# verify
runner.run(plugins)
else:
print("Usage: "+sys.argv[0]+" <configuration YAML file>")
if __name__ == "__main__":
main()
|
<commit_before>""" Plugin manager is Checker's main module.
Plugin Manager uses Yapsy to find plugins in
a directory and loads them via PluginRunner.
"""
from yapsy.PluginManager import PluginManager
from pluginRunner import PluginRunner
from configLoader import ConfigLoader
from down import Scraper
import logging
import sys
def main():
""" Load configuration, find plugins, run plugin runner.
"""
if len(sys.argv) == 2:
# load configuration
cl = ConfigLoader()
cl.load(sys.argv[1])
# download initial transactions
s = Scraper(cl.getDbconf())
s.scrap(cl.getEntryPoints())
logging.getLogger("yapsy").addHandler(logging.StreamHandler())
# load plugins
manager = PluginManager()
manager.setPluginPlaces(["checker/plugin"])
manager.collectPlugins()
plugins = []
for pluginInfo in manager.getAllPlugins():
print(pluginInfo.name)
plugins.append(pluginInfo.plugin_object)
if len(plugins) == 0:
print("No plugins found")
runner = PluginRunner(cl.getDbconf(), cl.getUriAcceptor(),
cl.getTypeAcceptor(), cl.getMaxDepth())
# verify
runner.run(plugins)
else:
print("Usage: "+sys.argv[0]+" <configuration XML file>")
if __name__ == "__main__":
main()
<commit_msg>Fix typo - XML<->YAML configuration<commit_after>
|
""" Plugin manager is Checker's main module.
Plugin Manager uses Yapsy to find plugins in
a directory and loads them via PluginRunner.
"""
from yapsy.PluginManager import PluginManager
from pluginRunner import PluginRunner
from configLoader import ConfigLoader
from down import Scraper
import logging
import sys
def main():
""" Load configuration, find plugins, run plugin runner.
"""
if len(sys.argv) == 2:
# load configuration
cl = ConfigLoader()
cl.load(sys.argv[1])
# download initial transactions
s = Scraper(cl.getDbconf())
s.scrap(cl.getEntryPoints())
logging.getLogger("yapsy").addHandler(logging.StreamHandler())
# load plugins
manager = PluginManager()
manager.setPluginPlaces(["checker/plugin"])
manager.collectPlugins()
plugins = []
for pluginInfo in manager.getAllPlugins():
print(pluginInfo.name)
plugins.append(pluginInfo.plugin_object)
if len(plugins) == 0:
print("No plugins found")
runner = PluginRunner(cl.getDbconf(), cl.getUriAcceptor(),
cl.getTypeAcceptor(), cl.getMaxDepth())
# verify
runner.run(plugins)
else:
print("Usage: "+sys.argv[0]+" <configuration YAML file>")
if __name__ == "__main__":
main()
|
""" Plugin manager is Checker's main module.
Plugin Manager uses Yapsy to find plugins in
a directory and loads them via PluginRunner.
"""
from yapsy.PluginManager import PluginManager
from pluginRunner import PluginRunner
from configLoader import ConfigLoader
from down import Scraper
import logging
import sys
def main():
""" Load configuration, find plugins, run plugin runner.
"""
if len(sys.argv) == 2:
# load configuration
cl = ConfigLoader()
cl.load(sys.argv[1])
# download initial transactions
s = Scraper(cl.getDbconf())
s.scrap(cl.getEntryPoints())
logging.getLogger("yapsy").addHandler(logging.StreamHandler())
# load plugins
manager = PluginManager()
manager.setPluginPlaces(["checker/plugin"])
manager.collectPlugins()
plugins = []
for pluginInfo in manager.getAllPlugins():
print(pluginInfo.name)
plugins.append(pluginInfo.plugin_object)
if len(plugins) == 0:
print("No plugins found")
runner = PluginRunner(cl.getDbconf(), cl.getUriAcceptor(),
cl.getTypeAcceptor(), cl.getMaxDepth())
# verify
runner.run(plugins)
else:
print("Usage: "+sys.argv[0]+" <configuration XML file>")
if __name__ == "__main__":
main()
Fix typo - XML<->YAML configuration""" Plugin manager is Checker's main module.
Plugin Manager uses Yapsy to find plugins in
a directory and loads them via PluginRunner.
"""
from yapsy.PluginManager import PluginManager
from pluginRunner import PluginRunner
from configLoader import ConfigLoader
from down import Scraper
import logging
import sys
def main():
""" Load configuration, find plugins, run plugin runner.
"""
if len(sys.argv) == 2:
# load configuration
cl = ConfigLoader()
cl.load(sys.argv[1])
# download initial transactions
s = Scraper(cl.getDbconf())
s.scrap(cl.getEntryPoints())
logging.getLogger("yapsy").addHandler(logging.StreamHandler())
# load plugins
manager = PluginManager()
manager.setPluginPlaces(["checker/plugin"])
manager.collectPlugins()
plugins = []
for pluginInfo in manager.getAllPlugins():
print(pluginInfo.name)
plugins.append(pluginInfo.plugin_object)
if len(plugins) == 0:
print("No plugins found")
runner = PluginRunner(cl.getDbconf(), cl.getUriAcceptor(),
cl.getTypeAcceptor(), cl.getMaxDepth())
# verify
runner.run(plugins)
else:
print("Usage: "+sys.argv[0]+" <configuration YAML file>")
if __name__ == "__main__":
main()
|
<commit_before>""" Plugin manager is Checker's main module.
Plugin Manager uses Yapsy to find plugins in
a directory and loads them via PluginRunner.
"""
from yapsy.PluginManager import PluginManager
from pluginRunner import PluginRunner
from configLoader import ConfigLoader
from down import Scraper
import logging
import sys
def main():
""" Load configuration, find plugins, run plugin runner.
"""
if len(sys.argv) == 2:
# load configuration
cl = ConfigLoader()
cl.load(sys.argv[1])
# download initial transactions
s = Scraper(cl.getDbconf())
s.scrap(cl.getEntryPoints())
logging.getLogger("yapsy").addHandler(logging.StreamHandler())
# load plugins
manager = PluginManager()
manager.setPluginPlaces(["checker/plugin"])
manager.collectPlugins()
plugins = []
for pluginInfo in manager.getAllPlugins():
print(pluginInfo.name)
plugins.append(pluginInfo.plugin_object)
if len(plugins) == 0:
print("No plugins found")
runner = PluginRunner(cl.getDbconf(), cl.getUriAcceptor(),
cl.getTypeAcceptor(), cl.getMaxDepth())
# verify
runner.run(plugins)
else:
print("Usage: "+sys.argv[0]+" <configuration XML file>")
if __name__ == "__main__":
main()
<commit_msg>Fix typo - XML<->YAML configuration<commit_after>""" Plugin manager is Checker's main module.
Plugin Manager uses Yapsy to find plugins in
a directory and loads them via PluginRunner.
"""
from yapsy.PluginManager import PluginManager
from pluginRunner import PluginRunner
from configLoader import ConfigLoader
from down import Scraper
import logging
import sys
def main():
""" Load configuration, find plugins, run plugin runner.
"""
if len(sys.argv) == 2:
# load configuration
cl = ConfigLoader()
cl.load(sys.argv[1])
# download initial transactions
s = Scraper(cl.getDbconf())
s.scrap(cl.getEntryPoints())
logging.getLogger("yapsy").addHandler(logging.StreamHandler())
# load plugins
manager = PluginManager()
manager.setPluginPlaces(["checker/plugin"])
manager.collectPlugins()
plugins = []
for pluginInfo in manager.getAllPlugins():
print(pluginInfo.name)
plugins.append(pluginInfo.plugin_object)
if len(plugins) == 0:
print("No plugins found")
runner = PluginRunner(cl.getDbconf(), cl.getUriAcceptor(),
cl.getTypeAcceptor(), cl.getMaxDepth())
# verify
runner.run(plugins)
else:
print("Usage: "+sys.argv[0]+" <configuration YAML file>")
if __name__ == "__main__":
main()
|
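The manual len(sys.argv) check in this record works, but argparse produces the same usage line plus --help for free. A minimal sketch of the entry point's argument handling rewritten that way (behaviour otherwise unchanged):

import argparse

def parse_args():
    parser = argparse.ArgumentParser(description="Run Checker's plugins over a site.")
    parser.add_argument('config', help='configuration YAML file')
    return parser.parse_args()

# main() would then start with: cl.load(parse_args().config)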
f9e5efe33c28cfe88fa67ccc883d4817552ea178
|
docs/conf.py
|
docs/conf.py
|
import os
import taxii_services
project = u'django-taxii-services'
copyright = u'2014, The MITRE Corporation'
version = taxii_services.__version__
release = version
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {}
""".format(release)
exclude_patterns = ['_build']
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'django-taxii-services.tex', u'django-taxii-services Documentation',
u'Mark Davidson', 'manual'),
]
|
import os
import taxii_services
project = u'django-taxii-services'
copyright = u'2014, The MITRE Corporation'
version = taxii_services.__version__
release = version
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {0}
""".format(release)
exclude_patterns = ['_build']
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'django-taxii-services.tex', u'django-taxii-services Documentation',
u'Mark Davidson', 'manual'),
]
|
Fix zero-length field error when building docs in Python 2.6
|
Fix zero-length field error when building docs in Python 2.6
|
Python
|
bsd-3-clause
|
TAXIIProject/django-taxii-services
|
import os
import taxii_services
project = u'django-taxii-services'
copyright = u'2014, The MITRE Corporation'
version = taxii_services.__version__
release = version
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {}
""".format(release)
exclude_patterns = ['_build']
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'django-taxii-services.tex', u'django-taxii-services Documentation',
u'Mark Davidson', 'manual'),
]
Fix zero-length field error when building docs in Python 2.6
|
import os
import taxii_services
project = u'django-taxii-services'
copyright = u'2014, The MITRE Corporation'
version = taxii_services.__version__
release = version
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {0}
""".format(release)
exclude_patterns = ['_build']
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'django-taxii-services.tex', u'django-taxii-services Documentation',
u'Mark Davidson', 'manual'),
]
|
<commit_before>import os
import taxii_services
project = u'django-taxii-services'
copyright = u'2014, The MITRE Corporation'
version = taxii_services.__version__
release = version
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {}
""".format(release)
exclude_patterns = ['_build']
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'django-taxii-services.tex', u'django-taxii-services Documentation',
u'Mark Davidson', 'manual'),
]
<commit_msg>Fix zero-length field error when building docs in Python 2.6<commit_after>
|
import os
import taxii_services
project = u'django-taxii-services'
copyright = u'2014, The MITRE Corporation'
version = taxii_services.__version__
release = version
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {0}
""".format(release)
exclude_patterns = ['_build']
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'django-taxii-services.tex', u'django-taxii-services Documentation',
u'Mark Davidson', 'manual'),
]
|
import os
import taxii_services
project = u'django-taxii-services'
copyright = u'2014, The MITRE Corporation'
version = taxii_services.__version__
release = version
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {}
""".format(release)
exclude_patterns = ['_build']
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'django-taxii-services.tex', u'django-taxii-services Documentation',
u'Mark Davidson', 'manual'),
]
Fix zero-length field error when building docs in Python 2.6import os
import taxii_services
project = u'django-taxii-services'
copyright = u'2014, The MITRE Corporation'
version = taxii_services.__version__
release = version
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {0}
""".format(release)
exclude_patterns = ['_build']
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'django-taxii-services.tex', u'django-taxii-services Documentation',
u'Mark Davidson', 'manual'),
]
|
<commit_before>import os
import taxii_services
project = u'django-taxii-services'
copyright = u'2014, The MITRE Corporation'
version = taxii_services.__version__
release = version
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {}
""".format(release)
exclude_patterns = ['_build']
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'django-taxii-services.tex', u'django-taxii-services Documentation',
u'Mark Davidson', 'manual'),
]
<commit_msg>Fix zero-length field error when building docs in Python 2.6<commit_after>import os
import taxii_services
project = u'django-taxii-services'
copyright = u'2014, The MITRE Corporation'
version = taxii_services.__version__
release = version
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {0}
""".format(release)
exclude_patterns = ['_build']
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'django-taxii-services.tex', u'django-taxii-services Documentation',
u'Mark Davidson', 'manual'),
]
|
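Background for the fix above: auto-numbered replacement fields ({}) were added to str.format in Python 2.7 and 3.1; Python 2.6 raises ValueError ("zero length field name in format"), hence the explicit {0}:

'{0}'.format('x')   # works on Python 2.6 and later
'{}'.format('x')    # ValueError on Python 2.6; fine on 2.7+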
7bd491b74c800c31d51e4f80bcecb6b8f37efbd8
|
allaccess/__init__.py
|
allaccess/__init__.py
|
"""
django-all-access is a reusable application for user registration and authentication
from OAuth 1.0 and OAuth 2.0 providers such as Twitter and Facebook.
"""
__version__ = '0.9.0'
default_app_config = 'allaccess.apps.AllAccessConfig'
import logging
class NullHandler(logging.Handler):
"No-op logging handler."
def emit(self, record):
pass
# Configure null handler to prevent "No handlers could be found..." errors
logging.getLogger('allaccess').addHandler(NullHandler())
|
"""
django-all-access is a reusable application for user registration and authentication
from OAuth 1.0 and OAuth 2.0 providers such as Twitter and Facebook.
"""
__version__ = '0.10.0.dev.0'
default_app_config = 'allaccess.apps.AllAccessConfig'
import logging
class NullHandler(logging.Handler):
"No-op logging handler."
def emit(self, record):
pass
# Configure null handler to prevent "No handlers could be found..." errors
logging.getLogger('allaccess').addHandler(NullHandler())
|
Change version for master status.
|
Change version for master status.
|
Python
|
bsd-2-clause
|
iXioN/django-all-access,mlavin/django-all-access,mlavin/django-all-access,iXioN/django-all-access
|
"""
django-all-access is a reusable application for user registration and authentication
from OAuth 1.0 and OAuth 2.0 providers such as Twitter and Facebook.
"""
__version__ = '0.9.0'
default_app_config = 'allaccess.apps.AllAccessConfig'
import logging
class NullHandler(logging.Handler):
"No-op logging handler."
def emit(self, record):
pass
# Configure null handler to prevent "No handlers could be found..." errors
logging.getLogger('allaccess').addHandler(NullHandler())
Change version for master status.
|
"""
django-all-access is a reusable application for user registration and authentication
from OAuth 1.0 and OAuth 2.0 providers such as Twitter and Facebook.
"""
__version__ = '0.10.0.dev.0'
default_app_config = 'allaccess.apps.AllAccessConfig'
import logging
class NullHandler(logging.Handler):
"No-op logging handler."
def emit(self, record):
pass
# Configure null handler to prevent "No handlers could be found..." errors
logging.getLogger('allaccess').addHandler(NullHandler())
|
<commit_before>"""
django-all-access is a reusable application for user registration and authentication
from OAuth 1.0 and OAuth 2.0 providers such as Twitter and Facebook.
"""
__version__ = '0.9.0'
default_app_config = 'allaccess.apps.AllAccessConfig'
import logging
class NullHandler(logging.Handler):
"No-op logging handler."
def emit(self, record):
pass
# Configure null handler to prevent "No handlers could be found..." errors
logging.getLogger('allaccess').addHandler(NullHandler())
<commit_msg>Change version for master status.<commit_after>
|
"""
django-all-access is a reusable application for user registration and authentication
from OAuth 1.0 and OAuth 2.0 providers such as Twitter and Facebook.
"""
__version__ = '0.10.0.dev.0'
default_app_config = 'allaccess.apps.AllAccessConfig'
import logging
class NullHandler(logging.Handler):
"No-op logging handler."
def emit(self, record):
pass
# Configure null handler to prevent "No handlers could be found..." errors
logging.getLogger('allaccess').addHandler(NullHandler())
|
"""
django-all-access is a reusable application for user registration and authentication
from OAuth 1.0 and OAuth 2.0 providers such as Twitter and Facebook.
"""
__version__ = '0.9.0'
default_app_config = 'allaccess.apps.AllAccessConfig'
import logging
class NullHandler(logging.Handler):
"No-op logging handler."
def emit(self, record):
pass
# Configure null handler to prevent "No handlers could be found..." errors
logging.getLogger('allaccess').addHandler(NullHandler())
Change version for master status."""
django-all-access is a reusable application for user registration and authentication
from OAuth 1.0 and OAuth 2.0 providers such as Twitter and Facebook.
"""
__version__ = '0.10.0.dev.0'
default_app_config = 'allaccess.apps.AllAccessConfig'
import logging
class NullHandler(logging.Handler):
"No-op logging handler."
def emit(self, record):
pass
# Configure null handler to prevent "No handlers could be found..." errors
logging.getLogger('allaccess').addHandler(NullHandler())
|
<commit_before>"""
django-all-access is a reusable application for user registration and authentication
from OAuth 1.0 and OAuth 2.0 providers such as Twitter and Facebook.
"""
__version__ = '0.9.0'
default_app_config = 'allaccess.apps.AllAccessConfig'
import logging
class NullHandler(logging.Handler):
"No-op logging handler."
def emit(self, record):
pass
# Configure null handler to prevent "No handlers could be found..." errors
logging.getLogger('allaccess').addHandler(NullHandler())
<commit_msg>Change version for master status.<commit_after>"""
django-all-access is a reusable application for user registration and authentication
from OAuth 1.0 and OAuth 2.0 providers such as Twitter and Facebook.
"""
__version__ = '0.10.0.dev.0'
default_app_config = 'allaccess.apps.AllAccessConfig'
import logging
class NullHandler(logging.Handler):
"No-op logging handler."
def emit(self, record):
pass
# Configure null handler to prevent "No handlers could be found..." errors
logging.getLogger('allaccess').addHandler(NullHandler())
|
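A side note on the record above: the hand-rolled NullHandler idiom predates `logging.NullHandler`, which has shipped in the standard library since Python 2.7, so on modern Pythons the same "No handlers could be found" warning can be silenced without a custom class. A minimal sketch (only the logger name is taken from the record):

import logging

# Attach the stdlib no-op handler so importing the package never emits
# "No handlers could be found for logger 'allaccess'".
logging.getLogger('allaccess').addHandler(logging.NullHandler())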
58811f1f6a4204a1c59d197daa9fb5fb7f6b25de
|
src/dynamic_graph/sot/dynamics/solver.py
|
src/dynamic_graph/sot/dynamics/solver.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2011, Florent Lamiraux, Thomas Moulard, JRL, CNRS/AIST
#
# This file is part of dynamic-graph.
# dynamic-graph is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# dynamic-graph is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Lesser Public License for more details. You should have
# received a copy of the GNU Lesser General Public License along with
# dynamic-graph. If not, see <http://www.gnu.org/licenses/>.
from dynamic_graph import plug
from dynamic_graph.sot.core import SOT
class Solver:
robot = None
sot = None
def __init__(self, robot):
self.robot = robot
self.sot = SOT('solver')
self.sot.signal('damping').value = 1e-6
self.sot.setNumberDofs(self.robot.dimension)
if robot.device:
plug(self.sot.signal('control'), robot.device.signal('control'))
plug(self.robot.device.state,
self.robot.dynamic.position)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2011, Florent Lamiraux, Thomas Moulard, JRL, CNRS/AIST
#
# This file is part of dynamic-graph.
# dynamic-graph is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# dynamic-graph is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Lesser Public License for more details. You should have
# received a copy of the GNU Lesser General Public License along with
# dynamic-graph. If not, see <http://www.gnu.org/licenses/>.
from dynamic_graph import plug
from dynamic_graph.sot.core import SOT
class Solver:
robot = None
sot = None
def __init__(self, robot):
self.robot = robot
self.sot = SOT('solver')
self.sot.signal('damping').value = 1e-6
self.sot.setNumberDofs(self.robot.dimension)
if robot.device:
plug(self.sot.signal('control'), robot.device.signal('control'))
plug(self.robot.device.state,
self.robot.dynamic.position)
def push(self, taskName):
"""
Proxy method to push a task in the sot
"""
self.sot.push(taskName)
|
Add a proxy method push in Solver -> Solver.sot.push.
|
Add a proxy method push in Solver -> Solver.sot.push.
|
Python
|
bsd-2-clause
|
stack-of-tasks/sot-dynamic-pinocchio,stack-of-tasks/sot-dynamic-pinocchio,stack-of-tasks/sot-dynamic-pinocchio
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2011, Florent Lamiraux, Thomas Moulard, JRL, CNRS/AIST
#
# This file is part of dynamic-graph.
# dynamic-graph is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# dynamic-graph is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Lesser Public License for more details. You should have
# received a copy of the GNU Lesser General Public License along with
# dynamic-graph. If not, see <http://www.gnu.org/licenses/>.
from dynamic_graph import plug
from dynamic_graph.sot.core import SOT
class Solver:
robot = None
sot = None
def __init__(self, robot):
self.robot = robot
self.sot = SOT('solver')
self.sot.signal('damping').value = 1e-6
self.sot.setNumberDofs(self.robot.dimension)
if robot.device:
plug(self.sot.signal('control'), robot.device.signal('control'))
plug(self.robot.device.state,
self.robot.dynamic.position)
Add a proxy method push in Solver -> Solver.sot.push.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2011, Florent Lamiraux, Thomas Moulard, JRL, CNRS/AIST
#
# This file is part of dynamic-graph.
# dynamic-graph is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# dynamic-graph is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Lesser Public License for more details. You should have
# received a copy of the GNU Lesser General Public License along with
# dynamic-graph. If not, see <http://www.gnu.org/licenses/>.
from dynamic_graph import plug
from dynamic_graph.sot.core import SOT
class Solver:
robot = None
sot = None
def __init__(self, robot):
self.robot = robot
self.sot = SOT('solver')
self.sot.signal('damping').value = 1e-6
self.sot.setNumberDofs(self.robot.dimension)
if robot.device:
plug(self.sot.signal('control'), robot.device.signal('control'))
plug(self.robot.device.state,
self.robot.dynamic.position)
def push(self, taskName):
"""
Proxy method to push a task in the sot
"""
self.sot.push(taskName)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2011, Florent Lamiraux, Thomas Moulard, JRL, CNRS/AIST
#
# This file is part of dynamic-graph.
# dynamic-graph is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# dynamic-graph is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Lesser Public License for more details. You should have
# received a copy of the GNU Lesser General Public License along with
# dynamic-graph. If not, see <http://www.gnu.org/licenses/>.
from dynamic_graph import plug
from dynamic_graph.sot.core import SOT
class Solver:
robot = None
sot = None
def __init__(self, robot):
self.robot = robot
self.sot = SOT('solver')
self.sot.signal('damping').value = 1e-6
self.sot.setNumberDofs(self.robot.dimension)
if robot.device:
plug(self.sot.signal('control'), robot.device.signal('control'))
plug(self.robot.device.state,
self.robot.dynamic.position)
<commit_msg>Add a proxy method push in Solver -> Solver.sot.push.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2011, Florent Lamiraux, Thomas Moulard, JRL, CNRS/AIST
#
# This file is part of dynamic-graph.
# dynamic-graph is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# dynamic-graph is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Lesser Public License for more details. You should have
# received a copy of the GNU Lesser General Public License along with
# dynamic-graph. If not, see <http://www.gnu.org/licenses/>.
from dynamic_graph import plug
from dynamic_graph.sot.core import SOT
class Solver:
robot = None
sot = None
def __init__(self, robot):
self.robot = robot
self.sot = SOT('solver')
self.sot.signal('damping').value = 1e-6
self.sot.setNumberDofs(self.robot.dimension)
if robot.device:
plug(self.sot.signal('control'), robot.device.signal('control'))
plug(self.robot.device.state,
self.robot.dynamic.position)
def push(self, taskName):
"""
Proxy method to push a task in the sot
"""
self.sot.push(taskName)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2011, Florent Lamiraux, Thomas Moulard, JRL, CNRS/AIST
#
# This file is part of dynamic-graph.
# dynamic-graph is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# dynamic-graph is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Lesser Public License for more details. You should have
# received a copy of the GNU Lesser General Public License along with
# dynamic-graph. If not, see <http://www.gnu.org/licenses/>.
from dynamic_graph import plug
from dynamic_graph.sot.core import SOT
class Solver:
robot = None
sot = None
def __init__(self, robot):
self.robot = robot
self.sot = SOT('solver')
self.sot.signal('damping').value = 1e-6
self.sot.setNumberDofs(self.robot.dimension)
if robot.device:
plug(self.sot.signal('control'), robot.device.signal('control'))
plug(self.robot.device.state,
self.robot.dynamic.position)
Add a proxy method push in Solver -> Solver.sot.push.#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2011, Florent Lamiraux, Thomas Moulard, JRL, CNRS/AIST
#
# This file is part of dynamic-graph.
# dynamic-graph is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# dynamic-graph is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Lesser Public License for more details. You should have
# received a copy of the GNU Lesser General Public License along with
# dynamic-graph. If not, see <http://www.gnu.org/licenses/>.
from dynamic_graph import plug
from dynamic_graph.sot.core import SOT
class Solver:
robot = None
sot = None
def __init__(self, robot):
self.robot = robot
self.sot = SOT('solver')
self.sot.signal('damping').value = 1e-6
self.sot.setNumberDofs(self.robot.dimension)
if robot.device:
plug(self.sot.signal('control'), robot.device.signal('control'))
plug(self.robot.device.state,
self.robot.dynamic.position)
def push(self, taskName):
"""
Proxy method to push a task in the sot
"""
self.sot.push(taskName)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2011, Florent Lamiraux, Thomas Moulard, JRL, CNRS/AIST
#
# This file is part of dynamic-graph.
# dynamic-graph is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# dynamic-graph is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Lesser Public License for more details. You should have
# received a copy of the GNU Lesser General Public License along with
# dynamic-graph. If not, see <http://www.gnu.org/licenses/>.
from dynamic_graph import plug
from dynamic_graph.sot.core import SOT
class Solver:
robot = None
sot = None
def __init__(self, robot):
self.robot = robot
self.sot = SOT('solver')
self.sot.signal('damping').value = 1e-6
self.sot.setNumberDofs(self.robot.dimension)
if robot.device:
plug(self.sot.signal('control'), robot.device.signal('control'))
plug(self.robot.device.state,
self.robot.dynamic.position)
<commit_msg>Add a proxy method push in Solver -> Solver.sot.push.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2011, Florent Lamiraux, Thomas Moulard, JRL, CNRS/AIST
#
# This file is part of dynamic-graph.
# dynamic-graph is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# dynamic-graph is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Lesser Public License for more details. You should have
# received a copy of the GNU Lesser General Public License along with
# dynamic-graph. If not, see <http://www.gnu.org/licenses/>.
from dynamic_graph import plug
from dynamic_graph.sot.core import SOT
class Solver:
robot = None
sot = None
def __init__(self, robot):
self.robot = robot
self.sot = SOT('solver')
self.sot.signal('damping').value = 1e-6
self.sot.setNumberDofs(self.robot.dimension)
if robot.device:
plug(self.sot.signal('control'), robot.device.signal('control'))
plug(self.robot.device.state,
self.robot.dynamic.position)
def push(self, taskName):
"""
Proxy method to push a task in the sot
"""
self.sot.push(taskName)
|
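The change above is plain delegation: Solver grows a `push` that forwards to the SOT it wraps. A stripped-down sketch of the same proxy pattern, with a stub standing in for the real dynamic_graph SOT entity (the stub class and task name are invented for illustration):

class FakeSOT(object):
    """Stand-in for dynamic_graph.sot.core.SOT."""
    def __init__(self):
        self.tasks = []
    def push(self, task_name):
        self.tasks.append(task_name)

class Solver(object):
    def __init__(self, sot):
        self.sot = sot
    def push(self, task_name):
        # Proxy method: callers talk to the solver, never to the stack directly.
        self.sot.push(task_name)

solver = Solver(FakeSOT())
solver.push('taskCom')      # hypothetical task name
print(solver.sot.tasks)     # ['taskCom']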
84ee0b55021592235f590aa4cc52cc8b13800c32
|
drftutorial/catalog/views.py
|
drftutorial/catalog/views.py
|
from django.http import HttpResponse
from rest_framework.views import APIView
from rest_framework.response import Response
from .models import Product
from .serializers import ProductSerializer
class ProductList(APIView):
def get(self, request, format=None):
products = Product.objects.all()
serializer = ProductSerializer(products, many=True)
return Response(serializer.data)
|
from django.http import HttpResponse
from rest_framework import generics
from rest_framework.response import Response
from .models import Product
from .serializers import ProductSerializer
class ProductList(generics.ListAPIView):
queryset = Product.objects.all()
serializer_class = ProductSerializer
|
Implement GET Product using ListAPIView
|
Implement GET Product using ListAPIView
|
Python
|
mit
|
andreagrandi/drf-tutorial
|
from django.http import HttpResponse
from rest_framework.views import APIView
from rest_framework.response import Response
from .models import Product
from .serializers import ProductSerializer
class ProductList(APIView):
def get(self, request, format=None):
products = Product.objects.all()
serializer = ProductSerializer(products, many=True)
return Response(serializer.data)
Implement GET Product using ListAPIView
|
from django.http import HttpResponse
from rest_framework import generics
from rest_framework.response import Response
from .models import Product
from .serializers import ProductSerializer
class ProductList(generics.ListAPIView):
queryset = Product.objects.all()
serializer_class = ProductSerializer
|
<commit_before>from django.http import HttpResponse
from rest_framework.views import APIView
from rest_framework.response import Response
from .models import Product
from .serializers import ProductSerializer
class ProductList(APIView):
def get(self, request, format=None):
products = Product.objects.all()
serializer = ProductSerializer(products, many=True)
return Response(serializer.data)
<commit_msg>Implement GET Product using ListAPIView<commit_after>
|
from django.http import HttpResponse
from rest_framework import generics
from rest_framework.response import Response
from .models import Product
from .serializers import ProductSerializer
class ProductList(generics.ListAPIView):
queryset = Product.objects.all()
serializer_class = ProductSerializer
|
from django.http import HttpResponse
from rest_framework.views import APIView
from rest_framework.response import Response
from .models import Product
from .serializers import ProductSerializer
class ProductList(APIView):
def get(self, request, format=None):
products = Product.objects.all()
serializer = ProductSerializer(products, many=True)
return Response(serializer.data)
Implement GET Product using ListAPIViewfrom django.http import HttpResponse
from rest_framework import generics
from rest_framework.response import Response
from .models import Product
from .serializers import ProductSerializer
class ProductList(generics.ListAPIView):
queryset = Product.objects.all()
serializer_class = ProductSerializer
|
<commit_before>from django.http import HttpResponse
from rest_framework.views import APIView
from rest_framework.response import Response
from .models import Product
from .serializers import ProductSerializer
class ProductList(APIView):
def get(self, request, format=None):
products = Product.objects.all()
serializer = ProductSerializer(products, many=True)
return Response(serializer.data)
<commit_msg>Implement GET Product using ListAPIView<commit_after>from django.http import HttpResponse
from rest_framework import generics
from rest_framework.response import Response
from .models import Product
from .serializers import ProductSerializer
class ProductList(generics.ListAPIView):
queryset = Product.objects.all()
serializer_class = ProductSerializer
|
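For context on the rewrite above: `generics.ListAPIView` bundles the queryset iteration, serialization, and Response construction that the APIView version spelled out by hand. Wiring the view into a URLconf might look like the sketch below (the route path is an assumption, and `django.urls.path` assumes Django 2.0+; the tutorial itself may have used the older `url()` style):

from django.urls import path  # Django 2.0+
from .views import ProductList

urlpatterns = [
    # GET /products/ -> serialized list of Product rows
    path('products/', ProductList.as_view(), name='product-list'),
]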
cf8671568bee0f3b631aba986fad2d846f2c587a
|
test/chtest/dev_proc_sys.py
|
test/chtest/dev_proc_sys.py
|
#!/usr/bin/env python3
import os.path
import sys
# File in /sys seem to vary between Linux systems. Thus, try a few candidates
# and use the first one that exists. What we want is any file under /sys with
# permissions root:root -rw-------.
sys_file = None
for f in ("/sys/devices/cpu/rdpmc",
"/sys/kernel/mm/page_idle/bitmap",
"/sys/kernel/slab/request_sock_TCP/red_zone"):
if (os.path.exists(f)):
sys_file = f
break
if (sys_file is None):
print("ERROR\tno test candidates in /sys exist")
sys.exit(1)
problem_ct = 0
for f in ("/dev/mem", "/proc/kcore", sys_file):
try:
open(f, "rb").read(1)
print("RISK\t%s: read allowed" % f)
problem_ct += 1
except PermissionError:
print("SAFE\t%s: read not allowed" % f)
except OSError as x:
print("ERROR\t%s: exception: %s" % (f, x))
problem_ct += 1
sys.exit(problem_ct != 0)
|
#!/usr/bin/env python3
import os.path
import sys
# File in /sys seem to vary between Linux systems. Thus, try a few candidates
# and use the first one that exists. What we want is any file under /sys with
# permissions root:root -rw-------.
sys_file = None
for f in ("/sys/devices/cpu/rdpmc",
"/sys/kernel/mm/page_idle/bitmap",
"/sys/kernel/slab/request_sock_TCP/red_zone",
"sys/kernel/debug/kprobes/enabled"):
if (os.path.exists(f)):
sys_file = f
break
if (sys_file is None):
print("ERROR\tno test candidates in /sys exist")
sys.exit(1)
problem_ct = 0
for f in ("/dev/mem", "/proc/kcore", sys_file):
try:
open(f, "rb").read(1)
print("RISK\t%s: read allowed" % f)
problem_ct += 1
except PermissionError:
print("SAFE\t%s: read not allowed" % f)
except OSError as x:
print("ERROR\t%s: exception: %s" % (f, x))
problem_ct += 1
sys.exit(problem_ct != 0)
|
Add a further path inside /sys to test
|
Add a further path inside /sys to test
On (at least) a Debian "stretch" system, the charliecloud image contains
none of the tested paths inside /sys. This patch adds one that does
exist there.
Signed-off-by: Matthew Vernon <d2337109245c21c6e400ba5f0470cfb01956d9f2@sanger.ac.uk>
|
Python
|
apache-2.0
|
hpc/charliecloud,hpc/charliecloud,hpc/charliecloud
|
#!/usr/bin/env python3
import os.path
import sys
# File in /sys seem to vary between Linux systems. Thus, try a few candidates
# and use the first one that exists. What we want is any file under /sys with
# permissions root:root -rw-------.
sys_file = None
for f in ("/sys/devices/cpu/rdpmc",
"/sys/kernel/mm/page_idle/bitmap",
"/sys/kernel/slab/request_sock_TCP/red_zone"):
if (os.path.exists(f)):
sys_file = f
break
if (sys_file is None):
print("ERROR\tno test candidates in /sys exist")
sys.exit(1)
problem_ct = 0
for f in ("/dev/mem", "/proc/kcore", sys_file):
try:
open(f, "rb").read(1)
print("RISK\t%s: read allowed" % f)
problem_ct += 1
except PermissionError:
print("SAFE\t%s: read not allowed" % f)
except OSError as x:
print("ERROR\t%s: exception: %s" % (f, x))
problem_ct += 1
sys.exit(problem_ct != 0)
Add a further path inside /sys to test
On (at least) a Debian "stretch" system, the charliecloud image contains
none of the tested paths inside /sys. This patch adds one that does
exist there.
Signed-off-by: Matthew Vernon <d2337109245c21c6e400ba5f0470cfb01956d9f2@sanger.ac.uk>
|
#!/usr/bin/env python3
import os.path
import sys
# File in /sys seem to vary between Linux systems. Thus, try a few candidates
# and use the first one that exists. What we want is any file under /sys with
# permissions root:root -rw-------.
sys_file = None
for f in ("/sys/devices/cpu/rdpmc",
"/sys/kernel/mm/page_idle/bitmap",
"/sys/kernel/slab/request_sock_TCP/red_zone",
"sys/kernel/debug/kprobes/enabled"):
if (os.path.exists(f)):
sys_file = f
break
if (sys_file is None):
print("ERROR\tno test candidates in /sys exist")
sys.exit(1)
problem_ct = 0
for f in ("/dev/mem", "/proc/kcore", sys_file):
try:
open(f, "rb").read(1)
print("RISK\t%s: read allowed" % f)
problem_ct += 1
except PermissionError:
print("SAFE\t%s: read not allowed" % f)
except OSError as x:
print("ERROR\t%s: exception: %s" % (f, x))
problem_ct += 1
sys.exit(problem_ct != 0)
|
<commit_before>#!/usr/bin/env python3
import os.path
import sys
# File in /sys seem to vary between Linux systems. Thus, try a few candidates
# and use the first one that exists. What we want is any file under /sys with
# permissions root:root -rw-------.
sys_file = None
for f in ("/sys/devices/cpu/rdpmc",
"/sys/kernel/mm/page_idle/bitmap",
"/sys/kernel/slab/request_sock_TCP/red_zone"):
if (os.path.exists(f)):
sys_file = f
break
if (sys_file is None):
print("ERROR\tno test candidates in /sys exist")
sys.exit(1)
problem_ct = 0
for f in ("/dev/mem", "/proc/kcore", sys_file):
try:
open(f, "rb").read(1)
print("RISK\t%s: read allowed" % f)
problem_ct += 1
except PermissionError:
print("SAFE\t%s: read not allowed" % f)
except OSError as x:
print("ERROR\t%s: exception: %s" % (f, x))
problem_ct += 1
sys.exit(problem_ct != 0)
<commit_msg>Add a further path inside /sys to test
On (at least) a Debian "stretch" system, the charliecloud image contains
none of the tested paths inside /sys. This patch adds one that does
exist there.
Signed-off-by: Matthew Vernon <d2337109245c21c6e400ba5f0470cfb01956d9f2@sanger.ac.uk><commit_after>
|
#!/usr/bin/env python3
import os.path
import sys
# File in /sys seem to vary between Linux systems. Thus, try a few candidates
# and use the first one that exists. What we want is any file under /sys with
# permissions root:root -rw-------.
sys_file = None
for f in ("/sys/devices/cpu/rdpmc",
"/sys/kernel/mm/page_idle/bitmap",
"/sys/kernel/slab/request_sock_TCP/red_zone",
"sys/kernel/debug/kprobes/enabled"):
if (os.path.exists(f)):
sys_file = f
break
if (sys_file is None):
print("ERROR\tno test candidates in /sys exist")
sys.exit(1)
problem_ct = 0
for f in ("/dev/mem", "/proc/kcore", sys_file):
try:
open(f, "rb").read(1)
print("RISK\t%s: read allowed" % f)
problem_ct += 1
except PermissionError:
print("SAFE\t%s: read not allowed" % f)
except OSError as x:
print("ERROR\t%s: exception: %s" % (f, x))
problem_ct += 1
sys.exit(problem_ct != 0)
|
#!/usr/bin/env python3
import os.path
import sys
# File in /sys seem to vary between Linux systems. Thus, try a few candidates
# and use the first one that exists. What we want is any file under /sys with
# permissions root:root -rw-------.
sys_file = None
for f in ("/sys/devices/cpu/rdpmc",
"/sys/kernel/mm/page_idle/bitmap",
"/sys/kernel/slab/request_sock_TCP/red_zone"):
if (os.path.exists(f)):
sys_file = f
break
if (sys_file is None):
print("ERROR\tno test candidates in /sys exist")
sys.exit(1)
problem_ct = 0
for f in ("/dev/mem", "/proc/kcore", sys_file):
try:
open(f, "rb").read(1)
print("RISK\t%s: read allowed" % f)
problem_ct += 1
except PermissionError:
print("SAFE\t%s: read not allowed" % f)
except OSError as x:
print("ERROR\t%s: exception: %s" % (f, x))
problem_ct += 1
sys.exit(problem_ct != 0)
Add a further path inside /sys to test
On (at least) a Debian "stretch" system, the charliecloud image contains
none of the tested paths inside /sys. This patch adds one that does
exist there.
Signed-off-by: Matthew Vernon <d2337109245c21c6e400ba5f0470cfb01956d9f2@sanger.ac.uk>#!/usr/bin/env python3
import os.path
import sys
# File in /sys seem to vary between Linux systems. Thus, try a few candidates
# and use the first one that exists. What we want is any file under /sys with
# permissions root:root -rw-------.
sys_file = None
for f in ("/sys/devices/cpu/rdpmc",
"/sys/kernel/mm/page_idle/bitmap",
"/sys/kernel/slab/request_sock_TCP/red_zone",
"sys/kernel/debug/kprobes/enabled"):
if (os.path.exists(f)):
sys_file = f
break
if (sys_file is None):
print("ERROR\tno test candidates in /sys exist")
sys.exit(1)
problem_ct = 0
for f in ("/dev/mem", "/proc/kcore", sys_file):
try:
open(f, "rb").read(1)
print("RISK\t%s: read allowed" % f)
problem_ct += 1
except PermissionError:
print("SAFE\t%s: read not allowed" % f)
except OSError as x:
print("ERROR\t%s: exception: %s" % (f, x))
problem_ct += 1
sys.exit(problem_ct != 0)
|
<commit_before>#!/usr/bin/env python3
import os.path
import sys
# File in /sys seem to vary between Linux systems. Thus, try a few candidates
# and use the first one that exists. What we want is any file under /sys with
# permissions root:root -rw-------.
sys_file = None
for f in ("/sys/devices/cpu/rdpmc",
"/sys/kernel/mm/page_idle/bitmap",
"/sys/kernel/slab/request_sock_TCP/red_zone"):
if (os.path.exists(f)):
sys_file = f
break
if (sys_file is None):
print("ERROR\tno test candidates in /sys exist")
sys.exit(1)
problem_ct = 0
for f in ("/dev/mem", "/proc/kcore", sys_file):
try:
open(f, "rb").read(1)
print("RISK\t%s: read allowed" % f)
problem_ct += 1
except PermissionError:
print("SAFE\t%s: read not allowed" % f)
except OSError as x:
print("ERROR\t%s: exception: %s" % (f, x))
problem_ct += 1
sys.exit(problem_ct != 0)
<commit_msg>Add a further path inside /sys to test
On (at least) a Debian "stretch" system, the charliecloud image contains
none of the tested paths inside /sys. This patch adds one that does
exist there.
Signed-off-by: Matthew Vernon <d2337109245c21c6e400ba5f0470cfb01956d9f2@sanger.ac.uk><commit_after>#!/usr/bin/env python3
import os.path
import sys
# File in /sys seem to vary between Linux systems. Thus, try a few candidates
# and use the first one that exists. What we want is any file under /sys with
# permissions root:root -rw-------.
sys_file = None
for f in ("/sys/devices/cpu/rdpmc",
"/sys/kernel/mm/page_idle/bitmap",
"/sys/kernel/slab/request_sock_TCP/red_zone",
"sys/kernel/debug/kprobes/enabled"):
if (os.path.exists(f)):
sys_file = f
break
if (sys_file is None):
print("ERROR\tno test candidates in /sys exist")
sys.exit(1)
problem_ct = 0
for f in ("/dev/mem", "/proc/kcore", sys_file):
try:
open(f, "rb").read(1)
print("RISK\t%s: read allowed" % f)
problem_ct += 1
except PermissionError:
print("SAFE\t%s: read not allowed" % f)
except OSError as x:
print("ERROR\t%s: exception: %s" % (f, x))
problem_ct += 1
sys.exit(problem_ct != 0)
|
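The test above rests on one behavioral contract: reading a root-only file from an unprivileged process raises PermissionError, anything else (beyond a missing candidate) is a harness error. The same probe pattern in isolation, with a path list that is purely illustrative:

import sys

def probe(paths):
    """Return the number of paths that could be read (i.e. risks)."""
    risks = 0
    for path in paths:
        try:
            with open(path, "rb") as f:
                f.read(1)
            print("RISK\t%s: read allowed" % path)
            risks += 1
        except PermissionError:
            print("SAFE\t%s: read not allowed" % path)
        except OSError as exc:
            print("ERROR\t%s: %s" % (path, exc))
            risks += 1
    return risks

sys.exit(probe(["/dev/mem", "/proc/kcore"]) != 0)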
9d6dcda52e2cde4ee4788008051a53f301335f70
|
Lib/test/test_smtpnet.py
|
Lib/test/test_smtpnet.py
|
#!/usr/bin/env python
import unittest
from test import test_support
import smtplib
test_support.requires("network")
class SmtpSSLTest(unittest.TestCase):
testServer = 'smtp.gmail.com'
remotePort = 465
def test_connect(self):
test_support.get_attribute(smtplib, 'SMTP_SSLX')
server = smtplib.SMTP_SSL(self.testServer, self.remotePort)
server.ehlo()
server.quit()
def test_main():
test_support.run_unittest(SmtpSSLTest)
if __name__ == "__main__":
test_main()
|
#!/usr/bin/env python
import unittest
from test import test_support
import smtplib
test_support.requires("network")
class SmtpSSLTest(unittest.TestCase):
testServer = 'smtp.gmail.com'
remotePort = 465
def test_connect(self):
test_support.get_attribute(smtplib, 'SMTP_SSL')
server = smtplib.SMTP_SSL(self.testServer, self.remotePort)
server.ehlo()
server.quit()
def test_main():
test_support.run_unittest(SmtpSSLTest)
if __name__ == "__main__":
test_main()
|
Fix spelling left over from testing.
|
Fix spelling left over from testing.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
#!/usr/bin/env python
import unittest
from test import test_support
import smtplib
test_support.requires("network")
class SmtpSSLTest(unittest.TestCase):
testServer = 'smtp.gmail.com'
remotePort = 465
def test_connect(self):
test_support.get_attribute(smtplib, 'SMTP_SSLX')
server = smtplib.SMTP_SSL(self.testServer, self.remotePort)
server.ehlo()
server.quit()
def test_main():
test_support.run_unittest(SmtpSSLTest)
if __name__ == "__main__":
test_main()
Fix spelling left over from testing.
|
#!/usr/bin/env python
import unittest
from test import test_support
import smtplib
test_support.requires("network")
class SmtpSSLTest(unittest.TestCase):
testServer = 'smtp.gmail.com'
remotePort = 465
def test_connect(self):
test_support.get_attribute(smtplib, 'SMTP_SSL')
server = smtplib.SMTP_SSL(self.testServer, self.remotePort)
server.ehlo()
server.quit()
def test_main():
test_support.run_unittest(SmtpSSLTest)
if __name__ == "__main__":
test_main()
|
<commit_before>#!/usr/bin/env python
import unittest
from test import test_support
import smtplib
test_support.requires("network")
class SmtpSSLTest(unittest.TestCase):
testServer = 'smtp.gmail.com'
remotePort = 465
def test_connect(self):
test_support.get_attribute(smtplib, 'SMTP_SSLX')
server = smtplib.SMTP_SSL(self.testServer, self.remotePort)
server.ehlo()
server.quit()
def test_main():
test_support.run_unittest(SmtpSSLTest)
if __name__ == "__main__":
test_main()
<commit_msg>Fix spelling left over from testing.<commit_after>
|
#!/usr/bin/env python
import unittest
from test import test_support
import smtplib
test_support.requires("network")
class SmtpSSLTest(unittest.TestCase):
testServer = 'smtp.gmail.com'
remotePort = 465
def test_connect(self):
test_support.get_attribute(smtplib, 'SMTP_SSL')
server = smtplib.SMTP_SSL(self.testServer, self.remotePort)
server.ehlo()
server.quit()
def test_main():
test_support.run_unittest(SmtpSSLTest)
if __name__ == "__main__":
test_main()
|
#!/usr/bin/env python
import unittest
from test import test_support
import smtplib
test_support.requires("network")
class SmtpSSLTest(unittest.TestCase):
testServer = 'smtp.gmail.com'
remotePort = 465
def test_connect(self):
test_support.get_attribute(smtplib, 'SMTP_SSLX')
server = smtplib.SMTP_SSL(self.testServer, self.remotePort)
server.ehlo()
server.quit()
def test_main():
test_support.run_unittest(SmtpSSLTest)
if __name__ == "__main__":
test_main()
Fix spelling left over from testing.#!/usr/bin/env python
import unittest
from test import test_support
import smtplib
test_support.requires("network")
class SmtpSSLTest(unittest.TestCase):
testServer = 'smtp.gmail.com'
remotePort = 465
def test_connect(self):
test_support.get_attribute(smtplib, 'SMTP_SSL')
server = smtplib.SMTP_SSL(self.testServer, self.remotePort)
server.ehlo()
server.quit()
def test_main():
test_support.run_unittest(SmtpSSLTest)
if __name__ == "__main__":
test_main()
|
<commit_before>#!/usr/bin/env python
import unittest
from test import test_support
import smtplib
test_support.requires("network")
class SmtpSSLTest(unittest.TestCase):
testServer = 'smtp.gmail.com'
remotePort = 465
def test_connect(self):
test_support.get_attribute(smtplib, 'SMTP_SSLX')
server = smtplib.SMTP_SSL(self.testServer, self.remotePort)
server.ehlo()
server.quit()
def test_main():
test_support.run_unittest(SmtpSSLTest)
if __name__ == "__main__":
test_main()
<commit_msg>Fix spelling left over from testing.<commit_after>#!/usr/bin/env python
import unittest
from test import test_support
import smtplib
test_support.requires("network")
class SmtpSSLTest(unittest.TestCase):
testServer = 'smtp.gmail.com'
remotePort = 465
def test_connect(self):
test_support.get_attribute(smtplib, 'SMTP_SSL')
server = smtplib.SMTP_SSL(self.testServer, self.remotePort)
server.ehlo()
server.quit()
def test_main():
test_support.run_unittest(SmtpSSLTest)
if __name__ == "__main__":
test_main()
|
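One note on the fix above: `test_support.get_attribute` is the stdlib test helper that skips the test when the named attribute is absent, so the typo `SMTP_SSLX` made the test silently skip rather than run. Outside the test suite, the same guard can be written with `hasattr`; a minimal sketch (requires network access, server values copied from the record):

import smtplib

# SMTP_SSL only exists when Python was built with the ssl module.
if not hasattr(smtplib, 'SMTP_SSL'):
    raise SystemExit('SSL support not compiled in')

server = smtplib.SMTP_SSL('smtp.gmail.com', 465)
server.ehlo()
server.quit()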
607065281f31ef690a50cf011b6e142891bbd6ff
|
storage.py
|
storage.py
|
from datetime import datetime
from tinydb import TinyDB, Query
from tinydb_serialization import Serializer, SerializationMiddleware
class DateTimeSerializer(Serializer):
OBJ_CLASS = datetime # The class this serializer handles
def encode(self, obj):
return obj.strftime('%Y-%m-%dT%H:%M:%S')
def decode(self, s):
return datetime.strptime(s, '%Y-%m-%dT%H:%M:%S')
class Storage:
def __init__(self):
serialization = SerializationMiddleware()
serialization.register_serializer(DateTimeSerializer(), 'TinyDate')
self.db = TinyDB('db.json', storage=serialization)
self.users = self.db.table('users')
def get_user(self, id, default):
if default is None:
default = {'id': id, 'name': '', 'last_report': None}
query = Query()
user = self.users.get(query.id == id)
if not user:
user = default
self.users.insert(user)
return user
def get_users_for_daily_meeting(self):
query = Query()
return self.users.search(
(~ query.last_report.exists()) |
(query.last_report.test(lambda d:
d and
d.date() < datetime.today().date()))
)
def save_user(self, user):
if not user and not 'id' in user:
raise Exception("Not a valid user")
query = Query()
self.users.update(user, query.id == user['id'])
|
from datetime import datetime
from tinydb import TinyDB, Query
from tinydb_serialization import Serializer, SerializationMiddleware
class DateTimeSerializer(Serializer):
OBJ_CLASS = datetime # The class this serializer handles
def encode(self, obj):
return obj.strftime('%Y-%m-%dT%H:%M:%S')
def decode(self, s):
return datetime.strptime(s, '%Y-%m-%dT%H:%M:%S')
class Storage:
def __init__(self):
serialization = SerializationMiddleware()
serialization.register_serializer(DateTimeSerializer(), 'TinyDate')
self.db = TinyDB('db.json', storage=serialization)
self.users = self.db.table('users')
def get_user(self, id, default):
if default is None:
default = {'id': id, 'name': '', 'last_report': None}
query = Query()
user = self.users.get(query.id == id)
if not user:
user = default
self.users.insert(user)
return user
def get_users_for_daily_meeting(self):
query = Query()
return self.users.search(
(~ query.last_report.exists()) |
(query.last_report.test(lambda d:
not d or
d.date() < datetime.today().date()))
)
def save_user(self, user):
if not user and not 'id' in user:
raise Exception("Not a valid user")
query = Query()
self.users.update(user, query.id == user['id'])
|
Fix users retrieval for daily meeting
|
Fix users retrieval for daily meeting
|
Python
|
mit
|
andreldm/slack-daily-meeting-bot,andreldm/slack-daily-meeting-bot
|
from datetime import datetime
from tinydb import TinyDB, Query
from tinydb_serialization import Serializer, SerializationMiddleware
class DateTimeSerializer(Serializer):
OBJ_CLASS = datetime # The class this serializer handles
def encode(self, obj):
return obj.strftime('%Y-%m-%dT%H:%M:%S')
def decode(self, s):
return datetime.strptime(s, '%Y-%m-%dT%H:%M:%S')
class Storage:
def __init__(self):
serialization = SerializationMiddleware()
serialization.register_serializer(DateTimeSerializer(), 'TinyDate')
self.db = TinyDB('db.json', storage=serialization)
self.users = self.db.table('users')
def get_user(self, id, default):
if default is None:
default = {'id': id, 'name': '', 'last_report': None}
query = Query()
user = self.users.get(query.id == id)
if not user:
user = default
self.users.insert(user)
return user
def get_users_for_daily_meeting(self):
query = Query()
return self.users.search(
(~ query.last_report.exists()) |
(query.last_report.test(lambda d:
d and
d.date() < datetime.today().date()))
)
def save_user(self, user):
if not user and not 'id' in user:
raise Exception("Not a valid user")
query = Query()
self.users.update(user, query.id == user['id'])
Fix users retrieval for daily meeting
|
from datetime import datetime
from tinydb import TinyDB, Query
from tinydb_serialization import Serializer, SerializationMiddleware
class DateTimeSerializer(Serializer):
OBJ_CLASS = datetime # The class this serializer handles
def encode(self, obj):
return obj.strftime('%Y-%m-%dT%H:%M:%S')
def decode(self, s):
return datetime.strptime(s, '%Y-%m-%dT%H:%M:%S')
class Storage:
def __init__(self):
serialization = SerializationMiddleware()
serialization.register_serializer(DateTimeSerializer(), 'TinyDate')
self.db = TinyDB('db.json', storage=serialization)
self.users = self.db.table('users')
def get_user(self, id, default):
if default is None:
default = {'id': id, 'name': '', 'last_report': None}
query = Query()
user = self.users.get(query.id == id)
if not user:
user = default
self.users.insert(user)
return user
def get_users_for_daily_meeting(self):
query = Query()
return self.users.search(
(~ query.last_report.exists()) |
(query.last_report.test(lambda d:
not d or
d.date() < datetime.today().date()))
)
def save_user(self, user):
if not user and not 'id' in user:
raise Exception("Not a valid user")
query = Query()
self.users.update(user, query.id == user['id'])
|
<commit_before>from datetime import datetime
from tinydb import TinyDB, Query
from tinydb_serialization import Serializer, SerializationMiddleware
class DateTimeSerializer(Serializer):
OBJ_CLASS = datetime # The class this serializer handles
def encode(self, obj):
return obj.strftime('%Y-%m-%dT%H:%M:%S')
def decode(self, s):
return datetime.strptime(s, '%Y-%m-%dT%H:%M:%S')
class Storage:
def __init__(self):
serialization = SerializationMiddleware()
serialization.register_serializer(DateTimeSerializer(), 'TinyDate')
self.db = TinyDB('db.json', storage=serialization)
self.users = self.db.table('users')
def get_user(self, id, default):
if default is None:
default = {'id': id, 'name': '', 'last_report': None}
query = Query()
user = self.users.get(query.id == id)
if not user:
user = default
self.users.insert(user)
return user
def get_users_for_daily_meeting(self):
query = Query()
return self.users.search(
(~ query.last_report.exists()) |
(query.last_report.test(lambda d:
d and
d.date() < datetime.today().date()))
)
def save_user(self, user):
if not user and not 'id' in user:
raise Exception("Not a valid user")
query = Query()
self.users.update(user, query.id == user['id'])
<commit_msg>Fix users retrieval for daily meeting<commit_after>
|
from datetime import datetime
from tinydb import TinyDB, Query
from tinydb_serialization import Serializer, SerializationMiddleware
class DateTimeSerializer(Serializer):
OBJ_CLASS = datetime # The class this serializer handles
def encode(self, obj):
return obj.strftime('%Y-%m-%dT%H:%M:%S')
def decode(self, s):
return datetime.strptime(s, '%Y-%m-%dT%H:%M:%S')
class Storage:
def __init__(self):
serialization = SerializationMiddleware()
serialization.register_serializer(DateTimeSerializer(), 'TinyDate')
self.db = TinyDB('db.json', storage=serialization)
self.users = self.db.table('users')
def get_user(self, id, default):
if default is None:
default = {'id': id, 'name': '', 'last_report': None}
query = Query()
user = self.users.get(query.id == id)
if not user:
user = default
self.users.insert(user)
return user
def get_users_for_daily_meeting(self):
query = Query()
return self.users.search(
(~ query.last_report.exists()) |
(query.last_report.test(lambda d:
not d or
d.date() < datetime.today().date()))
)
def save_user(self, user):
if not user and not 'id' in user:
raise Exception("Not a valid user")
query = Query()
self.users.update(user, query.id == user['id'])
|
from datetime import datetime
from tinydb import TinyDB, Query
from tinydb_serialization import Serializer, SerializationMiddleware
class DateTimeSerializer(Serializer):
OBJ_CLASS = datetime # The class this serializer handles
def encode(self, obj):
return obj.strftime('%Y-%m-%dT%H:%M:%S')
def decode(self, s):
return datetime.strptime(s, '%Y-%m-%dT%H:%M:%S')
class Storage:
def __init__(self):
serialization = SerializationMiddleware()
serialization.register_serializer(DateTimeSerializer(), 'TinyDate')
self.db = TinyDB('db.json', storage=serialization)
self.users = self.db.table('users')
def get_user(self, id, default):
if default is None:
default = {'id': id, 'name': '', 'last_report': None}
query = Query()
user = self.users.get(query.id == id)
if not user:
user = default
self.users.insert(user)
return user
def get_users_for_daily_meeting(self):
query = Query()
return self.users.search(
(~ query.last_report.exists()) |
(query.last_report.test(lambda d:
d and
d.date() < datetime.today().date()))
)
def save_user(self, user):
if not user and not 'id' in user:
raise Exception("Not a valid user")
query = Query()
self.users.update(user, query.id == user['id'])
Fix users retrieval for daily meetingfrom datetime import datetime
from tinydb import TinyDB, Query
from tinydb_serialization import Serializer, SerializationMiddleware
class DateTimeSerializer(Serializer):
OBJ_CLASS = datetime # The class this serializer handles
def encode(self, obj):
return obj.strftime('%Y-%m-%dT%H:%M:%S')
def decode(self, s):
return datetime.strptime(s, '%Y-%m-%dT%H:%M:%S')
class Storage:
def __init__(self):
serialization = SerializationMiddleware()
serialization.register_serializer(DateTimeSerializer(), 'TinyDate')
self.db = TinyDB('db.json', storage=serialization)
self.users = self.db.table('users')
def get_user(self, id, default):
if default is None:
default = {'id': id, 'name': '', 'last_report': None}
query = Query()
user = self.users.get(query.id == id)
if not user:
user = default
self.users.insert(user)
return user
def get_users_for_daily_meeting(self):
query = Query()
return self.users.search(
(~ query.last_report.exists()) |
(query.last_report.test(lambda d:
not d or
d.date() < datetime.today().date()))
)
def save_user(self, user):
if not user and not 'id' in user:
raise Exception("Not a valid user")
query = Query()
self.users.update(user, query.id == user['id'])
|
<commit_before>from datetime import datetime
from tinydb import TinyDB, Query
from tinydb_serialization import Serializer, SerializationMiddleware
class DateTimeSerializer(Serializer):
OBJ_CLASS = datetime # The class this serializer handles
def encode(self, obj):
return obj.strftime('%Y-%m-%dT%H:%M:%S')
def decode(self, s):
return datetime.strptime(s, '%Y-%m-%dT%H:%M:%S')
class Storage:
def __init__(self):
serialization = SerializationMiddleware()
serialization.register_serializer(DateTimeSerializer(), 'TinyDate')
self.db = TinyDB('db.json', storage=serialization)
self.users = self.db.table('users')
def get_user(self, id, default):
if default is None:
default = {'id': id, 'name': '', 'last_report': None}
query = Query()
user = self.users.get(query.id == id)
if not user:
user = default
self.users.insert(user)
return user
def get_users_for_daily_meeting(self):
query = Query()
return self.users.search(
(~ query.last_report.exists()) |
(query.last_report.test(lambda d:
d and
d.date() < datetime.today().date()))
)
def save_user(self, user):
if not user and not 'id' in user:
raise Exception("Not a valid user")
query = Query()
self.users.update(user, query.id == user['id'])
<commit_msg>Fix users retrieval for daily meeting<commit_after>from datetime import datetime
from tinydb import TinyDB, Query
from tinydb_serialization import Serializer, SerializationMiddleware
class DateTimeSerializer(Serializer):
OBJ_CLASS = datetime # The class this serializer handles
def encode(self, obj):
return obj.strftime('%Y-%m-%dT%H:%M:%S')
def decode(self, s):
return datetime.strptime(s, '%Y-%m-%dT%H:%M:%S')
class Storage:
def __init__(self):
serialization = SerializationMiddleware()
serialization.register_serializer(DateTimeSerializer(), 'TinyDate')
self.db = TinyDB('db.json', storage=serialization)
self.users = self.db.table('users')
def get_user(self, id, default):
if default is None:
default = {'id': id, 'name': '', 'last_report': None}
query = Query()
user = self.users.get(query.id == id)
if not user:
user = default
self.users.insert(user)
return user
def get_users_for_daily_meeting(self):
query = Query()
return self.users.search(
(~ query.last_report.exists()) |
(query.last_report.test(lambda d:
not d or
d.date() < datetime.today().date()))
)
def save_user(self, user):
if not user and not 'id' in user:
raise Exception("Not a valid user")
query = Query()
self.users.update(user, query.id == user['id'])
|
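The one-word diff above (`d and` becoming `not d or`) matters because the old predicate returned a falsy value for users whose `last_report` key exists but holds None, so they were never selected for the meeting; the new predicate treats a missing or null report as "needs to report". The difference in two lines, with no TinyDB required:

from datetime import datetime

old = lambda d: d and d.date() < datetime.today().date()
new = lambda d: not d or d.date() < datetime.today().date()

print(old(None), new(None))  # None True -> the old test drops this user
print(old(datetime(2000, 1, 1)), new(datetime(2000, 1, 1)))  # True True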
e7bc0b942ef3bdc85d6dbd360e3f012c1957d36f
|
src/aka.py
|
src/aka.py
|
#!/usr/bin/env python
import json
import os
import sys
def raw_aliases():
'''
Reads in the aliases file as a Python object
'''
with open(os.environ['AKA_ALIASES'], 'r') as f:
return json.loads(f.read())
def make_lookup(alias_object, current_path=[], current_command=[], result={}):
'''
Given a raw alias object, make it easily searchable
'''
def simple_lookup(command_pieces):
return lambda _: ' '.join(command_pieces)
for alias in alias_object:
p = current_path + [alias['token']]
c = current_command + [alias['command']]
result[tuple(p)] = simple_lookup(c)
make_lookup(alias.get('branches', []), p, c, result)
return result
def chopback_lookup(alias_dict, args):
i = len(args)
while i != 0:
seek = tuple(args[:i])
if seek in alias_dict:
return alias_dict[seek](args[i:])
return None
lookup = make_lookup(raw_aliases())
try:
command = chopback_lookup(lookup, sys.argv[1:])
os.system(command)
except KeyError:
print 'Couldn\'t find alias for', (' '.join(sys.argv[1:]) or 'the absence of a pattern')
|
#!/usr/bin/env python
import json
import os
import sys
def raw_aliases():
'''
Reads in the aliases file as a Python object
'''
with open(os.environ['AKA_ALIASES'], 'r') as f:
return json.loads(f.read())
def make_lookup(alias_object, current_path=[], current_command=[], result={}):
'''
Given a raw alias object, make it easily searchable
'''
def simple_lookup(command_pieces):
return lambda args: ' '.join(command_pieces + args)
for alias in alias_object:
p = current_path + [alias['token']]
c = current_command + [alias['command']]
result[tuple(p)] = simple_lookup(c)
make_lookup(alias.get('branches', []), p, c, result)
return result
def chopback_lookup(alias_dict, args):
for i in xrange(len(alias_dict), -1, -1):
seek = tuple(args[:i])
if seek in alias_dict:
return alias_dict[seek](args[i:])
return None
if __name__ == '__main__':
lookup = make_lookup(raw_aliases())
command = chopback_lookup(lookup, sys.argv[1:])
if command:
os.system(command)
else:
print 'Couldn\'t find alias for', (' '.join(sys.argv[1:]) or 'the absence of a pattern')
|
Fix error, made command assume extras are params for now
|
Fix error, made command assume extras are params for now
|
Python
|
mit
|
mjgpy3/aka,mjgpy3/aka
|
#!/usr/bin/env python
import json
import os
import sys
def raw_aliases():
'''
Reads in the aliases file as a Python object
'''
with open(os.environ['AKA_ALIASES'], 'r') as f:
return json.loads(f.read())
def make_lookup(alias_object, current_path=[], current_command=[], result={}):
'''
Given a raw alias object, make it easily searchable
'''
def simple_lookup(command_pieces):
return lambda _: ' '.join(command_pieces)
for alias in alias_object:
p = current_path + [alias['token']]
c = current_command + [alias['command']]
result[tuple(p)] = simple_lookup(c)
make_lookup(alias.get('branches', []), p, c, result)
return result
def chopback_lookup(alias_dict, args):
i = len(args)
while i != 0:
seek = tuple(args[:i])
if seek in alias_dict:
return alias_dict[seek](args[i:])
return None
lookup = make_lookup(raw_aliases())
try:
command = chopback_lookup(lookup, sys.argv[1:])
os.system(command)
except KeyError:
print 'Couldn\'t find alias for', (' '.join(sys.argv[1:]) or 'the absence of a pattern')
Fix error, made command assume extras are params for now
|
#!/usr/bin/env python
import json
import os
import sys
def raw_aliases():
'''
Reads in the aliases file as a Python object
'''
with open(os.environ['AKA_ALIASES'], 'r') as f:
return json.loads(f.read())
def make_lookup(alias_object, current_path=[], current_command=[], result={}):
'''
Given a raw alias object, make it easily searchable
'''
def simple_lookup(command_pieces):
return lambda args: ' '.join(command_pieces + args)
for alias in alias_object:
p = current_path + [alias['token']]
c = current_command + [alias['command']]
result[tuple(p)] = simple_lookup(c)
make_lookup(alias.get('branches', []), p, c, result)
return result
def chopback_lookup(alias_dict, args):
for i in xrange(len(alias_dict), -1, -1):
seek = tuple(args[:i])
if seek in alias_dict:
return alias_dict[seek](args[i:])
return None
if __name__ == '__main__':
lookup = make_lookup(raw_aliases())
command = chopback_lookup(lookup, sys.argv[1:])
if command:
os.system(command)
else:
print 'Couldn\'t find alias for', (' '.join(sys.argv[1:]) or 'the absence of a pattern')
|
<commit_before>#!/usr/bin/env python
import json
import os
import sys
def raw_aliases():
'''
Reads in the aliases file as a Python object
'''
with open(os.environ['AKA_ALIASES'], 'r') as f:
return json.loads(f.read())
def make_lookup(alias_object, current_path=[], current_command=[], result={}):
'''
Given a raw alias object, make it easily searchable
'''
def simple_lookup(command_pieces):
return lambda _: ' '.join(command_pieces)
for alias in alias_object:
p = current_path + [alias['token']]
c = current_command + [alias['command']]
result[tuple(p)] = simple_lookup(c)
make_lookup(alias.get('branches', []), p, c, result)
return result
def chopback_lookup(alias_dict, args):
i = len(args)
while i != 0:
seek = tuple(args[:i])
if seek in alias_dict:
return alias_dict[seek](args[i:])
return None
lookup = make_lookup(raw_aliases())
try:
command = chopback_lookup(lookup, sys.argv[1:])
os.system(command)
except KeyError:
print 'Couldn\'t find alias for', (' '.join(sys.argv[1:]) or 'the absence of a pattern')
<commit_msg>Fix error, made command assume extras are params for now<commit_after>
|
#!/usr/bin/env python
import json
import os
import sys
def raw_aliases():
'''
Reads in the aliases file as a Python object
'''
with open(os.environ['AKA_ALIASES'], 'r') as f:
return json.loads(f.read())
def make_lookup(alias_object, current_path=[], current_command=[], result={}):
'''
Given a raw alias object, make it easily searchable
'''
def simple_lookup(command_pieces):
return lambda args: ' '.join(command_pieces + args)
for alias in alias_object:
p = current_path + [alias['token']]
c = current_command + [alias['command']]
result[tuple(p)] = simple_lookup(c)
make_lookup(alias.get('branches', []), p, c, result)
return result
def chopback_lookup(alias_dict, args):
for i in xrange(len(alias_dict), -1, -1):
seek = tuple(args[:i])
if seek in alias_dict:
return alias_dict[seek](args[i:])
return None
if __name__ == '__main__':
lookup = make_lookup(raw_aliases())
command = chopback_lookup(lookup, sys.argv[1:])
if command:
os.system(command)
else:
print 'Couldn\'t find alias for', (' '.join(sys.argv[1:]) or 'the absence of a pattern')
|
#!/usr/bin/env python
import json
import os
import sys
def raw_aliases():
'''
Reads in the aliases file as a Python object
'''
with open(os.environ['AKA_ALIASES'], 'r') as f:
return json.loads(f.read())
def make_lookup(alias_object, current_path=[], current_command=[], result={}):
'''
Given a raw alias object, make it easily searchable
'''
def simple_lookup(command_pieces):
return lambda _: ' '.join(command_pieces)
for alias in alias_object:
p = current_path + [alias['token']]
c = current_command + [alias['command']]
result[tuple(p)] = simple_lookup(c)
make_lookup(alias.get('branches', []), p, c, result)
return result
def chopback_lookup(alias_dict, args):
i = len(args)
while i != 0:
seek = tuple(args[:i])
if seek in alias_dict:
return alias_dict[seek](args[i:])
return None
lookup = make_lookup(raw_aliases())
try:
command = chopback_lookup(lookup, sys.argv[1:])
os.system(command)
except KeyError:
print 'Couldn\'t find alias for', (' '.join(sys.argv[1:]) or 'the absence of a pattern')
Fix error, made command assume extras are params for now#!/usr/bin/env python
import json
import os
import sys
def raw_aliases():
'''
Reads in the aliases file as a Python object
'''
with open(os.environ['AKA_ALIASES'], 'r') as f:
return json.loads(f.read())
def make_lookup(alias_object, current_path=[], current_command=[], result={}):
'''
Given a raw alias object, make it easily searchable
'''
def simple_lookup(command_pieces):
return lambda args: ' '.join(command_pieces + args)
for alias in alias_object:
p = current_path + [alias['token']]
c = current_command + [alias['command']]
result[tuple(p)] = simple_lookup(c)
make_lookup(alias.get('branches', []), p, c, result)
return result
def chopback_lookup(alias_dict, args):
for i in xrange(len(alias_dict), -1, -1):
seek = tuple(args[:i])
if seek in alias_dict:
return alias_dict[seek](args[i:])
return None
if __name__ == '__main__':
lookup = make_lookup(raw_aliases())
command = chopback_lookup(lookup, sys.argv[1:])
if command:
os.system(command)
else:
print 'Couldn\'t find alias for', (' '.join(sys.argv[1:]) or 'the absence of a pattern')
|
<commit_before>#!/usr/bin/env python
import json
import os
import sys
def raw_aliases():
'''
Reads in the aliases file as a Python object
'''
with open(os.environ['AKA_ALIASES'], 'r') as f:
return json.loads(f.read())
def make_lookup(alias_object, current_path=[], current_command=[], result={}):
'''
Given a raw alias object, make it easily searchable
'''
def simple_lookup(command_pieces):
return lambda _: ' '.join(command_pieces)
for alias in alias_object:
p = current_path + [alias['token']]
c = current_command + [alias['command']]
result[tuple(p)] = simple_lookup(c)
make_lookup(alias.get('branches', []), p, c, result)
return result
def chopback_lookup(alias_dict, args):
i = len(args)
while i != 0:
seek = tuple(args[:i])
if seek in alias_dict:
return alias_dict[seek](args[i:])
return None
lookup = make_lookup(raw_aliases())
try:
command = chopback_lookup(lookup, sys.argv[1:])
os.system(command)
except KeyError:
print 'Couldn\'t find alias for', (' '.join(sys.argv[1:]) or 'the absence of a pattern')
<commit_msg>Fix error, made command assume extras are params for now<commit_after>#!/usr/bin/env python
import json
import os
import sys
def raw_aliases():
'''
Reads in the aliases file as a Python object
'''
with open(os.environ['AKA_ALIASES'], 'r') as f:
return json.loads(f.read())
def make_lookup(alias_object, current_path=[], current_command=[], result={}):
'''
Given a raw alias object, make it easily searchable
'''
def simple_lookup(command_pieces):
return lambda args: ' '.join(command_pieces + args)
for alias in alias_object:
p = current_path + [alias['token']]
c = current_command + [alias['command']]
result[tuple(p)] = simple_lookup(c)
make_lookup(alias.get('branches', []), p, c, result)
return result
def chopback_lookup(alias_dict, args):
for i in xrange(len(alias_dict), -1, -1):
seek = tuple(args[:i])
if seek in alias_dict:
return alias_dict[seek](args[i:])
return None
if __name__ == '__main__':
lookup = make_lookup(raw_aliases())
command = chopback_lookup(lookup, sys.argv[1:])
if command:
os.system(command)
else:
print 'Couldn\'t find alias for', (' '.join(sys.argv[1:]) or 'the absence of a pattern')
|
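Both recorded versions of chopback_lookup have quirks: the pre-fix loop never decrements i, and the post-fix loop ranges over len(alias_dict) rather than len(args), so prefixes longer than the alias table are never tried. A minimal Python 3 sketch of the intended prefix-chopping idea, keyed on the argument count (illustrative only, not part of the record):

def chopback_lookup(alias_dict, args):
    # Try the longest argv prefix first, chopping one token per iteration.
    for i in range(len(args), 0, -1):
        seek = tuple(args[:i])
        if seek in alias_dict:
            # Leftover tokens are passed through as parameters for the alias.
            return alias_dict[seek](args[i:])
    return None

lookup = {('g', 'st'): lambda extra: ' '.join(['git', 'status'] + extra)}
print(chopback_lookup(lookup, ['g', 'st', '--short']))  # git status --short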
900f6e03f03301b51db6fb5855e59c31915b0aa0
|
autotime/__init__.py
|
autotime/__init__.py
|
from __future__ import print_function
try:
from time import monotonic
except ImportError:
from monotonic import monotonic
from IPython.core.magics.execution import _format_time as format_delta
class LineWatcher(object):
"""Class that implements a basic timer.
Notes
-----
* Register the `start` and `stop` methods with the IPython events API.
"""
__slots__ = ['start_time']
def start(self):
self.start_time = monotonic()
def stop(self):
delta = monotonic() - self.start_time
print('time: {}'.format(format_delta(delta)))
timer = LineWatcher()
def load_ipython_extension(ip):
timer.start()
ip.events.register('pre_run_cell', timer.start)
ip.events.register('post_run_cell', timer.stop)
def unload_ipython_extension(ip):
ip.events.unregister('pre_run_cell', timer.start)
ip.events.unregister('post_run_cell', timer.stop)
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
from __future__ import print_function
try:
from time import monotonic
except ImportError:
from monotonic import monotonic
from IPython.core.magics.execution import _format_time as format_delta
class LineWatcher(object):
"""Class that implements a basic timer.
Notes
-----
* Register the `start` and `stop` methods with the IPython events API.
"""
__slots__ = ['start_time']
def start(self):
self.start_time = monotonic()
def stop(self):
delta = monotonic() - self.start_time
print(u'time: {}'.format(format_delta(delta)))
timer = LineWatcher()
def load_ipython_extension(ip):
timer.start()
ip.events.register('pre_run_cell', timer.start)
ip.events.register('post_run_cell', timer.stop)
def unload_ipython_extension(ip):
ip.events.unregister('pre_run_cell', timer.start)
ip.events.unregister('post_run_cell', timer.stop)
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
Fix printing 'time: 303 µs' on py2.7
|
Fix printing 'time: 303 µs' on py2.7
|
Python
|
apache-2.0
|
cpcloud/ipython-autotime
|
from __future__ import print_function
try:
from time import monotonic
except ImportError:
from monotonic import monotonic
from IPython.core.magics.execution import _format_time as format_delta
class LineWatcher(object):
"""Class that implements a basic timer.
Notes
-----
* Register the `start` and `stop` methods with the IPython events API.
"""
__slots__ = ['start_time']
def start(self):
self.start_time = monotonic()
def stop(self):
delta = monotonic() - self.start_time
print('time: {}'.format(format_delta(delta)))
timer = LineWatcher()
def load_ipython_extension(ip):
timer.start()
ip.events.register('pre_run_cell', timer.start)
ip.events.register('post_run_cell', timer.stop)
def unload_ipython_extension(ip):
ip.events.unregister('pre_run_cell', timer.start)
ip.events.unregister('post_run_cell', timer.stop)
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
Fix printing 'time: 303 µs' on py2.7
|
from __future__ import print_function
try:
from time import monotonic
except ImportError:
from monotonic import monotonic
from IPython.core.magics.execution import _format_time as format_delta
class LineWatcher(object):
"""Class that implements a basic timer.
Notes
-----
* Register the `start` and `stop` methods with the IPython events API.
"""
__slots__ = ['start_time']
def start(self):
self.start_time = monotonic()
def stop(self):
delta = monotonic() - self.start_time
print(u'time: {}'.format(format_delta(delta)))
timer = LineWatcher()
def load_ipython_extension(ip):
timer.start()
ip.events.register('pre_run_cell', timer.start)
ip.events.register('post_run_cell', timer.stop)
def unload_ipython_extension(ip):
ip.events.unregister('pre_run_cell', timer.start)
ip.events.unregister('post_run_cell', timer.stop)
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
<commit_before>from __future__ import print_function
try:
from time import monotonic
except ImportError:
from monotonic import monotonic
from IPython.core.magics.execution import _format_time as format_delta
class LineWatcher(object):
"""Class that implements a basic timer.
Notes
-----
* Register the `start` and `stop` methods with the IPython events API.
"""
__slots__ = ['start_time']
def start(self):
self.start_time = monotonic()
def stop(self):
delta = monotonic() - self.start_time
print('time: {}'.format(format_delta(delta)))
timer = LineWatcher()
def load_ipython_extension(ip):
timer.start()
ip.events.register('pre_run_cell', timer.start)
ip.events.register('post_run_cell', timer.stop)
def unload_ipython_extension(ip):
ip.events.unregister('pre_run_cell', timer.start)
ip.events.unregister('post_run_cell', timer.stop)
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
<commit_msg>Fix printing 'time: 303 µs' on py2.7<commit_after>
|
from __future__ import print_function
try:
from time import monotonic
except ImportError:
from monotonic import monotonic
from IPython.core.magics.execution import _format_time as format_delta
class LineWatcher(object):
"""Class that implements a basic timer.
Notes
-----
* Register the `start` and `stop` methods with the IPython events API.
"""
__slots__ = ['start_time']
def start(self):
self.start_time = monotonic()
def stop(self):
delta = monotonic() - self.start_time
print(u'time: {}'.format(format_delta(delta)))
timer = LineWatcher()
def load_ipython_extension(ip):
timer.start()
ip.events.register('pre_run_cell', timer.start)
ip.events.register('post_run_cell', timer.stop)
def unload_ipython_extension(ip):
ip.events.unregister('pre_run_cell', timer.start)
ip.events.unregister('post_run_cell', timer.stop)
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
from __future__ import print_function
try:
from time import monotonic
except ImportError:
from monotonic import monotonic
from IPython.core.magics.execution import _format_time as format_delta
class LineWatcher(object):
"""Class that implements a basic timer.
Notes
-----
* Register the `start` and `stop` methods with the IPython events API.
"""
__slots__ = ['start_time']
def start(self):
self.start_time = monotonic()
def stop(self):
delta = monotonic() - self.start_time
print('time: {}'.format(format_delta(delta)))
timer = LineWatcher()
def load_ipython_extension(ip):
timer.start()
ip.events.register('pre_run_cell', timer.start)
ip.events.register('post_run_cell', timer.stop)
def unload_ipython_extension(ip):
ip.events.unregister('pre_run_cell', timer.start)
ip.events.unregister('post_run_cell', timer.stop)
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
Fix printing 'time: 303 µs' on py2.7
from __future__ import print_function
try:
from time import monotonic
except ImportError:
from monotonic import monotonic
from IPython.core.magics.execution import _format_time as format_delta
class LineWatcher(object):
"""Class that implements a basic timer.
Notes
-----
* Register the `start` and `stop` methods with the IPython events API.
"""
__slots__ = ['start_time']
def start(self):
self.start_time = monotonic()
def stop(self):
delta = monotonic() - self.start_time
print(u'time: {}'.format(format_delta(delta)))
timer = LineWatcher()
def load_ipython_extension(ip):
timer.start()
ip.events.register('pre_run_cell', timer.start)
ip.events.register('post_run_cell', timer.stop)
def unload_ipython_extension(ip):
ip.events.unregister('pre_run_cell', timer.start)
ip.events.unregister('post_run_cell', timer.stop)
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
<commit_before>from __future__ import print_function
try:
from time import monotonic
except ImportError:
from monotonic import monotonic
from IPython.core.magics.execution import _format_time as format_delta
class LineWatcher(object):
"""Class that implements a basic timer.
Notes
-----
* Register the `start` and `stop` methods with the IPython events API.
"""
__slots__ = ['start_time']
def start(self):
self.start_time = monotonic()
def stop(self):
delta = monotonic() - self.start_time
print('time: {}'.format(format_delta(delta)))
timer = LineWatcher()
def load_ipython_extension(ip):
timer.start()
ip.events.register('pre_run_cell', timer.start)
ip.events.register('post_run_cell', timer.stop)
def unload_ipython_extension(ip):
ip.events.unregister('pre_run_cell', timer.start)
ip.events.unregister('post_run_cell', timer.stop)
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
<commit_msg>Fix printing 'time: 303 µs' on py2.7<commit_after>from __future__ import print_function
try:
from time import monotonic
except ImportError:
from monotonic import monotonic
from IPython.core.magics.execution import _format_time as format_delta
class LineWatcher(object):
"""Class that implements a basic timer.
Notes
-----
* Register the `start` and `stop` methods with the IPython events API.
"""
__slots__ = ['start_time']
def start(self):
self.start_time = monotonic()
def stop(self):
delta = monotonic() - self.start_time
print(u'time: {}'.format(format_delta(delta)))
timer = LineWatcher()
def load_ipython_extension(ip):
timer.start()
ip.events.register('pre_run_cell', timer.start)
ip.events.register('post_run_cell', timer.stop)
def unload_ipython_extension(ip):
ip.events.unregister('pre_run_cell', timer.start)
ip.events.unregister('post_run_cell', timer.stop)
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
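The one-character fix matters because format_delta can return strings like '303 µs', and the u'' prefix sidesteps Python 2's implicit str/unicode coercion around that glyph. Outside of IPython's event hooks, the same start/stop pattern can be sketched as a context manager (Python 3, illustrative only):

import time
from contextlib import contextmanager

@contextmanager
def timed(label='time'):
    start = time.monotonic()  # monotonic clock ignores wall-clock adjustments
    try:
        yield
    finally:
        print('{}: {:.6f} s'.format(label, time.monotonic() - start))

with timed():
    sum(range(10**6))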
123f6a34d9d423f380254b70a5013c0df592d4b6
|
tests/run_coverage.py
|
tests/run_coverage.py
|
#!/usr/bin/env python
"""Script to collect coverage information on ofStateManager"""
import os
import sys
import inspect
import coverage
import subprocess
def main():
"""Main function"""
arguments = ''
if len(sys.argv) > 1:
arguments = ' '.join(sys.argv[1:])
testdir = os.path.abspath(os.path.dirname(
inspect.getfile(inspect.currentframe())))
os.environ['COVERAGE_PROCESS_START'] = os.path.join(testdir, '.coveragerc')
os.environ['COVERAGE_FILE'] = os.path.join(testdir, '.coverage')
cov = coverage.coverage(source=os.path.join(testdir, '..'),
include=os.path.join(testdir, '..', 'ofStateManager.py'))
cov.erase()
subprocess.call('coverage run -m py.test ' + arguments, shell=True,
cwd=testdir)
cov.combine()
cov.html_report(directory=os.path.join(testdir, 'htmlcov'))
cov.report(show_missing=False)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
"""Script to collect coverage information on ofStateManager"""
import os
import sys
import inspect
import coverage
import subprocess
def main():
"""Main function"""
arguments = ''
if len(sys.argv) > 1:
arguments = ' '.join(sys.argv[1:])
testdir = os.path.abspath(os.path.dirname(
inspect.getfile(inspect.currentframe())))
script_path = os.path.join(testdir, '..', 'ofStateManager.py')
os.environ['COVERAGE_PROCESS_START'] = os.path.join(testdir, '.coveragerc')
os.environ['COVERAGE_FILE'] = os.path.join(testdir, '.coverage')
subprocess.call('coverage erase', shell=True, cwd=testdir)
subprocess.call('coverage run -m py.test ' + arguments, shell=True, cwd=testdir)
subprocess.call('coverage combine', shell=True, cwd=testdir)
subprocess.call('coverage html -d ' + os.path.join(testdir, 'htmlcov') +
' --include=' + script_path, shell=True, cwd=testdir)
subprocess.call('coverage report --include=' + script_path,
shell=True, cwd=testdir)
if __name__ == '__main__':
main()
|
Replace coverage API calls by subprocess calls.
|
Replace coverage API calls by subprocess calls.
|
Python
|
mit
|
bilderbuchi/ofStateManager
|
#!/usr/bin/env python
"""Script to collect coverage information on ofStateManager"""
import os
import sys
import inspect
import coverage
import subprocess
def main():
"""Main function"""
arguments = ''
if len(sys.argv) > 1:
arguments = ' '.join(sys.argv[1:])
testdir = os.path.abspath(os.path.dirname(
inspect.getfile(inspect.currentframe())))
os.environ['COVERAGE_PROCESS_START'] = os.path.join(testdir, '.coveragerc')
os.environ['COVERAGE_FILE'] = os.path.join(testdir, '.coverage')
cov = coverage.coverage(source=os.path.join(testdir, '..'),
include=os.path.join(testdir, '..', 'ofStateManager.py'))
cov.erase()
subprocess.call('coverage run -m py.test ' + arguments, shell=True,
cwd=testdir)
cov.combine()
cov.html_report(directory=os.path.join(testdir, 'htmlcov'))
cov.report(show_missing=False)
if __name__ == '__main__':
main()
Replace coverage API calls by subprocess calls.
|
#!/usr/bin/env python
"""Script to collect coverage information on ofStateManager"""
import os
import sys
import inspect
import coverage
import subprocess
def main():
"""Main function"""
arguments = ''
if len(sys.argv) > 1:
arguments = ' '.join(sys.argv[1:])
testdir = os.path.abspath(os.path.dirname(
inspect.getfile(inspect.currentframe())))
script_path = os.path.join(testdir, '..', 'ofStateManager.py')
os.environ['COVERAGE_PROCESS_START'] = os.path.join(testdir, '.coveragerc')
os.environ['COVERAGE_FILE'] = os.path.join(testdir, '.coverage')
subprocess.call('coverage erase', shell=True, cwd=testdir)
subprocess.call('coverage run -m py.test ' + arguments, shell=True, cwd=testdir)
subprocess.call('coverage combine', shell=True, cwd=testdir)
subprocess.call('coverage html -d ' + os.path.join(testdir, 'htmlcov') +
' --include=' + script_path, shell=True, cwd=testdir)
subprocess.call('coverage report --include=' + script_path,
shell=True, cwd=testdir)
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
"""Script to collect coverage information on ofStateManager"""
import os
import sys
import inspect
import coverage
import subprocess
def main():
"""Main function"""
arguments = ''
if len(sys.argv) > 1:
arguments = ' '.join(sys.argv[1:])
testdir = os.path.abspath(os.path.dirname(
inspect.getfile(inspect.currentframe())))
os.environ['COVERAGE_PROCESS_START'] = os.path.join(testdir, '.coveragerc')
os.environ['COVERAGE_FILE'] = os.path.join(testdir, '.coverage')
cov = coverage.coverage(source=os.path.join(testdir, '..'),
include=os.path.join(testdir, '..', 'ofStateManager.py'))
cov.erase()
subprocess.call('coverage run -m py.test ' + arguments, shell=True,
cwd=testdir)
cov.combine()
cov.html_report(directory=os.path.join(testdir, 'htmlcov'))
cov.report(show_missing=False)
if __name__ == '__main__':
main()
<commit_msg>Replace coverage API calls by subprocess calls.<commit_after>
|
#!/usr/bin/env python
"""Script to collect coverage information on ofStateManager"""
import os
import sys
import inspect
import coverage
import subprocess
def main():
"""Main function"""
arguments = ''
if len(sys.argv) > 1:
arguments = ' '.join(sys.argv[1:])
testdir = os.path.abspath(os.path.dirname(
inspect.getfile(inspect.currentframe())))
script_path = os.path.join(testdir, '..', 'ofStateManager.py')
os.environ['COVERAGE_PROCESS_START'] = os.path.join(testdir, '.coveragerc')
os.environ['COVERAGE_FILE'] = os.path.join(testdir, '.coverage')
subprocess.call('coverage erase', shell=True, cwd=testdir)
subprocess.call('coverage run -m py.test ' + arguments, shell=True, cwd=testdir)
subprocess.call('coverage combine', shell=True, cwd=testdir)
subprocess.call('coverage html -d ' + os.path.join(testdir, 'htmlcov') +
' --include=' + script_path, shell=True, cwd=testdir)
subprocess.call('coverage report --include=' + script_path,
shell=True, cwd=testdir)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
"""Script to collect coverage information on ofStateManager"""
import os
import sys
import inspect
import coverage
import subprocess
def main():
"""Main function"""
arguments = ''
if len(sys.argv) > 1:
arguments = ' '.join(sys.argv[1:])
testdir = os.path.abspath(os.path.dirname(
inspect.getfile(inspect.currentframe())))
os.environ['COVERAGE_PROCESS_START'] = os.path.join(testdir, '.coveragerc')
os.environ['COVERAGE_FILE'] = os.path.join(testdir, '.coverage')
cov = coverage.coverage(source=os.path.join(testdir, '..'),
include=os.path.join(testdir, '..', 'ofStateManager.py'))
cov.erase()
subprocess.call('coverage run -m py.test ' + arguments, shell=True,
cwd=testdir)
cov.combine()
cov.html_report(directory=os.path.join(testdir, 'htmlcov'))
cov.report(show_missing=False)
if __name__ == '__main__':
main()
Replace coverage API calls by subprocess calls.
#!/usr/bin/env python
"""Script to collect coverage information on ofStateManager"""
import os
import sys
import inspect
import coverage
import subprocess
def main():
"""Main function"""
arguments = ''
if len(sys.argv) > 1:
arguments = ' '.join(sys.argv[1:])
testdir = os.path.abspath(os.path.dirname(
inspect.getfile(inspect.currentframe())))
script_path = os.path.join(testdir, '..', 'ofStateManager.py')
os.environ['COVERAGE_PROCESS_START'] = os.path.join(testdir, '.coveragerc')
os.environ['COVERAGE_FILE'] = os.path.join(testdir, '.coverage')
subprocess.call('coverage erase', shell=True, cwd=testdir)
subprocess.call('coverage run -m py.test ' + arguments, shell=True, cwd=testdir)
subprocess.call('coverage combine', shell=True, cwd=testdir)
subprocess.call('coverage html -d ' + os.path.join(testdir, 'htmlcov') +
' --include=' + script_path, shell=True, cwd=testdir)
subprocess.call('coverage report --include=' + script_path,
shell=True, cwd=testdir)
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
"""Script to collect coverage information on ofStateManager"""
import os
import sys
import inspect
import coverage
import subprocess
def main():
"""Main function"""
arguments = ''
if len(sys.argv) > 1:
arguments = ' '.join(sys.argv[1:])
testdir = os.path.abspath(os.path.dirname(
inspect.getfile(inspect.currentframe())))
os.environ['COVERAGE_PROCESS_START'] = os.path.join(testdir, '.coveragerc')
os.environ['COVERAGE_FILE'] = os.path.join(testdir, '.coverage')
cov = coverage.coverage(source=os.path.join(testdir, '..'),
include=os.path.join(testdir, '..', 'ofStateManager.py'))
cov.erase()
subprocess.call('coverage run -m py.test ' + arguments, shell=True,
cwd=testdir)
cov.combine()
cov.html_report(directory=os.path.join(testdir, 'htmlcov'))
cov.report(show_missing=False)
if __name__ == '__main__':
main()
<commit_msg>Replace coverage API calls by subprocess calls.<commit_after>#!/usr/bin/env python
"""Script to collect coverage information on ofStateManager"""
import os
import sys
import inspect
import coverage
import subprocess
def main():
"""Main function"""
arguments = ''
if len(sys.argv) > 1:
arguments = ' '.join(sys.argv[1:])
testdir = os.path.abspath(os.path.dirname(
inspect.getfile(inspect.currentframe())))
script_path = os.path.join(testdir, '..', 'ofStateManager.py')
os.environ['COVERAGE_PROCESS_START'] = os.path.join(testdir, '.coveragerc')
os.environ['COVERAGE_FILE'] = os.path.join(testdir, '.coverage')
subprocess.call('coverage erase', shell=True, cwd=testdir)
subprocess.call('coverage run -m py.test ' + arguments, shell=True, cwd=testdir)
subprocess.call('coverage combine', shell=True, cwd=testdir)
subprocess.call('coverage html -d ' + os.path.join(testdir, 'htmlcov') +
' --include=' + script_path, shell=True, cwd=testdir)
subprocess.call('coverage report --include=' + script_path,
shell=True, cwd=testdir)
if __name__ == '__main__':
main()
|
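Driving coverage through subprocess calls means each step runs exactly as a developer would run it from a shell, and COVERAGE_PROCESS_START lets the py.test child processes write their own data files for the later 'coverage combine'. A hedged sketch of the same pipeline using argument lists instead of shell=True (paths are placeholders):

import os
import subprocess

testdir = os.path.dirname(os.path.abspath(__file__))
env = dict(os.environ, COVERAGE_FILE=os.path.join(testdir, '.coverage'))
for step in (['coverage', 'erase'],
             ['coverage', 'run', '-m', 'pytest'],
             ['coverage', 'combine'],
             ['coverage', 'report']):
    # Argument lists avoid the shell-quoting issues that shell=True invites.
    subprocess.call(step, cwd=testdir, env=env)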
3f87a8679a39f8422b013d157d1e93bdfd47d315
|
tests/test_checker.py
|
tests/test_checker.py
|
import pytest
# TODO: Implement real tests!
#
# Right now this is just here as a stub so that we at least have some
# test for Travis to go through. We want complete test coverage,
# eventually.
def test_checker():
assert True
|
import pytest
from botbot import checker, problems
# TODO: Implement real tests!
#
# Right now this is just here as a stub so that we at least have some
# test for Travis to go through. We want complete test coverage,
# eventually.
def test_fastq_checker():
bad = checker.is_fastq("bad.fastq")
good = checker.is_fastq("good.py")
assert bad == problems.PROB_FILE_IS_FASTQ
assert good == problems.PROB_NO_PROBLEM
|
Add basic test for fastq file checker
|
Add basic test for fastq file checker
|
Python
|
mit
|
jackstanek/BotBot,jackstanek/BotBot
|
import pytest
# TODO: Implement real tests!
#
# Right now this is just here as a stub so that we at least have some
# test for Travis to go through. We want complete test coverage,
# eventually.
def test_checker():
assert True
Add basic test for fastq file checker
|
import pytest
from botbot import checker, problems
# TODO: Implement real tests!
#
# Right now this is just here as a stub so that we at least have some
# test for Travis to go through. We want complete test coverage,
# eventually.
def test_fastq_checker():
bad = checker.is_fastq("bad.fastq")
good = checker.is_fastq("good.py")
assert bad == problems.PROB_FILE_IS_FASTQ
assert good == problems.PROB_NO_PROBLEM
|
<commit_before>import pytest
# TODO: Implement real tests!
#
# Right now this is just here as a stub so that we at least have some
# test for Travis to go through. We want complete test coverage,
# eventually.
def test_checker():
assert True
<commit_msg>Add basic test for fastq file checker<commit_after>
|
import pytest
from botbot import checker, problems
# TODO: Implement real tests!
#
# Right now this is just here as a stub so that we at least have some
# test for Travis to go through. We want complete test coverage,
# eventually.
def test_fastq_checker():
bad = checker.is_fastq("bad.fastq")
good = checker.is_fastq("good.py")
assert bad == problems.PROB_FILE_IS_FASTQ
assert good == problems.PROB_NO_PROBLEM
|
import pytest
# TODO: Implement real tests!
#
# Right now this is just here as a stub so that we at least have some
# test for Travis to go through. We want complete test coverage,
# eventually.
def test_checker():
assert True
Add basic test for fastq file checker
import pytest
from botbot import checker, problems
# TODO: Implement real tests!
#
# Right now this is just here as a stub so that we at least have some
# test for Travis to go through. We want complete test coverage,
# eventually.
def test_fastq_checker():
bad = checker.is_fastq("bad.fastq")
good = checker.is_fastq("good.py")
assert bad == problems.PROB_FILE_IS_FASTQ
assert good == problems.PROB_NO_PROBLEM
|
<commit_before>import pytest
# TODO: Implement real tests!
#
# Right now this is just here as a stub so that we at least have some
# test for Travis to go through. We want complete test coverage,
# eventually.
def test_checker():
assert True
<commit_msg>Add basic test for fastq file checker<commit_after>import pytest
from botbot import checker, problems
# TODO: Implement real tests!
#
# Right now this is just here as a stub so that we at least have some
# test for Travis to go through. We want complete test coverage,
# eventually.
def test_fastq_checker():
bad = checker.is_fastq("bad.fastq")
good = checker.is_fastq("good.py")
assert bad == problems.PROB_FILE_IS_FASTQ
assert good == problems.PROB_NO_PROBLEM
|
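The stub test is replaced by one real good/bad assertion pair. pytest's parametrize decorator extends the same check to more filenames without duplicating the body; a self-contained sketch with a stand-in for botbot's checker (the names are assumptions mirroring the record):

import pytest

def is_fastq(name):
    # Stand-in for botbot's checker.is_fastq, which the record imports.
    return 'PROB_FILE_IS_FASTQ' if name.endswith('.fastq') else 'PROB_NO_PROBLEM'

@pytest.mark.parametrize('name, expected', [
    ('bad.fastq', 'PROB_FILE_IS_FASTQ'),
    ('good.py', 'PROB_NO_PROBLEM'),
    ('notes.txt', 'PROB_NO_PROBLEM'),
])
def test_is_fastq(name, expected):
    assert is_fastq(name) == expected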
3e0c64d89b937659dac23cb78b148717b49735ca
|
tests/testapp/urls.py
|
tests/testapp/urls.py
|
from django.urls import path
from . import views
urlpatterns = [
path('', views.ExampleFormView.as_view(), name='upload'),
]
|
try:
from django.urls import path
except ImportError:
from django.conf.urls import url as path
from . import views
urlpatterns = [
path('', views.ExampleFormView.as_view(), name='upload'),
]
|
Fix test suite for Django 1.11
|
Fix test suite for Django 1.11
|
Python
|
mit
|
codingjoe/django-s3file,codingjoe/django-s3file,codingjoe/django-s3file
|
from django.urls import path
from . import views
urlpatterns = [
path('', views.ExampleFormView.as_view(), name='upload'),
]
Fix test suite for Django 1.11
|
try:
from django.urls import path
except ImportError:
from django.conf.urls import url as path
from . import views
urlpatterns = [
path('', views.ExampleFormView.as_view(), name='upload'),
]
|
<commit_before>from django.urls import path
from . import views
urlpatterns = [
path('', views.ExampleFormView.as_view(), name='upload'),
]
<commit_msg>Fix test suite for Django 1.11<commit_after>
|
try:
from django.urls import path
except ImportError:
from django.conf.urls import url as path
from . import views
urlpatterns = [
path('', views.ExampleFormView.as_view(), name='upload'),
]
|
from django.urls import path
from . import views
urlpatterns = [
path('', views.ExampleFormView.as_view(), name='upload'),
]
Fix test suite for Django 1.11
try:
from django.urls import path
except ImportError:
from django.conf.urls import url as path
from . import views
urlpatterns = [
path('', views.ExampleFormView.as_view(), name='upload'),
]
|
<commit_before>from django.urls import path
from . import views
urlpatterns = [
path('', views.ExampleFormView.as_view(), name='upload'),
]
<commit_msg>Fix test suite for Django 1.11<commit_after>try:
from django.urls import path
except ImportError:
from django.conf.urls import url as path
from . import views
urlpatterns = [
path('', views.ExampleFormView.as_view(), name='upload'),
]
|
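The try/except import is the usual way to straddle Django releases: path() arrived in Django 2.0, so 1.11 falls back to url(), which interprets its first argument as a regex. Aliasing url as path is only safe here because the route is the empty string; url('') matches any URL as an unanchored regex, which happens to be harmless in a single-view test app. For real routes the two syntaxes have to be translated, roughly like this (illustrative sketch; the view is assumed):

try:
    from django.urls import path                # Django >= 2.0
    route = 'files/<int:pk>/'                   # path-converter syntax
except ImportError:
    from django.conf.urls import url as path    # Django 1.11
    route = r'^files/(?P<pk>\d+)/$'             # equivalent regex

# urlpatterns = [path(route, some_view, name='file-detail')]  # some_view assumed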
2d57647d54e7a68f8a8139fc2b5a3168dde5195f
|
server.py
|
server.py
|
import recommender
from flask import Flask, render_template, request
app = Flask(__name__)
@app.route('/')
def index():
return render_template('template.html')
@app.route('/graph')
def my_link():
# here we want to get the value of user (i.e. ?user=some-value)
seed = request.args.get('seed')
nsfw = bool(request.args.get('nsfw'))
breadth = int(request.args.get('breadth'))
depth = int(request.args.get('depth'))
rec = recommender.Recommender(breadth, depth, nsfw)
rec.load_dataset()
# Graph parameters
rec.output_path = 'static'
(result, msg) = rec.generate_graph(seed, True)
if result == 'Sucess':
filename = msg
html = "<img src='" + filename + "'></img>"
else:
html = msg
return html
if __name__ == '__main__':
app.run(debug=True)
|
import recommender
import os
from flask import Flask, render_template, request
app = Flask(__name__)
@app.route('/')
def index():
return render_template('template.html')
@app.route('/graph')
def my_link():
# here we want to get the value of user (i.e. ?user=some-value)
seed = request.args.get('seed')
nsfw = bool(request.args.get('nsfw'))
breadth = int(request.args.get('breadth'))
depth = int(request.args.get('depth'))
rec = recommender.Recommender(breadth, depth, nsfw)
rec.load_dataset()
# Graph parameters
rec.output_path = 'static'
(result, msg) = rec.generate_graph(seed, True)
if result == 'Sucess':
filename = msg
html = "<img src='" + filename + "'></img>"
else:
html = msg
return html
if __name__ == '__main__':
port = int(os.environ.get('PORT',5000))
app.run(host='0.0.0.0', port=port)
|
Set port by environment variable
|
Set port by environment variable
|
Python
|
mit
|
cdated/subredditor,cdated/reddit-crawler,cdated/subredditor,cdated/subredditor,cdated/subreddit-crawler,cdated/reddit-crawler,cdated/subreddit-crawler,cdated/subreddit-crawler,cdated/subreddit-crawler,cdated/subredditor
|
import recommender
from flask import Flask, render_template, request
app = Flask(__name__)
@app.route('/')
def index():
return render_template('template.html')
@app.route('/graph')
def my_link():
# here we want to get the value of user (i.e. ?user=some-value)
seed = request.args.get('seed')
nsfw = bool(request.args.get('nsfw'))
breadth = int(request.args.get('breadth'))
depth = int(request.args.get('depth'))
rec = recommender.Recommender(breadth, depth, nsfw)
rec.load_dataset()
# Graph parameters
rec.output_path = 'static'
(result, msg) = rec.generate_graph(seed, True)
if result == 'Sucess':
filename = msg
html = "<img src='" + filename + "'></img>"
else:
html = msg
return html
if __name__ == '__main__':
app.run(debug=True)
Set port by environment variable
|
import recommender
import os
from flask import Flask, render_template, request
app = Flask(__name__)
@app.route('/')
def index():
return render_template('template.html')
@app.route('/graph')
def my_link():
# here we want to get the value of user (i.e. ?user=some-value)
seed = request.args.get('seed')
nsfw = bool(request.args.get('nsfw'))
breadth = int(request.args.get('breadth'))
depth = int(request.args.get('depth'))
rec = recommender.Recommender(breadth, depth, nsfw)
rec.load_dataset()
# Graph parameters
rec.output_path = 'static'
(result, msg) = rec.generate_graph(seed, True)
if result == 'Sucess':
filename = msg
html = "<img src='" + filename + "'></img>"
else:
html = msg
return html
if __name__ == '__main__':
port = int(os.environ.get('PORT',5000))
app.run(host='0.0.0.0', port=port)
|
<commit_before>import recommender
from flask import Flask, render_template, request
app = Flask(__name__)
@app.route('/')
def index():
return render_template('template.html')
@app.route('/graph')
def my_link():
# here we want to get the value of user (i.e. ?user=some-value)
seed = request.args.get('seed')
nsfw = bool(request.args.get('nsfw'))
breadth = int(request.args.get('breadth'))
depth = int(request.args.get('depth'))
rec = recommender.Recommender(breadth, depth, nsfw)
rec.load_dataset()
# Graph parameters
rec.output_path = 'static'
(result, msg) = rec.generate_graph(seed, True)
if result == 'Sucess':
filename = msg
html = "<img src='" + filename + "'></img>"
else:
html = msg
return html
if __name__ == '__main__':
app.run(debug=True)
<commit_msg>Set port by environment variable<commit_after>
|
import recommender
import os
from flask import Flask, render_template, request
app = Flask(__name__)
@app.route('/')
def index():
return render_template('template.html')
@app.route('/graph')
def my_link():
# here we want to get the value of user (i.e. ?user=some-value)
seed = request.args.get('seed')
nsfw = bool(request.args.get('nsfw'))
breadth = int(request.args.get('breadth'))
depth = int(request.args.get('depth'))
rec = recommender.Recommender(breadth, depth, nsfw)
rec.load_dataset()
# Graph parameters
rec.output_path = 'static'
(result, msg) = rec.generate_graph(seed, True)
if result == 'Sucess':
filename = msg
html = "<img src='" + filename + "'></img>"
else:
html = msg
return html
if __name__ == '__main__':
port = int(os.environ.get('PORT',5000))
app.run(host='0.0.0.0', port=port)
|
import recommender
from flask import Flask, render_template, request
app = Flask(__name__)
@app.route('/')
def index():
return render_template('template.html')
@app.route('/graph')
def my_link():
# here we want to get the value of user (i.e. ?user=some-value)
seed = request.args.get('seed')
nsfw = bool(request.args.get('nsfw'))
breadth = int(request.args.get('breadth'))
depth = int(request.args.get('depth'))
rec = recommender.Recommender(breadth, depth, nsfw)
rec.load_dataset()
# Graph parameters
rec.output_path = 'static'
(result, msg) = rec.generate_graph(seed, True)
if result == 'Sucess':
filename = msg
html = "<img src='" + filename + "'></img>"
else:
html = msg
return html
if __name__ == '__main__':
app.run(debug=True)
Set port by environment variable
import recommender
import os
from flask import Flask, render_template, request
app = Flask(__name__)
@app.route('/')
def index():
return render_template('template.html')
@app.route('/graph')
def my_link():
# here we want to get the value of user (i.e. ?user=some-value)
seed = request.args.get('seed')
nsfw = bool(request.args.get('nsfw'))
breadth = int(request.args.get('breadth'))
depth = int(request.args.get('depth'))
rec = recommender.Recommender(breadth, depth, nsfw)
rec.load_dataset()
# Graph parameters
rec.output_path = 'static'
(result, msg) = rec.generate_graph(seed, True)
if result == 'Sucess':
filename = msg
html = "<img src='" + filename + "'></img>"
else:
html = msg
return html
if __name__ == '__main__':
port = int(os.environ.get('PORT',5000))
app.run(host='0.0.0.0', port=port)
|
<commit_before>import recommender
from flask import Flask, render_template, request
app = Flask(__name__)
@app.route('/')
def index():
return render_template('template.html')
@app.route('/graph')
def my_link():
# here we want to get the value of user (i.e. ?user=some-value)
seed = request.args.get('seed')
nsfw = bool(request.args.get('nsfw'))
breadth = int(request.args.get('breadth'))
depth = int(request.args.get('depth'))
rec = recommender.Recommender(breadth, depth, nsfw)
rec.load_dataset()
# Graph parameters
rec.output_path = 'static'
(result, msg) = rec.generate_graph(seed, True)
if result == 'Sucess':
filename = msg
html = "<img src='" + filename + "'></img>"
else:
html = msg
return html
if __name__ == '__main__':
app.run(debug=True)
<commit_msg>Set port by environment variable<commit_after>import recommender
import os
from flask import Flask, render_template, request
app = Flask(__name__)
@app.route('/')
def index():
return render_template('template.html')
@app.route('/graph')
def my_link():
# here we want to get the value of user (i.e. ?user=some-value)
seed = request.args.get('seed')
nsfw = bool(request.args.get('nsfw'))
breadth = int(request.args.get('breadth'))
depth = int(request.args.get('depth'))
rec = recommender.Recommender(breadth, depth, nsfw)
rec.load_dataset()
# Graph parameters
rec.output_path = 'static'
(result, msg) = rec.generate_graph(seed, True)
if result == 'Sucess':
filename = msg
html = "<img src='" + filename + "'></img>"
else:
html = msg
return html
if __name__ == '__main__':
port = int(os.environ.get('PORT',5000))
app.run(host='0.0.0.0', port=port)
|
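Reading PORT from the environment is the standard platform-as-a-service hook (Heroku and similar set it at process start), and binding 0.0.0.0 makes the server reachable from outside the container, while the 5000 default keeps local runs working. Note the change also drops debug=True, which is what you want once the server is exposed. A self-contained sketch (illustrative app, not the record's recommender):

import os
from flask import Flask

app = Flask(__name__)

@app.route('/')
def index():
    return 'ok'

if __name__ == '__main__':
    # The platform injects PORT; fall back to Flask's default locally.
    port = int(os.environ.get('PORT', 5000))
    app.run(host='0.0.0.0', port=port)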
fa5d78df781143d7e0105ccb1a5da923b2ca0b60
|
server.py
|
server.py
|
"""This module runs the api server."""
import os
from app import flask_app, db
from app.models import User, BucketList, BucketListItem
from flask.ext.script import Manager, Shell
from flask.ext.migrate import Migrate, MigrateCommand
from flask.ext.restful import Resource, Api
from app.api_v1.resources import TestResource
app = flask_app
api = Api(app=app, prefix='/api/v1.0')
manager = Manager(app)
migrate = Migrate(app, db)
# add resources
api.add_resource(TestResource, '/')
def make_shell_context():
"""Add app, database and models to the shell."""
return dict(app=app, db=db, User=User, BucketList=BucketList,
BucketListItem=BucketListItem)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command('db', MigrateCommand)
@manager.command
def run_tests():
"""Run tests."""
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=2).run(tests)
if __name__ == '__main__':
manager.run()
|
"""This module runs the api server."""
import os
from app import flask_app, db
from app.models import User, BucketList, BucketListItem
from flask.ext.script import Manager, Shell
from flask.ext.migrate import Migrate, MigrateCommand
from flask.ext.restful import Resource, Api
from app.api_v1.resources import TestResource, BucketListApi
app = flask_app
api = Api(app=app, prefix='/api/v1.0')
manager = Manager(app)
migrate = Migrate(app, db)
# add resources
api.add_resource(TestResource, '/')
api.add_resource(BucketListApi, '/user/<user_id>/bucketlists/')
def make_shell_context():
"""Add app, database and models to the shell."""
return dict(app=app, db=db, User=User, BucketList=BucketList,
BucketListItem=BucketListItem)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command('db', MigrateCommand)
@manager.command
def run_tests():
"""Run tests."""
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=2).run(tests)
if __name__ == '__main__':
manager.run()
|
Add BucketListApi resource to api.
|
[Feature] Add BucketListApi resource to api.
|
Python
|
mit
|
andela-akiura/bucketlist
|
"""This module runs the api server."""
import os
from app import flask_app, db
from app.models import User, BucketList, BucketListItem
from flask.ext.script import Manager, Shell
from flask.ext.migrate import Migrate, MigrateCommand
from flask.ext.restful import Resource, Api
from app.api_v1.resources import TestResource
app = flask_app
api = Api(app=app, prefix='/api/v1.0')
manager = Manager(app)
migrate = Migrate(app, db)
# add resources
api.add_resource(TestResource, '/')
def make_shell_context():
"""Add app, database and models to the shell."""
return dict(app=app, db=db, User=User, BucketList=BucketList,
BucketListItem=BucketListItem)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command('db', MigrateCommand)
@manager.command
def run_tests():
"""Run tests."""
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=2).run(tests)
if __name__ == '__main__':
manager.run()
[Feature] Add BucketListApi resource to api.
|
"""This module runs the api server."""
import os
from app import flask_app, db
from app.models import User, BucketList, BucketListItem
from flask.ext.script import Manager, Shell
from flask.ext.migrate import Migrate, MigrateCommand
from flask.ext.restful import Resource, Api
from app.api_v1.resources import TestResource, BucketListApi
app = flask_app
api = Api(app=app, prefix='/api/v1.0')
manager = Manager(app)
migrate = Migrate(app, db)
# add resources
api.add_resource(TestResource, '/')
api.add_resource(BucketListApi, '/user/<user_id>/bucketlists/')
def make_shell_context():
"""Add app, database and models to the shell."""
return dict(app=app, db=db, User=User, BucketList=BucketList,
BucketListItem=BucketListItem)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command('db', MigrateCommand)
@manager.command
def run_tests():
"""Run tests."""
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=2).run(tests)
if __name__ == '__main__':
manager.run()
|
<commit_before>"""This module runs the api server."""
import os
from app import flask_app, db
from app.models import User, BucketList, BucketListItem
from flask.ext.script import Manager, Shell
from flask.ext.migrate import Migrate, MigrateCommand
from flask.ext.restful import Resource, Api
from app.api_v1.resources import TestResource
app = flask_app
api = Api(app=app, prefix='/api/v1.0')
manager = Manager(app)
migrate = Migrate(app, db)
# add resources
api.add_resource(TestResource, '/')
def make_shell_context():
"""Add app, database and models to the shell."""
return dict(app=app, db=db, User=User, BucketList=BucketList,
BucketListItem=BucketListItem)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command('db', MigrateCommand)
@manager.command
def run_tests():
"""Run tests."""
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=2).run(tests)
if __name__ == '__main__':
manager.run()
<commit_msg>[Feature] Add BucketListApi resource to api.<commit_after>
|
"""This module runs the api server."""
import os
from app import flask_app, db
from app.models import User, BucketList, BucketListItem
from flask.ext.script import Manager, Shell
from flask.ext.migrate import Migrate, MigrateCommand
from flask.ext.restful import Resource, Api
from app.api_v1.resources import TestResource, BucketListApi
app = flask_app
api = Api(app=app, prefix='/api/v1.0')
manager = Manager(app)
migrate = Migrate(app, db)
# add resources
api.add_resource(TestResource, '/')
api.add_resource(BucketListApi, '/user/<user_id>/bucketlists/')
def make_shell_context():
"""Add app, database and models to the shell."""
return dict(app=app, db=db, User=User, BucketList=BucketList,
BucketListItem=BucketListItem)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command('db', MigrateCommand)
@manager.command
def run_tests():
"""Run tests."""
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=2).run(tests)
if __name__ == '__main__':
manager.run()
|
"""This module runs the api server."""
import os
from app import flask_app, db
from app.models import User, BucketList, BucketListItem
from flask.ext.script import Manager, Shell
from flask.ext.migrate import Migrate, MigrateCommand
from flask.ext.restful import Resource, Api
from app.api_v1.resources import TestResource
app = flask_app
api = Api(app=app, prefix='/api/v1.0')
manager = Manager(app)
migrate = Migrate(app, db)
# add resources
api.add_resource(TestResource, '/')
def make_shell_context():
"""Add app, database and models to the shell."""
return dict(app=app, db=db, User=User, BucketList=BucketList,
BucketListItem=BucketListItem)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command('db', MigrateCommand)
@manager.command
def run_tests():
"""Run tests."""
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=2).run(tests)
if __name__ == '__main__':
manager.run()
[Feature] Add BucketListApi resource to api.
"""This module runs the api server."""
import os
from app import flask_app, db
from app.models import User, BucketList, BucketListItem
from flask.ext.script import Manager, Shell
from flask.ext.migrate import Migrate, MigrateCommand
from flask.ext.restful import Resource, Api
from app.api_v1.resources import TestResource, BucketListApi
app = flask_app
api = Api(app=app, prefix='/api/v1.0')
manager = Manager(app)
migrate = Migrate(app, db)
# add resources
api.add_resource(TestResource, '/')
api.add_resource(BucketListApi, '/user/<user_id>/bucketlists/')
def make_shell_context():
"""Add app, database and models to the shell."""
return dict(app=app, db=db, User=User, BucketList=BucketList,
BucketListItem=BucketListItem)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command('db', MigrateCommand)
@manager.command
def run_tests():
"""Run tests."""
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=2).run(tests)
if __name__ == '__main__':
manager.run()
|
<commit_before>"""This module runs the api server."""
import os
from app import flask_app, db
from app.models import User, BucketList, BucketListItem
from flask.ext.script import Manager, Shell
from flask.ext.migrate import Migrate, MigrateCommand
from flask.ext.restful import Resource, Api
from app.api_v1.resources import TestResource
app = flask_app
api = Api(app=app, prefix='/api/v1.0')
manager = Manager(app)
migrate = Migrate(app, db)
# add resources
api.add_resource(TestResource, '/')
def make_shell_context():
"""Add app, database and models to the shell."""
return dict(app=app, db=db, User=User, BucketList=BucketList,
BucketListItem=BucketListItem)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command('db', MigrateCommand)
@manager.command
def run_tests():
"""Run tests."""
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=2).run(tests)
if __name__ == '__main__':
manager.run()
<commit_msg>[Feature] Add BucketListApi resource to api.<commit_after>"""This module runs the api server."""
import os
from app import flask_app, db
from app.models import User, BucketList, BucketListItem
from flask.ext.script import Manager, Shell
from flask.ext.migrate import Migrate, MigrateCommand
from flask.ext.restful import Resource, Api
from app.api_v1.resources import TestResource, BucketListApi
app = flask_app
api = Api(app=app, prefix='/api/v1.0')
manager = Manager(app)
migrate = Migrate(app, db)
# add resources
api.add_resource(TestResource, '/')
api.add_resource(BucketListApi, '/user/<user_id>/bucketlists/')
def make_shell_context():
"""Add app, database and models to the shell."""
return dict(app=app, db=db, User=User, BucketList=BucketList,
BucketListItem=BucketListItem)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command('db', MigrateCommand)
@manager.command
def run_tests():
"""Run tests."""
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=2).run(tests)
if __name__ == '__main__':
manager.run()
|
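add_resource binds a Resource class to a route under the Api prefix, and angle-bracket segments such as <user_id> arrive as keyword arguments on the HTTP-verb methods. A minimal sketch with the modern flask_restful import path (the record uses the long-deprecated flask.ext.* form):

from flask import Flask
from flask_restful import Api, Resource

app = Flask(__name__)
api = Api(app, prefix='/api/v1.0')

class BucketListApi(Resource):
    def get(self, user_id):
        # user_id comes straight from the <user_id> URL segment.
        return {'user': user_id, 'bucketlists': []}

api.add_resource(BucketListApi, '/user/<user_id>/bucketlists/')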
099a0b045548d5a93707a9ef99bece2578ed50ea
|
user_voting/models.py
|
user_voting/models.py
|
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User
from django.db import models
from user_voting.managers import VoteManager
SCORES = (
(u'+1', +1),
(u'-1', -1),
(u'?', 0),
)
class Vote(models.Model):
"""
A vote on an object by a User.
"""
user = models.ForeignKey(User)
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
object = generic.GenericForeignKey('content_type', 'object_id')
score = models.SmallIntegerField()
objects = VoteManager()
class Meta:
db_table = 'user_votes'
# One vote per user per object
unique_together = (('user', 'content_type', 'object_id'),)
def __unicode__(self):
return u'%s: score %d by %s' % (self.object, self.score, self.user)
|
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User
from django.db import models
from user_voting.managers import VoteManager
SCORES = (
(u'+1', +1),
(u'-1', -1),
(u'?', 0),
)
class Vote(models.Model):
"""
A vote on an object by a User.
"""
user = models.ForeignKey(User)
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
object = generic.GenericForeignKey('content_type', 'object_id')
score = models.SmallIntegerField()
date = models.DateTimeField(auto_now=True)
objects = VoteManager()
class Meta:
db_table = 'user_votes'
# One vote per user per object
unique_together = (('user', 'content_type', 'object_id'),)
def __unicode__(self):
return u'%s: score %d by %s' % (self.object, self.score, self.user)
|
Add date field for timestamps
|
user_voting: Add date field for timestamps
|
Python
|
agpl-3.0
|
kansanmuisti/kamu,kansanmuisti/kamu,kansanmuisti/kamu,kansanmuisti/kamu,kansanmuisti/kamu
|
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User
from django.db import models
from user_voting.managers import VoteManager
SCORES = (
(u'+1', +1),
(u'-1', -1),
(u'?', 0),
)
class Vote(models.Model):
"""
A vote on an object by a User.
"""
user = models.ForeignKey(User)
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
object = generic.GenericForeignKey('content_type', 'object_id')
score = models.SmallIntegerField()
objects = VoteManager()
class Meta:
db_table = 'user_votes'
# One vote per user per object
unique_together = (('user', 'content_type', 'object_id'),)
def __unicode__(self):
return u'%s: score %d by %s' % (self.object, self.score, self.user)
user_voting: Add date field for timestamps
|
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User
from django.db import models
from user_voting.managers import VoteManager
SCORES = (
(u'+1', +1),
(u'-1', -1),
(u'?', 0),
)
class Vote(models.Model):
"""
A vote on an object by a User.
"""
user = models.ForeignKey(User)
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
object = generic.GenericForeignKey('content_type', 'object_id')
score = models.SmallIntegerField()
date = models.DateTimeField(auto_now=True)
objects = VoteManager()
class Meta:
db_table = 'user_votes'
# One vote per user per object
unique_together = (('user', 'content_type', 'object_id'),)
def __unicode__(self):
return u'%s: score %d by %s' % (self.object, self.score, self.user)
|
<commit_before>from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User
from django.db import models
from user_voting.managers import VoteManager
SCORES = (
(u'+1', +1),
(u'-1', -1),
(u'?', 0),
)
class Vote(models.Model):
"""
A vote on an object by a User.
"""
user = models.ForeignKey(User)
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
object = generic.GenericForeignKey('content_type', 'object_id')
score = models.SmallIntegerField()
objects = VoteManager()
class Meta:
db_table = 'user_votes'
# One vote per user per object
unique_together = (('user', 'content_type', 'object_id'),)
def __unicode__(self):
return u'%s: score %d by %s' % (self.object, self.score, self.user)
<commit_msg>user_voting: Add date field for timestamps<commit_after>
|
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User
from django.db import models
from user_voting.managers import VoteManager
SCORES = (
(u'+1', +1),
(u'-1', -1),
(u'?', 0),
)
class Vote(models.Model):
"""
A vote on an object by a User.
"""
user = models.ForeignKey(User)
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
object = generic.GenericForeignKey('content_type', 'object_id')
score = models.SmallIntegerField()
date = models.DateTimeField(auto_now=True)
objects = VoteManager()
class Meta:
db_table = 'user_votes'
# One vote per user per object
unique_together = (('user', 'content_type', 'object_id'),)
def __unicode__(self):
return u'%s: score %d by %s' % (self.object, self.score, self.user)
|
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User
from django.db import models
from user_voting.managers import VoteManager
SCORES = (
(u'+1', +1),
(u'-1', -1),
(u'?', 0),
)
class Vote(models.Model):
"""
A vote on an object by a User.
"""
user = models.ForeignKey(User)
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
object = generic.GenericForeignKey('content_type', 'object_id')
score = models.SmallIntegerField()
objects = VoteManager()
class Meta:
db_table = 'user_votes'
# One vote per user per object
unique_together = (('user', 'content_type', 'object_id'),)
def __unicode__(self):
return u'%s: score %d by %s' % (self.object, self.score, self.user)
user_voting: Add date field for timestamps
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User
from django.db import models
from user_voting.managers import VoteManager
SCORES = (
(u'+1', +1),
(u'-1', -1),
(u'?', 0),
)
class Vote(models.Model):
"""
A vote on an object by a User.
"""
user = models.ForeignKey(User)
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
object = generic.GenericForeignKey('content_type', 'object_id')
score = models.SmallIntegerField()
date = models.DateTimeField(auto_now=True)
objects = VoteManager()
class Meta:
db_table = 'user_votes'
# One vote per user per object
unique_together = (('user', 'content_type', 'object_id'),)
def __unicode__(self):
return u'%s: score %d by %s' % (self.object, self.score, self.user)
|
<commit_before>from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User
from django.db import models
from user_voting.managers import VoteManager
SCORES = (
(u'+1', +1),
(u'-1', -1),
(u'?', 0),
)
class Vote(models.Model):
"""
A vote on an object by a User.
"""
user = models.ForeignKey(User)
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
object = generic.GenericForeignKey('content_type', 'object_id')
score = models.SmallIntegerField()
objects = VoteManager()
class Meta:
db_table = 'user_votes'
# One vote per user per object
unique_together = (('user', 'content_type', 'object_id'),)
def __unicode__(self):
return u'%s: score %d by %s' % (self.object, self.score, self.user)
<commit_msg>user_voting: Add date field for timestamps<commit_after>from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User
from django.db import models
from user_voting.managers import VoteManager
SCORES = (
(u'+1', +1),
(u'-1', -1),
(u'?', 0),
)
class Vote(models.Model):
"""
A vote on an object by a User.
"""
user = models.ForeignKey(User)
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
object = generic.GenericForeignKey('content_type', 'object_id')
score = models.SmallIntegerField()
date = models.DateTimeField(auto_now=True)
objects = VoteManager()
class Meta:
db_table = 'user_votes'
# One vote per user per object
unique_together = (('user', 'content_type', 'object_id'),)
def __unicode__(self):
return u'%s: score %d by %s' % (self.object, self.score, self.user)
|
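One subtlety of the added field: auto_now=True rewrites the timestamp on every save(), so it records when a vote was last changed, not when it was first cast; auto_now_add=True is the variant that freezes the creation time. Both in one sketch (illustrative model, not the record's):

from django.db import models

class TimestampedVote(models.Model):
    score = models.SmallIntegerField()
    created = models.DateTimeField(auto_now_add=True)  # set once, on INSERT
    updated = models.DateTimeField(auto_now=True)      # refreshed on every save()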
4f1f78ece6466f6070e35a5057642af716e08612
|
apps/careers/admin.py
|
apps/careers/admin.py
|
from cms.admin import SearchMetaBaseAdmin
from django.contrib import admin
from .models import Career
@admin.register(Career)
class CareerAdmin(SearchMetaBaseAdmin):
prepopulated_fields = {'slug': ('title',)}
fieldsets = (
(None, {
'fields': ('page', 'title', 'slug', 'location', 'summary',
'description', 'email_address', 'order')
}),
SearchMetaBaseAdmin.PUBLICATION_FIELDS,
SearchMetaBaseAdmin.SEO_FIELDS,
)
|
from cms.admin import SearchMetaBaseAdmin
from django.contrib import admin
from .models import Career
@admin.register(Career)
class CareerAdmin(SearchMetaBaseAdmin):
prepopulated_fields = {'slug': ['title']}
fieldsets = (
(None, {
'fields': ['page', 'title', 'slug', 'location', 'summary',
'description', 'email_address', 'order']
}),
SearchMetaBaseAdmin.PUBLICATION_FIELDS,
SearchMetaBaseAdmin.SEO_FIELDS,
)
|
Change tuples to lists where applicable
|
Change tuples to lists where applicable
|
Python
|
mit
|
onespacemedia/cms-jobs,onespacemedia/cms-jobs
|
from cms.admin import SearchMetaBaseAdmin
from django.contrib import admin
from .models import Career
@admin.register(Career)
class CareerAdmin(SearchMetaBaseAdmin):
prepopulated_fields = {'slug': ('title',)}
fieldsets = (
(None, {
'fields': ('page', 'title', 'slug', 'location', 'summary',
'description', 'email_address', 'order')
}),
SearchMetaBaseAdmin.PUBLICATION_FIELDS,
SearchMetaBaseAdmin.SEO_FIELDS,
)
Change tuples to lists where applicable
|
from cms.admin import SearchMetaBaseAdmin
from django.contrib import admin
from .models import Career
@admin.register(Career)
class CareerAdmin(SearchMetaBaseAdmin):
prepopulated_fields = {'slug': ['title']}
fieldsets = (
(None, {
'fields': ['page', 'title', 'slug', 'location', 'summary',
'description', 'email_address', 'order']
}),
SearchMetaBaseAdmin.PUBLICATION_FIELDS,
SearchMetaBaseAdmin.SEO_FIELDS,
)
|
<commit_before>from cms.admin import SearchMetaBaseAdmin
from django.contrib import admin
from .models import Career
@admin.register(Career)
class CareerAdmin(SearchMetaBaseAdmin):
prepopulated_fields = {'slug': ('title',)}
fieldsets = (
(None, {
'fields': ('page', 'title', 'slug', 'location', 'summary',
'description', 'email_address', 'order')
}),
SearchMetaBaseAdmin.PUBLICATION_FIELDS,
SearchMetaBaseAdmin.SEO_FIELDS,
)
<commit_msg>Change tuples to lists where applicable<commit_after>
|
from cms.admin import SearchMetaBaseAdmin
from django.contrib import admin
from .models import Career
@admin.register(Career)
class CareerAdmin(SearchMetaBaseAdmin):
prepopulated_fields = {'slug': ['title']}
fieldsets = (
(None, {
'fields': ['page', 'title', 'slug', 'location', 'summary',
'description', 'email_address', 'order']
}),
SearchMetaBaseAdmin.PUBLICATION_FIELDS,
SearchMetaBaseAdmin.SEO_FIELDS,
)
|
from cms.admin import SearchMetaBaseAdmin
from django.contrib import admin
from .models import Career
@admin.register(Career)
class CareerAdmin(SearchMetaBaseAdmin):
prepopulated_fields = {'slug': ('title',)}
fieldsets = (
(None, {
'fields': ('page', 'title', 'slug', 'location', 'summary',
'description', 'email_address', 'order')
}),
SearchMetaBaseAdmin.PUBLICATION_FIELDS,
SearchMetaBaseAdmin.SEO_FIELDS,
)
Change tuples to lists where applicable
from cms.admin import SearchMetaBaseAdmin
from django.contrib import admin
from .models import Career
@admin.register(Career)
class CareerAdmin(SearchMetaBaseAdmin):
prepopulated_fields = {'slug': ['title']}
fieldsets = (
(None, {
'fields': ['page', 'title', 'slug', 'location', 'summary',
'description', 'email_address', 'order']
}),
SearchMetaBaseAdmin.PUBLICATION_FIELDS,
SearchMetaBaseAdmin.SEO_FIELDS,
)
|
<commit_before>from cms.admin import SearchMetaBaseAdmin
from django.contrib import admin
from .models import Career
@admin.register(Career)
class CareerAdmin(SearchMetaBaseAdmin):
prepopulated_fields = {'slug': ('title',)}
fieldsets = (
(None, {
'fields': ('page', 'title', 'slug', 'location', 'summary',
'description', 'email_address', 'order')
}),
SearchMetaBaseAdmin.PUBLICATION_FIELDS,
SearchMetaBaseAdmin.SEO_FIELDS,
)
<commit_msg>Change tuples to lists where applicable<commit_after>from cms.admin import SearchMetaBaseAdmin
from django.contrib import admin
from .models import Career
@admin.register(Career)
class CareerAdmin(SearchMetaBaseAdmin):
prepopulated_fields = {'slug': ['title']}
fieldsets = (
(None, {
'fields': ['page', 'title', 'slug', 'location', 'summary',
'description', 'email_address', 'order']
}),
SearchMetaBaseAdmin.PUBLICATION_FIELDS,
SearchMetaBaseAdmin.SEO_FIELDS,
)
|
d0f1114fdcee63d65c5dd74501b3e329a12f8e53
|
indra/sources/eidos/eidos_reader.py
|
indra/sources/eidos/eidos_reader.py
|
from indra.java_vm import autoclass, JavaException
from .scala_utils import get_python_json
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system
"""
def __init__(self):
eidos = autoclass('org.clulab.wm.AgroSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.jsonAST(mentions)
json_dict = get_python_json(mentions_json)
return json_dict
|
from indra.java_vm import autoclass, JavaException
from .scala_utils import get_python_json
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system. It is
instantiated only when first processing text.
"""
def __init__(self):
self.eidos_reader = None
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
if self.eidos_reader is None:
eidos = autoclass('org.clulab.wm.AgroSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.jsonAST(mentions)
json_dict = get_python_json(mentions_json)
return json_dict
|
Make Eidos reader instantiate when first reading
|
Make Eidos reader instantiate when first reading
|
Python
|
bsd-2-clause
|
johnbachman/belpy,johnbachman/indra,sorgerlab/indra,johnbachman/indra,bgyori/indra,sorgerlab/belpy,bgyori/indra,sorgerlab/indra,sorgerlab/belpy,johnbachman/belpy,pvtodorov/indra,pvtodorov/indra,bgyori/indra,johnbachman/indra,johnbachman/belpy,sorgerlab/belpy,pvtodorov/indra,pvtodorov/indra,sorgerlab/indra
|
from indra.java_vm import autoclass, JavaException
from .scala_utils import get_python_json
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system
"""
def __init__(self):
eidos = autoclass('org.clulab.wm.AgroSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.jsonAST(mentions)
json_dict = get_python_json(mentions_json)
return json_dict
Make Eidos reader instantiate when first reading
|
from indra.java_vm import autoclass, JavaException
from .scala_utils import get_python_json
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system. It is
instantiated only when first processing text.
"""
def __init__(self):
self.eidos_reader = None
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
if self.eidos_reader is None:
eidos = autoclass('org.clulab.wm.AgroSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.jsonAST(mentions)
json_dict = get_python_json(mentions_json)
return json_dict
|
<commit_before>from indra.java_vm import autoclass, JavaException
from .scala_utils import get_python_json
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system
"""
def __init__(self):
eidos = autoclass('org.clulab.wm.AgroSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.jsonAST(mentions)
json_dict = get_python_json(mentions_json)
return json_dict
<commit_msg>Make Eidos reader instantiate when first reading<commit_after>
|
from indra.java_vm import autoclass, JavaException
from .scala_utils import get_python_json
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system. It is
instantiated only when first processing text.
"""
def __init__(self):
self.eidos_reader = None
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
if self.eidos_reader is None:
eidos = autoclass('org.clulab.wm.AgroSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.jsonAST(mentions)
json_dict = get_python_json(mentions_json)
return json_dict
|
from indra.java_vm import autoclass, JavaException
from .scala_utils import get_python_json
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system
"""
def __init__(self):
eidos = autoclass('org.clulab.wm.AgroSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.jsonAST(mentions)
json_dict = get_python_json(mentions_json)
return json_dict
Make Eidos reader instantiate when first reading
from indra.java_vm import autoclass, JavaException
from .scala_utils import get_python_json
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system. It is
instantiated only when first processing text.
"""
def __init__(self):
self.eidos_reader = None
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
if self.eidos_reader is None:
eidos = autoclass('org.clulab.wm.AgroSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.jsonAST(mentions)
json_dict = get_python_json(mentions_json)
return json_dict
|
<commit_before>from indra.java_vm import autoclass, JavaException
from .scala_utils import get_python_json
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system
"""
def __init__(self):
eidos = autoclass('org.clulab.wm.AgroSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.jsonAST(mentions)
json_dict = get_python_json(mentions_json)
return json_dict
<commit_msg>Make Eidos reader instantiate when first reading<commit_after>from indra.java_vm import autoclass, JavaException
from .scala_utils import get_python_json
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system. It is
instantiated only when first processing text.
"""
def __init__(self):
self.eidos_reader = None
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
if self.eidos_reader is None:
eidos = autoclass('org.clulab.wm.AgroSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.jsonAST(mentions)
json_dict = get_python_json(mentions_json)
return json_dict
|
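Note (generic sketch; class and method names are illustrative, not taken from the record above): the change is the standard lazy-initialization pattern -- defer an expensive constructor (here, bootstrapping a JVM-backed reading system) until the first call that needs it, then reuse the cached handle on every later call:
class LazyResource(object):
    def __init__(self):
        self._handle = None  # expensive object is not built yet
    def process(self, item):
        if self._handle is None:           # first call pays the startup cost
            self._handle = self._build()   # later calls reuse the same handle
        return self._handle.run(item)
    def _build(self):
        raise NotImplementedError  # stands in for the costly construction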
ca042edc7f9709f2217b669fb5a68e9aac3ab61c
|
cbv/management/commands/cbv_dumpversion.py
|
cbv/management/commands/cbv_dumpversion.py
|
from django.core.management import call_command
from django.core.management.commands import LabelCommand
class Command(LabelCommand):
def handle_label(self, label, **options):
# Because django will use the default manager of each model, we
# monkeypatch the manager to filter by our label before calling
# the dumpdata command to dump only the subset of data we want.
# Set the
# Call the dumpdata command.
call_command('dumpdata', 'cbv')
|
import json
from django.db.models.query import QuerySet
from django.core.management import call_command
from django.core.management.base import LabelCommand
from django.core import serializers
from cbv import models
class Command(LabelCommand):
"""Dump the django cbv app data for a specific version."""
def handle_label(self, label, **options):
filtered_models = {
models.ProjectVersion: 'version_number',
models.Module: 'project_version__version_number',
models.ModuleAttribute: 'module__project_version__version_number',
models.Function: 'module__project_version__version_number',
models.Klass: 'module__project_version__version_number',
models.KlassAttribute: 'klass__module__project_version__version_number',
models.Method: 'klass__module__project_version__version_number',
}
objects = []
for model, version_arg in filtered_models.items():
filter_kwargs = {version_arg: label}
result = model.objects.filter(**filter_kwargs)
objects = objects + list(result)
for obj in objects:
obj.pk = None
dump = serializers.serialize('json', objects, indent=1, use_natural_keys=True)
self.stdout.write(dump)
|
Allow dumpdata of specific version of cbv.
|
Allow dumpdata of specific version of cbv.
|
Python
|
bsd-2-clause
|
abhijo89/django-cbv-inspector,refreshoxford/django-cbv-inspector,abhijo89/django-cbv-inspector,refreshoxford/django-cbv-inspector,refreshoxford/django-cbv-inspector,abhijo89/django-cbv-inspector,abhijo89/django-cbv-inspector,refreshoxford/django-cbv-inspector
|
from django.core.management import call_command
from django.core.management.commands import LabelCommand
class Command(LabelCommand):
def handle_label(self, label, **options):
# Because django will use the default manager of each model, we
# monkeypatch the manager to filter by our label before calling
# the dumpdata command to dump only the subset of data we want.
# Set the
# Call the dumpdata command.
call_command('dumpdata', 'cbv')
Allow dumpdata of specific version of cbv.
|
import json
from django.db.models.query import QuerySet
from django.core.management import call_command
from django.core.management.base import LabelCommand
from django.core import serializers
from cbv import models
class Command(LabelCommand):
"""Dump the django cbv app data for a specific version."""
def handle_label(self, label, **options):
filtered_models = {
models.ProjectVersion: 'version_number',
models.Module: 'project_version__version_number',
models.ModuleAttribute: 'module__project_version__version_number',
models.Function: 'module__project_version__version_number',
models.Klass: 'module__project_version__version_number',
models.KlassAttribute: 'klass__module__project_version__version_number',
models.Method: 'klass__module__project_version__version_number',
}
objects = []
for model, version_arg in filtered_models.items():
filter_kwargs = {version_arg: label}
result = model.objects.filter(**filter_kwargs)
objects = objects + list(result)
for obj in objects:
obj.pk = None
dump = serializers.serialize('json', objects, indent=1, use_natural_keys=True)
self.stdout.write(dump)
|
<commit_before>from django.core.management import call_command
from django.core.management.commands import LabelCommand
class Command(LabelCommand):
def handle_label(self, label, **options):
# Because django will use the default manager of each model, we
# monkeypatch the manager to filter by our label before calling
# the dumpdata command to dump only the subset of data we want.
# Set the
# Call the dumpdata command.
call_command('dumpdata', 'cbv')
<commit_msg>Allow dumpdata of specific version of cbv.<commit_after>
|
import json
from django.db.models.query import QuerySet
from django.core.management import call_command
from django.core.management.base import LabelCommand
from django.core import serializers
from cbv import models
class Command(LabelCommand):
"""Dump the django cbv app data for a specific version."""
def handle_label(self, label, **options):
filtered_models = {
models.ProjectVersion: 'version_number',
models.Module: 'project_version__version_number',
models.ModuleAttribute: 'module__project_version__version_number',
models.Function: 'module__project_version__version_number',
models.Klass: 'module__project_version__version_number',
models.KlassAttribute: 'klass__module__project_version__version_number',
models.Method: 'klass__module__project_version__version_number',
}
objects = []
for model, version_arg in filtered_models.items():
filter_kwargs = {version_arg: label}
result = model.objects.filter(**filter_kwargs)
objects = objects + list(result)
for obj in objects:
obj.pk = None
dump = serializers.serialize('json', objects, indent=1, use_natural_keys=True)
self.stdout.write(dump)
|
from django.core.management import call_command
from django.core.management.commands import LabelCommand
class Command(LabelCommand):
def handle_label(self, label, **options):
# Because django will use the default manager of each model, we
# monkeypatch the manager to filter by our label before calling
# the dumpdata command to dump only the subset of data we want.
# Set the
# Call the dumpdata command.
call_command('dumpdata', 'cbv')
Allow dumpdata of specific version of cbv.
import json
from django.db.models.query import QuerySet
from django.core.management import call_command
from django.core.management.base import LabelCommand
from django.core import serializers
from cbv import models
class Command(LabelCommand):
"""Dump the django cbv app data for a specific version."""
def handle_label(self, label, **options):
filtered_models = {
models.ProjectVersion: 'version_number',
models.Module: 'project_version__version_number',
models.ModuleAttribute: 'module__project_version__version_number',
models.Function: 'module__project_version__version_number',
models.Klass: 'module__project_version__version_number',
models.KlassAttribute: 'klass__module__project_version__version_number',
models.Method: 'klass__module__project_version__version_number',
}
objects = []
for model, version_arg in filtered_models.items():
filter_kwargs = {version_arg: label}
result = model.objects.filter(**filter_kwargs)
objects = objects + list(result)
for obj in objects:
obj.pk = None
dump = serializers.serialize('json', objects, indent=1, use_natural_keys=True)
self.stdout.write(dump)
|
<commit_before>from django.core.management import call_command
from django.core.management.commands import LabelCommand
class Command(LabelCommand):
def handle_label(self, label, **options):
# Because django will use the default manager of each model, we
# monkeypatch the manager to filter by our label before calling
# the dumpdata command to dump only the subset of data we want.
# Set the
# Call the dumpdata command.
call_command('dumpdata', 'cbv')
<commit_msg>Allow dumpdata of specific version of cbv.<commit_after>import json
from django.db.models.query import QuerySet
from django.core.management import call_command
from django.core.management.base import LabelCommand
from django.core import serializers
from cbv import models
class Command(LabelCommand):
"""Dump the django cbv app data for a specific version."""
def handle_label(self, label, **options):
filtered_models = {
models.ProjectVersion: 'version_number',
models.Module: 'project_version__version_number',
models.ModuleAttribute: 'module__project_version__version_number',
models.Function: 'module__project_version__version_number',
models.Klass: 'module__project_version__version_number',
models.KlassAttribute: 'klass__module__project_version__version_number',
models.Method: 'klass__module__project_version__version_number',
}
objects = []
for model, version_arg in filtered_models.items():
filter_kwargs = {version_arg: label}
result = model.objects.filter(**filter_kwargs)
objects = objects + list(result)
for obj in objects:
obj.pk = None
dump = serializers.serialize('json', objects, indent=1, use_natural_keys=True)
self.stdout.write(dump)
|
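Note (hedged sketch; assumes hypothetical Django models, and use_natural_keys mirrors the older Django API shown in the record -- newer Django splits it into use_natural_foreign_keys/use_natural_primary_keys): the command spans foreign-key chains with double-underscore lookups so every model is filtered back to a single version_number, then clears primary keys so the fixture can be loaded into another database where natural keys identify rows:
from django.core import serializers
def dump_for_version(version, lookup_paths):
    """lookup_paths maps model class -> '__'-separated path to version_number."""
    objects = []
    for model, path in lookup_paths.items():
        objects.extend(model.objects.filter(**{path: version}))
    for obj in objects:
        obj.pk = None  # let natural keys, not ids, identify rows on load
    return serializers.serialize('json', objects, indent=1,
                                 use_natural_keys=True)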
7d02bd555d7519d485d00e02136d26a6e4e7096e
|
nova/db/sqlalchemy/migrate_repo/versions/034_change_instance_id_in_migrations.py
|
nova/db/sqlalchemy/migrate_repo/versions/034_change_instance_id_in_migrations.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import *
from sqlalchemy import Column, Integer, String, MetaData, Table
meta = MetaData()
#
# Tables to alter
#
#
instance_id = Column('instance_id', Integer())
instance_uuid = Column('instance_uuid', String(255))
def upgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.create_column(instance_uuid)
migrations.c.instance_id.drop()
def downgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.c.instance_uuid.drop()
migrations.create_column(instance_id)
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import *
from sqlalchemy import Column, Integer, String, MetaData, Table
meta = MetaData()
#
# Tables to alter
#
#
instance_id = Column('instance_id', Integer())
instance_uuid = Column('instance_uuid', String(255))
def upgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.create_column(instance_uuid)
if migrate_engine.name == "mysql":
migrate_engine.execute("ALTER TABLE migrations DROP FOREIGN KEY " \
"`migrations_ibfk_1`;")
migrations.c.instance_id.drop()
def downgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.c.instance_uuid.drop()
migrations.create_column(instance_id)
|
Drop FK before dropping instance_id column.
|
Drop FK before dropping instance_id column.
|
Python
|
apache-2.0
|
sacharya/nova,jianghuaw/nova,leilihh/novaha,eneabio/nova,vladikr/nova_drafts,KarimAllah/nova,sileht/deb-openstack-nova,Stavitsky/nova,DirectXMan12/nova-hacking,akash1808/nova_test_latest,raildo/nova,gspilio/nova,tangfeixiong/nova,jianghuaw/nova,Juniper/nova,JioCloud/nova,zhimin711/nova,usc-isi/nova,orbitfp7/nova,JianyuWang/nova,vmturbo/nova,sebrandon1/nova,jeffrey4l/nova,Francis-Liu/animated-broccoli,psiwczak/openstack,MountainWei/nova,tianweizhang/nova,yrobla/nova,maelnor/nova,whitepages/nova,maoy/zknova,joker946/nova,russellb/nova,iuliat/nova,qwefi/nova,rahulunair/nova,berrange/nova,sileht/deb-openstack-nova,mahak/nova,fnordahl/nova,sridevikoushik31/openstack,Metaswitch/calico-nova,gooddata/openstack-nova,sebrandon1/nova,redhat-openstack/nova,eayunstack/nova,mandeepdhami/nova,tealover/nova,eharney/nova,yrobla/nova,CEG-FYP-OpenStack/scheduler,TieWei/nova,maelnor/nova,TwinkleChawla/nova,KarimAllah/nova,cloudbau/nova,isyippee/nova,mikalstill/nova,hanlind/nova,mgagne/nova,badock/nova,qwefi/nova,paulmathews/nova,kimjaejoong/nova,spring-week-topos/nova-week,plumgrid/plumgrid-nova,alaski/nova,petrutlucian94/nova,thomasem/nova,barnsnake351/nova,cernops/nova,akash1808/nova,Triv90/Nova,yrobla/nova,watonyweng/nova,akash1808/nova_test_latest,NoBodyCam/TftpPxeBootBareMetal,Tehsmash/nova,Juniper/nova,iuliat/nova,orbitfp7/nova,alexandrucoman/vbox-nova-driver,aristanetworks/arista-ovs-nova,fnordahl/nova,cernops/nova,zaina/nova,projectcalico/calico-nova,russellb/nova,apporc/nova,j-carpentier/nova,shahar-stratoscale/nova,DirectXMan12/nova-hacking,tealover/nova,vmturbo/nova,rahulunair/nova,JianyuWang/nova,varunarya10/nova_test_latest,imsplitbit/nova,klmitch/nova,silenceli/nova,NewpTone/stacklab-nova,apporc/nova,devendermishrajio/nova_test_latest,dawnpower/nova,alvarolopez/nova,felixma/nova,saleemjaveds/https-github.com-openstack-nova,adelina-t/nova,angdraug/nova,mikalstill/nova,akash1808/nova,Yuriy-Leonov/nova,CiscoSystems/nova,klmitch/nova,watonyweng/nova,devoid/nova,bgxavier/nova,citrix-openstack-build/nova,psiwczak/openstack,nikesh-mahalka/nova,sridevikoushik31/nova,CiscoSystems/nova,joker946/nova,JioCloud/nova,salv-orlando/MyRepo,rrader/nova-docker-plugin,kimjaejoong/nova,rickerc/nova_audit,savi-dev/nova,sridevikoushik31/nova,hanlind/nova,DirectXMan12/nova-hacking,blueboxgroup/nova,JioCloud/nova_test_latest,eonpatapon/nova,luogangyi/bcec-nova,belmiromoreira/nova,fajoy/nova,rickerc/nova_audit,double12gzh/nova,sileht/deb-openstack-nova,cloudbase/nova,eayunstack/nova,NeCTAR-RC/nova,aristanetworks/arista-ovs-nova,CCI-MOC/nova,sridevikoushik31/openstack,silenceli/nova,Brocade-OpenSource/OpenStack-DNRM-Nova,virtualopensystems/nova,Juniper/nova,devendermishrajio/nova,tudorvio/nova,edulramirez/nova,bgxavier/nova,cyx1231st/nova,shootstar/novatest,varunarya10/nova_test_latest,maheshp/novatest,cernops/nova,imsplitbit/nova,maheshp/novatest,russellb/nova,josephsuh/extra-specs,mahak/nova,mgagne/nova,plumgrid/plumgrid-nova,gspilio/nova,sridevikoushik31/nova,luogangyi/bcec-nova,NoBodyCam/TftpPxeBootBareMetal,alaski/nova,cloudbau/nova,CloudServer/nova,bigswitch/nova,houshengbo/nova_vmware_compute_driver,dawnpower/nova,rajalokan/nova,belmiromoreira/nova,virtualopensystems/nova,saleemjaveds/https-github.com-openstack-nova,bclau/nova,eonpatapon/nova,Juniper/nova,citrix-openstack-build/nova,j-carpentier/nova,sacharya/nova,zhimin711/nova,Yusuke1987/openstack_template,angdraug/nova,mmnelemane/nova,eneabio/nova,cloudbase/nova,klmitch/nova,vmturbo/nova,openstack/nova,zaina/nova,edulramirez/nova,eharney/nova,josephsuh/extra-spec
s,cloudbase/nova,shail2810/nova,jianghuaw/nova,Triv90/Nova,NeCTAR-RC/nova,viggates/nova,zzicewind/nova,LoHChina/nova,vmturbo/nova,spring-week-topos/nova-week,noironetworks/nova,rajalokan/nova,openstack/nova,berrange/nova,takeshineshiro/nova,eneabio/nova,cloudbase/nova-virtualbox,felixma/nova,fajoy/nova,whitepages/nova,usc-isi/extra-specs,psiwczak/openstack,ruslanloman/nova,isyippee/nova,ruslanloman/nova,petrutlucian94/nova_dev,dstroppa/openstack-smartos-nova-grizzly,shahar-stratoscale/nova,bclau/nova,josephsuh/extra-specs,SUSE-Cloud/nova,vladikr/nova_drafts,noironetworks/nova,fajoy/nova,ntt-sic/nova,maoy/zknova,Francis-Liu/animated-broccoli,BeyondTheClouds/nova,blueboxgroup/nova,LoHChina/nova,cloudbase/nova-virtualbox,Triv90/Nova,jianghuaw/nova,SUSE-Cloud/nova,leilihh/novaha,devoid/nova,salv-orlando/MyRepo,Yuriy-Leonov/nova,jeffrey4l/nova,NewpTone/stacklab-nova,tangfeixiong/nova,zzicewind/nova,houshengbo/nova_vmware_compute_driver,yosshy/nova,BeyondTheClouds/nova,sridevikoushik31/openstack,aristanetworks/arista-ovs-nova,maheshp/novatest,OpenAcademy-OpenStack/nova-scheduler,mandeepdhami/nova,phenoxim/nova,paulmathews/nova,usc-isi/nova,TwinkleChawla/nova,mikalstill/nova,Metaswitch/calico-nova,ntt-sic/nova,KarimAllah/nova,houshengbo/nova_vmware_compute_driver,projectcalico/calico-nova,CloudServer/nova,savi-dev/nova,usc-isi/extra-specs,tanglei528/nova,yatinkumbhare/openstack-nova,tianweizhang/nova,Stavitsky/nova,gooddata/openstack-nova,redhat-openstack/nova,Yusuke1987/openstack_template,sridevikoushik31/nova,yatinkumbhare/openstack-nova,tanglei528/nova,leilihh/nova,dstroppa/openstack-smartos-nova-grizzly,klmitch/nova,mahak/nova,sebrandon1/nova,bigswitch/nova,rajalokan/nova,rrader/nova-docker-plugin,leilihh/nova,raildo/nova,mmnelemane/nova,ewindisch/nova,dstroppa/openstack-smartos-nova-grizzly,gooddata/openstack-nova,Tehsmash/nova,JioCloud/nova_test_latest,petrutlucian94/nova_dev,ted-gould/nova,rahulunair/nova,dims/nova,badock/nova,yosshy/nova,MountainWei/nova,scripnichenko/nova,double12gzh/nova,gspilio/nova,OpenAcademy-OpenStack/nova-scheduler,nikesh-mahalka/nova,shootstar/novatest,savi-dev/nova,scripnichenko/nova,gooddata/openstack-nova,devendermishrajio/nova_test_latest,usc-isi/extra-specs,CCI-MOC/nova,takeshineshiro/nova,usc-isi/nova,NewpTone/stacklab-nova,dims/nova,adelina-t/nova,phenoxim/nova,shail2810/nova,cyx1231st/nova,alvarolopez/nova,alexandrucoman/vbox-nova-driver,barnsnake351/nova,openstack/nova,affo/nova,Brocade-OpenSource/OpenStack-DNRM-Nova,affo/nova,maoy/zknova,thomasem/nova,ted-gould/nova,petrutlucian94/nova,viggates/nova,CEG-FYP-OpenStack/scheduler,TieWei/nova,salv-orlando/MyRepo,hanlind/nova,rajalokan/nova,NoBodyCam/TftpPxeBootBareMetal,ewindisch/nova,BeyondTheClouds/nova,devendermishrajio/nova,paulmathews/nova,tudorvio/nova
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import *
from sqlalchemy import Column, Integer, String, MetaData, Table
meta = MetaData()
#
# Tables to alter
#
#
instance_id = Column('instance_id', Integer())
instance_uuid = Column('instance_uuid', String(255))
def upgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.create_column(instance_uuid)
migrations.c.instance_id.drop()
def downgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.c.instance_uuid.drop()
migrations.create_column(instance_id)
Drop FK before dropping instance_id column.
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import *
from sqlalchemy import Column, Integer, String, MetaData, Table
meta = MetaData()
#
# Tables to alter
#
#
instance_id = Column('instance_id', Integer())
instance_uuid = Column('instance_uuid', String(255))
def upgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.create_column(instance_uuid)
if migrate_engine.name == "mysql":
migrate_engine.execute("ALTER TABLE migrations DROP FOREIGN KEY " \
"`migrations_ibfk_1`;")
migrations.c.instance_id.drop()
def downgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.c.instance_uuid.drop()
migrations.create_column(instance_id)
|
<commit_before># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import *
from sqlalchemy import Column, Integer, String, MetaData, Table
meta = MetaData()
#
# Tables to alter
#
#
instance_id = Column('instance_id', Integer())
instance_uuid = Column('instance_uuid', String(255))
def upgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.create_column(instance_uuid)
migrations.c.instance_id.drop()
def downgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.c.instance_uuid.drop()
migrations.create_column(instance_id)
<commit_msg>Drop FK before dropping instance_id column.<commit_after>
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import *
from sqlalchemy import Column, Integer, String, MetaData, Table
meta = MetaData()
#
# Tables to alter
#
#
instance_id = Column('instance_id', Integer())
instance_uuid = Column('instance_uuid', String(255))
def upgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.create_column(instance_uuid)
if migrate_engine.name == "mysql":
migrate_engine.execute("ALTER TABLE migrations DROP FOREIGN KEY " \
"`migrations_ibfk_1`;")
migrations.c.instance_id.drop()
def downgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.c.instance_uuid.drop()
migrations.create_column(instance_id)
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import *
from sqlalchemy import Column, Integer, String, MetaData, Table
meta = MetaData()
#
# Tables to alter
#
#
instance_id = Column('instance_id', Integer())
instance_uuid = Column('instance_uuid', String(255))
def upgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.create_column(instance_uuid)
migrations.c.instance_id.drop()
def downgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.c.instance_uuid.drop()
migrations.create_column(instance_id)
Drop FK before dropping instance_id column.
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import *
from sqlalchemy import Column, Integer, String, MetaData, Table
meta = MetaData()
#
# Tables to alter
#
#
instance_id = Column('instance_id', Integer())
instance_uuid = Column('instance_uuid', String(255))
def upgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.create_column(instance_uuid)
if migrate_engine.name == "mysql":
migrate_engine.execute("ALTER TABLE migrations DROP FOREIGN KEY " \
"`migrations_ibfk_1`;")
migrations.c.instance_id.drop()
def downgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.c.instance_uuid.drop()
migrations.create_column(instance_id)
|
<commit_before># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import *
from sqlalchemy import Column, Integer, String, MetaData, Table
meta = MetaData()
#
# Tables to alter
#
#
instance_id = Column('instance_id', Integer())
instance_uuid = Column('instance_uuid', String(255))
def upgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.create_column(instance_uuid)
migrations.c.instance_id.drop()
def downgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.c.instance_uuid.drop()
migrations.create_column(instance_id)
<commit_msg>Drop FK before dropping instance_id column.<commit_after># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import *
from sqlalchemy import Column, Integer, String, MetaData, Table
meta = MetaData()
#
# Tables to alter
#
#
instance_id = Column('instance_id', Integer())
instance_uuid = Column('instance_uuid', String(255))
def upgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.create_column(instance_uuid)
if migrate_engine.name == "mysql":
migrate_engine.execute("ALTER TABLE migrations DROP FOREIGN KEY " \
"`migrations_ibfk_1`;")
migrations.c.instance_id.drop()
def downgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.c.instance_uuid.drop()
migrations.create_column(instance_id)
|
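Note: MySQL will not drop a column that still participates in a foreign-key constraint, so the constraint must be dropped first; backends without that restriction skip the extra statement, which is why the migration guards on migrate_engine.name. A generic sketch in the record's sqlalchemy-migrate style (function, table and constraint names are illustrative):
def drop_column_with_fk(migrate_engine, table, column_name, fk_name):
    if migrate_engine.name == "mysql":
        migrate_engine.execute(
            "ALTER TABLE %s DROP FOREIGN KEY `%s`;" % (table.name, fk_name))
    getattr(table.c, column_name).drop()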
643f666468d3e378cc0b39e501c253e33c267f0f
|
tests/python/PyUnitTests.py
|
tests/python/PyUnitTests.py
|
#!/bin/sh
PYTHONPATH="%builddir%":"%srcdir%":$PYTHONPATH \
python "%srcdir%"/tests/python/UnitTests.py
|
#!/bin/sh
PYTHONPATH="%builddir%":"%srcdir%":$PYTHONPATH \
DYLD_LIBRARY_PATH="%builddir%/.libs":"%builddir%/gdtoa/.libs":$DYLD_LIBRARY_PATH \
python "%srcdir%"/tests/python/UnitTests.py
|
Set DYLD_LIBRARY_PATH to find locally built dynamic libraries.
|
Set DYLD_LIBRARY_PATH to find locally built dynamic libraries.
|
Python
|
bsd-3-clause
|
duncanmortimer/ledger,paulbdavis/ledger,duncanmortimer/ledger,duncanmortimer/ledger,afh/ledger,paulbdavis/ledger,duncanmortimer/ledger,duncanmortimer/ledger,afh/ledger,ledger/ledger,jwakely/ledger,ledger/ledger,jwakely/ledger,ledger/ledger,ledger/ledger,paulbdavis/ledger,jwakely/ledger,afh/ledger,paulbdavis/ledger,ledger/ledger,duncanmortimer/ledger,jwakely/ledger,jwakely/ledger,duncanmortimer/ledger,paulbdavis/ledger,paulbdavis/ledger,afh/ledger,jwakely/ledger,jwakely/ledger,afh/ledger,paulbdavis/ledger
|
#!/bin/sh
PYTHONPATH="%builddir%":"%srcdir%":$PYTHONPATH \
python "%srcdir%"/tests/python/UnitTests.py
Set DYLD_LIBRARY_PATH to find locally built dynamic libraries.
|
#!/bin/sh
PYTHONPATH="%builddir%":"%srcdir%":$PYTHONPATH \
DYLD_LIBRARY_PATH="%builddir%/.libs":"%builddir%/gdtoa/.libs":$DYLD_LIBRARY_PATH \
python "%srcdir%"/tests/python/UnitTests.py
|
<commit_before>#!/bin/sh
PYTHONPATH="%builddir%":"%srcdir%":$PYTHONPATH \
python "%srcdir%"/tests/python/UnitTests.py
<commit_msg>Set DYLD_LIBRARY_PATH to find locally built dynamic libraries.<commit_after>
|
#!/bin/sh
PYTHONPATH="%builddir%":"%srcdir%":$PYTHONPATH \
DYLD_LIBRARY_PATH="%builddir%/.libs":"%builddir%/gdtoa/.libs":$DYLD_LIBRARY_PATH \
python "%srcdir%"/tests/python/UnitTests.py
|
#!/bin/sh
PYTHONPATH="%builddir%":"%srcdir%":$PYTHONPATH \
python "%srcdir%"/tests/python/UnitTests.py
Set DYLD_LIBRARY_PATH to find locally built dynamic libraries.
#!/bin/sh
PYTHONPATH="%builddir%":"%srcdir%":$PYTHONPATH \
DYLD_LIBRARY_PATH="%builddir%/.libs":"%builddir%/gdtoa/.libs":$DYLD_LIBRARY_PATH \
python "%srcdir%"/tests/python/UnitTests.py
|
<commit_before>#!/bin/sh
PYTHONPATH="%builddir%":"%srcdir%":$PYTHONPATH \
python "%srcdir%"/tests/python/UnitTests.py
<commit_msg>Set DYLD_LIBRARY_PATH to find locally built dynamic libraries.<commit_after>#!/bin/sh
PYTHONPATH="%builddir%":"%srcdir%":$PYTHONPATH \
DYLD_LIBRARY_PATH="%builddir%/.libs":"%builddir%/gdtoa/.libs":$DYLD_LIBRARY_PATH \
python "%srcdir%"/tests/python/UnitTests.py
|
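Note: on macOS the dynamic loader consults DYLD_LIBRARY_PATH when resolving .dylib dependencies, so freshly built libraries in the build tree must be on that path before the test interpreter starts -- setting the variable from inside an already-running process is too late for libraries loaded at startup. The same setup expressed in Python (paths are illustrative):
import os
import subprocess
env = dict(os.environ)
env["DYLD_LIBRARY_PATH"] = ":".join(
    p for p in ("build/.libs", "build/gdtoa/.libs",
                env.get("DYLD_LIBRARY_PATH", "")) if p)
subprocess.run(["python", "tests/python/UnitTests.py"], env=env, check=True)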
358bdb98ba4a17c75773c7b09853580f5e7dd4e7
|
tests/people_test.py
|
tests/people_test.py
|
def test_team_has_members(fx_people, fx_teams):
assert fx_teams.clamp.members == {
fx_people.clamp_member_1,
fx_people.clamp_member_2,
fx_people.clamp_member_3,
fx_people.clamp_member_4
}
def test_person_has_awards(fx_people, fx_awards):
assert fx_people.peter_jackson.awards == {
fx_awards.hugo_award,
fx_awards.nebula_award
}
def test_person_made_works(fx_people, fx_works):
assert len(fx_people.clamp_member_1.credits) == 1
for asso in fx_people.clamp_member_1.credits:
assert asso.person == fx_people.clamp_member_1
assert asso.work == fx_works.cardcaptor_sakura
assert asso.role == 'Artist'
|
def test_team_has_members(fx_people, fx_teams):
assert fx_teams.clamp.members == {
fx_people.clamp_member_1,
fx_people.clamp_member_2,
fx_people.clamp_member_3,
fx_people.clamp_member_4
}
def test_person_has_awards(fx_people, fx_awards):
assert fx_people.peter_jackson.awards == {
fx_awards.hugo_award,
fx_awards.nebula_award
}
def test_person_made_works(fx_people, fx_works):
assert fx_people.clamp_member_1.credits == {
fx_works.skura_member_asso_1
}
|
Adjust test_person_made_works to keep consistency.
|
Adjust test_person_made_works to keep consistency.
|
Python
|
mit
|
item4/cliche,item4/cliche,clicheio/cliche,clicheio/cliche,clicheio/cliche
|
def test_team_has_members(fx_people, fx_teams):
assert fx_teams.clamp.members == {
fx_people.clamp_member_1,
fx_people.clamp_member_2,
fx_people.clamp_member_3,
fx_people.clamp_member_4
}
def test_person_has_awards(fx_people, fx_awards):
assert fx_people.peter_jackson.awards == {
fx_awards.hugo_award,
fx_awards.nebula_award
}
def test_person_made_works(fx_people, fx_works):
assert len(fx_people.clamp_member_1.credits) == 1
for asso in fx_people.clamp_member_1.credits:
assert asso.person == fx_people.clamp_member_1
assert asso.work == fx_works.cardcaptor_sakura
assert asso.role == 'Artist'
Adjust test_person_made_works to keep consistency.
|
def test_team_has_members(fx_people, fx_teams):
assert fx_teams.clamp.members == {
fx_people.clamp_member_1,
fx_people.clamp_member_2,
fx_people.clamp_member_3,
fx_people.clamp_member_4
}
def test_person_has_awards(fx_people, fx_awards):
assert fx_people.peter_jackson.awards == {
fx_awards.hugo_award,
fx_awards.nebula_award
}
def test_person_made_works(fx_people, fx_works):
assert fx_people.clamp_member_1.credits == {
fx_works.skura_member_asso_1
}
|
<commit_before>
def test_team_has_members(fx_people, fx_teams):
assert fx_teams.clamp.members == {
fx_people.clamp_member_1,
fx_people.clamp_member_2,
fx_people.clamp_member_3,
fx_people.clamp_member_4
}
def test_person_has_awards(fx_people, fx_awards):
assert fx_people.peter_jackson.awards == {
fx_awards.hugo_award,
fx_awards.nebula_award
}
def test_person_made_works(fx_people, fx_works):
assert len(fx_people.clamp_member_1.credits) == 1
for asso in fx_people.clamp_member_1.credits:
assert asso.person == fx_people.clamp_member_1
assert asso.work == fx_works.cardcaptor_sakura
assert asso.role == 'Artist'
<commit_msg>Adjust test_person_made_works to keep consistency.<commit_after>
|
def test_team_has_members(fx_people, fx_teams):
assert fx_teams.clamp.members == {
fx_people.clamp_member_1,
fx_people.clamp_member_2,
fx_people.clamp_member_3,
fx_people.clamp_member_4
}
def test_person_has_awards(fx_people, fx_awards):
assert fx_people.peter_jackson.awards == {
fx_awards.hugo_award,
fx_awards.nebula_award
}
def test_person_made_works(fx_people, fx_works):
assert fx_people.clamp_member_1.credits == {
fx_works.skura_member_asso_1
}
|
def test_team_has_members(fx_people, fx_teams):
assert fx_teams.clamp.members == {
fx_people.clamp_member_1,
fx_people.clamp_member_2,
fx_people.clamp_member_3,
fx_people.clamp_member_4
}
def test_person_has_awards(fx_people, fx_awards):
assert fx_people.peter_jackson.awards == {
fx_awards.hugo_award,
fx_awards.nebula_award
}
def test_person_made_works(fx_people, fx_works):
assert len(fx_people.clamp_member_1.credits) == 1
for asso in fx_people.clamp_member_1.credits:
assert asso.person == fx_people.clamp_member_1
assert asso.work == fx_works.cardcaptor_sakura
assert asso.role == 'Artist'
Adjust test_person_made_works to keep consistency.
def test_team_has_members(fx_people, fx_teams):
assert fx_teams.clamp.members == {
fx_people.clamp_member_1,
fx_people.clamp_member_2,
fx_people.clamp_member_3,
fx_people.clamp_member_4
}
def test_person_has_awards(fx_people, fx_awards):
assert fx_people.peter_jackson.awards == {
fx_awards.hugo_award,
fx_awards.nebula_award
}
def test_person_made_works(fx_people, fx_works):
assert fx_people.clamp_member_1.credits == {
fx_works.skura_member_asso_1
}
|
<commit_before>
def test_team_has_members(fx_people, fx_teams):
assert fx_teams.clamp.members == {
fx_people.clamp_member_1,
fx_people.clamp_member_2,
fx_people.clamp_member_3,
fx_people.clamp_member_4
}
def test_person_has_awards(fx_people, fx_awards):
assert fx_people.peter_jackson.awards == {
fx_awards.hugo_award,
fx_awards.nebula_award
}
def test_person_made_works(fx_people, fx_works):
assert len(fx_people.clamp_member_1.credits) == 1
for asso in fx_people.clamp_member_1.credits:
assert asso.person == fx_people.clamp_member_1
assert asso.work == fx_works.cardcaptor_sakura
assert asso.role == 'Artist'
<commit_msg>Adjust test_person_made_works to keep consistency.<commit_after>
def test_team_has_members(fx_people, fx_teams):
assert fx_teams.clamp.members == {
fx_people.clamp_member_1,
fx_people.clamp_member_2,
fx_people.clamp_member_3,
fx_people.clamp_member_4
}
def test_person_has_awards(fx_people, fx_awards):
assert fx_people.peter_jackson.awards == {
fx_awards.hugo_award,
fx_awards.nebula_award
}
def test_person_made_works(fx_people, fx_works):
assert fx_people.clamp_member_1.credits == {
fx_works.skura_member_asso_1
}
|
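Note (fixture and function names are hypothetical): comparing the whole relationship collection against a set of expected association objects in one assertion is order-insensitive and fails with a readable set diff, which is what the refactor above buys over looping with per-attribute asserts:
def test_credits_match(person, expected_assocs):
    assert set(person.credits) == set(expected_assocs)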
bc3b2db26181681e0a0da93721a950e000c2a367
|
app/errors.py
|
app/errors.py
|
import bugsnag
from sanic.handlers import ErrorHandler
from . import settings
if settings.BUGSNAG_API_KEY:
bugsnag.configure(
api_key=settings.BUGSNAG_API_KEY,
project_root="/app",
release_state=settings.ENVIRONMENT,
)
class BugsnagErrorHandler(ErrorHandler):
def default(self, request, exception):
bugsnag.notify(exception, meta_data={"request": request})
return super().default(request, exception)
|
import bugsnag
from sanic.handlers import ErrorHandler
from . import settings
if settings.BUGSNAG_API_KEY:
bugsnag.configure(
api_key=settings.BUGSNAG_API_KEY,
project_root="/app",
release_state=settings.ENVIRONMENT,
)
class BugsnagErrorHandler(ErrorHandler):
def default(self, request, exception):
bugsnag.notify(exception, meta_data={"request": request.url})
return super().default(request, exception)
|
Include URL of request in error reports
|
Include URL of request in error reports
|
Python
|
mit
|
jacebrowning/memegen,jacebrowning/memegen
|
import bugsnag
from sanic.handlers import ErrorHandler
from . import settings
if settings.BUGSNAG_API_KEY:
bugsnag.configure(
api_key=settings.BUGSNAG_API_KEY,
project_root="/app",
release_state=settings.ENVIRONMENT,
)
class BugsnagErrorHandler(ErrorHandler):
def default(self, request, exception):
bugsnag.notify(exception, meta_data={"request": request})
return super().default(request, exception)
Include URL of request in error reports
|
import bugsnag
from sanic.handlers import ErrorHandler
from . import settings
if settings.BUGSNAG_API_KEY:
bugsnag.configure(
api_key=settings.BUGSNAG_API_KEY,
project_root="/app",
release_state=settings.ENVIRONMENT,
)
class BugsnagErrorHandler(ErrorHandler):
def default(self, request, exception):
bugsnag.notify(exception, meta_data={"request": request.url})
return super().default(request, exception)
|
<commit_before>import bugsnag
from sanic.handlers import ErrorHandler
from . import settings
if settings.BUGSNAG_API_KEY:
bugsnag.configure(
api_key=settings.BUGSNAG_API_KEY,
project_root="/app",
release_state=settings.ENVIRONMENT,
)
class BugsnagErrorHandler(ErrorHandler):
def default(self, request, exception):
bugsnag.notify(exception, meta_data={"request": request})
return super().default(request, exception)
<commit_msg>Include URL of request in error reports<commit_after>
|
import bugsnag
from sanic.handlers import ErrorHandler
from . import settings
if settings.BUGSNAG_API_KEY:
bugsnag.configure(
api_key=settings.BUGSNAG_API_KEY,
project_root="/app",
release_state=settings.ENVIRONMENT,
)
class BugsnagErrorHandler(ErrorHandler):
def default(self, request, exception):
bugsnag.notify(exception, meta_data={"request": request.url})
return super().default(request, exception)
|
import bugsnag
from sanic.handlers import ErrorHandler
from . import settings
if settings.BUGSNAG_API_KEY:
bugsnag.configure(
api_key=settings.BUGSNAG_API_KEY,
project_root="/app",
release_state=settings.ENVIRONMENT,
)
class BugsnagErrorHandler(ErrorHandler):
def default(self, request, exception):
bugsnag.notify(exception, meta_data={"request": request})
return super().default(request, exception)
Include URL of request in error reports
import bugsnag
from sanic.handlers import ErrorHandler
from . import settings
if settings.BUGSNAG_API_KEY:
bugsnag.configure(
api_key=settings.BUGSNAG_API_KEY,
project_root="/app",
release_state=settings.ENVIRONMENT,
)
class BugsnagErrorHandler(ErrorHandler):
def default(self, request, exception):
bugsnag.notify(exception, meta_data={"request": request.url})
return super().default(request, exception)
|
<commit_before>import bugsnag
from sanic.handlers import ErrorHandler
from . import settings
if settings.BUGSNAG_API_KEY:
bugsnag.configure(
api_key=settings.BUGSNAG_API_KEY,
project_root="/app",
release_state=settings.ENVIRONMENT,
)
class BugsnagErrorHandler(ErrorHandler):
def default(self, request, exception):
bugsnag.notify(exception, meta_data={"request": request})
return super().default(request, exception)
<commit_msg>Include URL of request in error reports<commit_after>import bugsnag
from sanic.handlers import ErrorHandler
from . import settings
if settings.BUGSNAG_API_KEY:
bugsnag.configure(
api_key=settings.BUGSNAG_API_KEY,
project_root="/app",
release_state=settings.ENVIRONMENT,
)
class BugsnagErrorHandler(ErrorHandler):
def default(self, request, exception):
bugsnag.notify(exception, meta_data={"request": request.url})
return super().default(request, exception)
|
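Note: error reporters serialize meta_data for transport, and a raw request object is generally not JSON-serializable (and can carry sensitive headers or bodies), so the fix forwards only request.url. A hedged sketch of selecting explicit, serializable fields -- the method entry below is illustrative, not part of the commit:
def request_metadata(request):
    return {
        "url": request.url,        # plain string, safe to serialize
        "method": request.method,  # optional extra one might include
    }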
faf451637dffe420f47932621a4035347c978c70
|
msmbuilder/example_datasets/base.py
|
msmbuilder/example_datasets/base.py
|
"""Base IO code for all datasets
"""
# Copyright (c) 2007 David Cournapeau <cournape@gmail.com>
# 2010 Fabian Pedregosa <fabian.pedregosa@inria.fr>
# 2010 Olivier Grisel <olivier.grisel@ensta.org>
# License: BSD 3 clause
# Adapted for msmbuilder from https://github.com/scikit-learn/scikit-learn/blob/master/sklearn/datasets/base.py
import warnings
try:
from msmb_data.base import (Bunch, Dataset, get_data_home,
clear_data_home, retry)
except ImportError:
warnings.warn("Please install msmb_data", DeprecationWarning)
from msmb_data.old_base import (Bunch, Dataset, get_data_home,
clear_data_home, retry)
|
"""Base IO code for all datasets
"""
# Copyright (c) 2007 David Cournapeau <cournape@gmail.com>
# 2010 Fabian Pedregosa <fabian.pedregosa@inria.fr>
# 2010 Olivier Grisel <olivier.grisel@ensta.org>
# License: BSD 3 clause
# Adapted for msmbuilder from https://github.com/scikit-learn/scikit-learn/blob/master/sklearn/datasets/base.py
import warnings
try:
from msmb_data.base import (Bunch, Dataset, get_data_home,
clear_data_home, retry)
except ImportError:
warnings.warn("Please install msmb_data", DeprecationWarning)
from .old_base import (Bunch, Dataset, get_data_home,
clear_data_home, retry)
|
Fix for when msmb_data is not available
|
Fix for when msmb_data is not available
|
Python
|
lgpl-2.1
|
msmbuilder/msmbuilder,dr-nate/msmbuilder,msmbuilder/msmbuilder,rafwiewiora/msmbuilder,brookehus/msmbuilder,cxhernandez/msmbuilder,msmbuilder/msmbuilder,brookehus/msmbuilder,cxhernandez/msmbuilder,msultan/msmbuilder,Eigenstate/msmbuilder,peastman/msmbuilder,mpharrigan/mixtape,mpharrigan/mixtape,rafwiewiora/msmbuilder,peastman/msmbuilder,dr-nate/msmbuilder,dr-nate/msmbuilder,cxhernandez/msmbuilder,peastman/msmbuilder,rafwiewiora/msmbuilder,mpharrigan/mixtape,msultan/msmbuilder,Eigenstate/msmbuilder,msmbuilder/msmbuilder,brookehus/msmbuilder,dr-nate/msmbuilder,peastman/msmbuilder,brookehus/msmbuilder,Eigenstate/msmbuilder,mpharrigan/mixtape,cxhernandez/msmbuilder,dr-nate/msmbuilder,msultan/msmbuilder,mpharrigan/mixtape,rafwiewiora/msmbuilder,msultan/msmbuilder,msmbuilder/msmbuilder,Eigenstate/msmbuilder,peastman/msmbuilder,rafwiewiora/msmbuilder,cxhernandez/msmbuilder,Eigenstate/msmbuilder,msultan/msmbuilder,brookehus/msmbuilder
|
"""Base IO code for all datasets
"""
# Copyright (c) 2007 David Cournapeau <cournape@gmail.com>
# 2010 Fabian Pedregosa <fabian.pedregosa@inria.fr>
# 2010 Olivier Grisel <olivier.grisel@ensta.org>
# License: BSD 3 clause
# Adapted for msmbuilder from https://github.com/scikit-learn/scikit-learn/blob/master/sklearn/datasets/base.py
import warnings
try:
from msmb_data.base import (Bunch, Dataset, get_data_home,
clear_data_home, retry)
except ImportError:
warnings.warn("Please install msmb_data", DeprecationWarning)
from msmb_data.old_base import (Bunch, Dataset, get_data_home,
clear_data_home, retry)
Fix for when msmb_data is not available
|
"""Base IO code for all datasets
"""
# Copyright (c) 2007 David Cournapeau <cournape@gmail.com>
# 2010 Fabian Pedregosa <fabian.pedregosa@inria.fr>
# 2010 Olivier Grisel <olivier.grisel@ensta.org>
# License: BSD 3 clause
# Adapted for msmbuilder from https://github.com/scikit-learn/scikit-learn/blob/master/sklearn/datasets/base.py
import warnings
try:
from msmb_data.base import (Bunch, Dataset, get_data_home,
clear_data_home, retry)
except ImportError:
warnings.warn("Please install msmb_data", DeprecationWarning)
from .old_base import (Bunch, Dataset, get_data_home,
clear_data_home, retry)
|
<commit_before>"""Base IO code for all datasets
"""
# Copyright (c) 2007 David Cournapeau <cournape@gmail.com>
# 2010 Fabian Pedregosa <fabian.pedregosa@inria.fr>
# 2010 Olivier Grisel <olivier.grisel@ensta.org>
# License: BSD 3 clause
# Adapted for msmbuilder from https://github.com/scikit-learn/scikit-learn/blob/master/sklearn/datasets/base.py
import warnings
try:
from msmb_data.base import (Bunch, Dataset, get_data_home,
clear_data_home, retry)
except ImportError:
warnings.warn("Please install msmb_data", DeprecationWarning)
from msmb_data.old_base import (Bunch, Dataset, get_data_home,
clear_data_home, retry)
<commit_msg>Fix for when msmb_data is not available<commit_after>
|
"""Base IO code for all datasets
"""
# Copyright (c) 2007 David Cournapeau <cournape@gmail.com>
# 2010 Fabian Pedregosa <fabian.pedregosa@inria.fr>
# 2010 Olivier Grisel <olivier.grisel@ensta.org>
# License: BSD 3 clause
# Adapted for msmbuilder from https://github.com/scikit-learn/scikit-learn/blob/master/sklearn/datasets/base.py
import warnings
try:
from msmb_data.base import (Bunch, Dataset, get_data_home,
clear_data_home, retry)
except ImportError:
warnings.warn("Please install msmb_data", DeprecationWarning)
from .old_base import (Bunch, Dataset, get_data_home,
clear_data_home, retry)
|
"""Base IO code for all datasets
"""
# Copyright (c) 2007 David Cournapeau <cournape@gmail.com>
# 2010 Fabian Pedregosa <fabian.pedregosa@inria.fr>
# 2010 Olivier Grisel <olivier.grisel@ensta.org>
# License: BSD 3 clause
# Adapted for msmbuilder from https://github.com/scikit-learn/scikit-learn/blob/master/sklearn/datasets/base.py
import warnings
try:
from msmb_data.base import (Bunch, Dataset, get_data_home,
clear_data_home, retry)
except ImportError:
warnings.warn("Please install msmb_data", DeprecationWarning)
from msmb_data.old_base import (Bunch, Dataset, get_data_home,
clear_data_home, retry)
Fix for when msmb_data is not available"""Base IO code for all datasets
"""
# Copyright (c) 2007 David Cournapeau <cournape@gmail.com>
# 2010 Fabian Pedregosa <fabian.pedregosa@inria.fr>
# 2010 Olivier Grisel <olivier.grisel@ensta.org>
# License: BSD 3 clause
# Adapted for msmbuilder from https://github.com/scikit-learn/scikit-learn/blob/master/sklearn/datasets/base.py
import warnings
try:
from msmb_data.base import (Bunch, Dataset, get_data_home,
clear_data_home, retry)
except ImportError:
warnings.warn("Please install msmb_data", DeprecationWarning)
from .old_base import (Bunch, Dataset, get_data_home,
clear_data_home, retry)
|
<commit_before>"""Base IO code for all datasets
"""
# Copyright (c) 2007 David Cournapeau <cournape@gmail.com>
# 2010 Fabian Pedregosa <fabian.pedregosa@inria.fr>
# 2010 Olivier Grisel <olivier.grisel@ensta.org>
# License: BSD 3 clause
# Adapted for msmbuilder from https://github.com/scikit-learn/scikit-learn/blob/master/sklearn/datasets/base.py
import warnings
try:
from msmb_data.base import (Bunch, Dataset, get_data_home,
clear_data_home, retry)
except ImportError:
warnings.warn("Please install msmb_data", DeprecationWarning)
from msmb_data.old_base import (Bunch, Dataset, get_data_home,
clear_data_home, retry)
<commit_msg>Fix for when msmb_data is not available<commit_after>"""Base IO code for all datasets
"""
# Copyright (c) 2007 David Cournapeau <cournape@gmail.com>
# 2010 Fabian Pedregosa <fabian.pedregosa@inria.fr>
# 2010 Olivier Grisel <olivier.grisel@ensta.org>
# License: BSD 3 clause
# Adapted for msmbuilder from https://github.com/scikit-learn/scikit-learn/blob/master/sklearn/datasets/base.py
import warnings
try:
from msmb_data.base import (Bunch, Dataset, get_data_home,
clear_data_home, retry)
except ImportError:
warnings.warn("Please install msmb_data", DeprecationWarning)
from .old_base import (Bunch, Dataset, get_data_home,
clear_data_home, retry)
|
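The fix above works because a relative import resolves against the containing package rather than the missing msmb_data distribution, so the fallback succeeds exactly when the first import fails. The same pattern in isolation, as a sketch with hypothetical module names (fast_backend and ._fallback are placeholders, not real dependencies):

import warnings

try:
    # Preferred path: the optional standalone package, if installed.
    from fast_backend import transform  # hypothetical dependency
except ImportError:
    warnings.warn("fast_backend not installed; using bundled fallback",
                  ImportWarning)
    # The relative import resolves inside this package, so it succeeds
    # even when fast_backend is absent. (Relative imports only work from
    # within a package, not from a top-level script.)
    from ._fallback import transform  # hypothetical vendored copy
|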
c87bbd461794c8d18c9b9811e44306f02e3309d3
|
comics/comics/kalscartoon.py
|
comics/comics/kalscartoon.py
|
from dateutil.parser import parse
from comics.aggregator.crawler import CrawlerBase, CrawlerResult
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = "KAL's Cartoon"
language = 'en'
url = 'http://www.economist.com'
start_date = '2006-01-05'
rights = 'Kevin Kallaugher'
class Crawler(CrawlerBase):
history_capable_days = 1000
schedule = 'Th'
def crawl(self, pub_date):
article_list = self.parse_page('http://www.economist.com/research/articlesBySubject/display.cfm?id=8717275&startRow=1&endrow=500')
article_list.remove('.web-only')
for block in article_list.root.cssselect('.article-list .block'):
date = block.cssselect('.date')[0]
if pub_date != parse(date.text_content()).date():
continue
anchor = blockdate.cssselect('h2 a')[0]
if "KAL's cartoon" not in anchor.text_content():
continue
page = self.parse_page(anchor.get('href'))
return CrawlerResult(page.src('.content-image-full img'))
|
import re
from comics.aggregator.crawler import CrawlerBase, CrawlerResult
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = "KAL's Cartoon"
language = 'en'
url = 'http://www.economist.com'
start_date = '2006-01-05'
rights = 'Kevin Kallaugher'
class Crawler(CrawlerBase):
history_capable_days = 1000
schedule = 'Th'
def crawl(self, pub_date):
article_list = self.parse_page('http://www.economist.com/research/articlesBySubject/display.cfm?id=8717275&startRow=1&endrow=500')
article_list.remove('.web-only')
for block in article_list.root.cssselect('.article-list .block'):
date = block.cssselect('.date')[0].text_content()
regexp = pub_date.strftime('%b %d(st|nd|rd|th) %Y')
if not re.match(regexp, date):
continue
anchor = block.cssselect('h2 a')[0]
if "KAL's cartoon" not in anchor.text_content():
continue
page = self.parse_page(anchor.get('href'))
return CrawlerResult(page.src('.content-image-full img'))
|
Switch to regexp based matching of date instead of dateutil
|
Switch to regexp based matching of date instead of dateutil
|
Python
|
agpl-3.0
|
datagutten/comics,klette/comics,jodal/comics,jodal/comics,jodal/comics,datagutten/comics,klette/comics,klette/comics,datagutten/comics,datagutten/comics,jodal/comics
|
from dateutil.parser import parse
from comics.aggregator.crawler import CrawlerBase, CrawlerResult
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = "KAL's Cartoon"
language = 'en'
url = 'http://www.economist.com'
start_date = '2006-01-05'
rights = 'Kevin Kallaugher'
class Crawler(CrawlerBase):
history_capable_days = 1000
schedule = 'Th'
def crawl(self, pub_date):
article_list = self.parse_page('http://www.economist.com/research/articlesBySubject/display.cfm?id=8717275&startRow=1&endrow=500')
article_list.remove('.web-only')
for block in article_list.root.cssselect('.article-list .block'):
date = block.cssselect('.date')[0]
if pub_date != parse(date.text_content()).date():
continue
anchor = blockdate.cssselect('h2 a')[0]
if "KAL's cartoon" not in anchor.text_content():
continue
page = self.parse_page(anchor.get('href'))
return CrawlerResult(page.src('.content-image-full img'))
Switch to regexp based matching of date instead of dateutil
|
import re
from comics.aggregator.crawler import CrawlerBase, CrawlerResult
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = "KAL's Cartoon"
language = 'en'
url = 'http://www.economist.com'
start_date = '2006-01-05'
rights = 'Kevin Kallaugher'
class Crawler(CrawlerBase):
history_capable_days = 1000
schedule = 'Th'
def crawl(self, pub_date):
article_list = self.parse_page('http://www.economist.com/research/articlesBySubject/display.cfm?id=8717275&startRow=1&endrow=500')
article_list.remove('.web-only')
for block in article_list.root.cssselect('.article-list .block'):
date = block.cssselect('.date')[0].text_content()
regexp = pub_date.strftime('%b %d(st|nd|rd|th) %Y')
if not re.match(regexp, date):
continue
anchor = block.cssselect('h2 a')[0]
if "KAL's cartoon" not in anchor.text_content():
continue
page = self.parse_page(anchor.get('href'))
return CrawlerResult(page.src('.content-image-full img'))
|
<commit_before>from dateutil.parser import parse
from comics.aggregator.crawler import CrawlerBase, CrawlerResult
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = "KAL's Cartoon"
language = 'en'
url = 'http://www.economist.com'
start_date = '2006-01-05'
rights = 'Kevin Kallaugher'
class Crawler(CrawlerBase):
history_capable_days = 1000
schedule = 'Th'
def crawl(self, pub_date):
article_list = self.parse_page('http://www.economist.com/research/articlesBySubject/display.cfm?id=8717275&startRow=1&endrow=500')
article_list.remove('.web-only')
for block in article_list.root.cssselect('.article-list .block'):
date = block.cssselect('.date')[0]
if pub_date != parse(date.text_content()).date():
continue
anchor = blockdate.cssselect('h2 a')[0]
if "KAL's cartoon" not in anchor.text_content():
continue
page = self.parse_page(anchor.get('href'))
return CrawlerResult(page.src('.content-image-full img'))
<commit_msg>Switch to regexp based matching of date instead of dateutil<commit_after>
|
import re
from comics.aggregator.crawler import CrawlerBase, CrawlerResult
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = "KAL's Cartoon"
language = 'en'
url = 'http://www.economist.com'
start_date = '2006-01-05'
rights = 'Kevin Kallaugher'
class Crawler(CrawlerBase):
history_capable_days = 1000
schedule = 'Th'
def crawl(self, pub_date):
article_list = self.parse_page('http://www.economist.com/research/articlesBySubject/display.cfm?id=8717275&startRow=1&endrow=500')
article_list.remove('.web-only')
for block in article_list.root.cssselect('.article-list .block'):
date = block.cssselect('.date')[0].text_content()
regexp = pub_date.strftime('%b %d(st|nd|rd|th) %Y')
if not re.match(regexp, date):
continue
anchor = block.cssselect('h2 a')[0]
if "KAL's cartoon" not in anchor.text_content():
continue
page = self.parse_page(anchor.get('href'))
return CrawlerResult(page.src('.content-image-full img'))
|
from dateutil.parser import parse
from comics.aggregator.crawler import CrawlerBase, CrawlerResult
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = "KAL's Cartoon"
language = 'en'
url = 'http://www.economist.com'
start_date = '2006-01-05'
rights = 'Kevin Kallaugher'
class Crawler(CrawlerBase):
history_capable_days = 1000
schedule = 'Th'
def crawl(self, pub_date):
article_list = self.parse_page('http://www.economist.com/research/articlesBySubject/display.cfm?id=8717275&startRow=1&endrow=500')
article_list.remove('.web-only')
for block in article_list.root.cssselect('.article-list .block'):
date = block.cssselect('.date')[0]
if pub_date != parse(date.text_content()).date():
continue
anchor = blockdate.cssselect('h2 a')[0]
if "KAL's cartoon" not in anchor.text_content():
continue
page = self.parse_page(anchor.get('href'))
return CrawlerResult(page.src('.content-image-full img'))
Switch to regexp based matching of date instead of dateutilimport re
from comics.aggregator.crawler import CrawlerBase, CrawlerResult
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = "KAL's Cartoon"
language = 'en'
url = 'http://www.economist.com'
start_date = '2006-01-05'
rights = 'Kevin Kallaugher'
class Crawler(CrawlerBase):
history_capable_days = 1000
schedule = 'Th'
def crawl(self, pub_date):
article_list = self.parse_page('http://www.economist.com/research/articlesBySubject/display.cfm?id=8717275&startRow=1&endrow=500')
article_list.remove('.web-only')
for block in article_list.root.cssselect('.article-list .block'):
date = block.cssselect('.date')[0].text_content()
regexp = pub_date.strftime('%b %d(st|nd|rd|th) %Y')
if not re.match(regexp, date):
continue
anchor = block.cssselect('h2 a')[0]
if "KAL's cartoon" not in anchor.text_content():
continue
page = self.parse_page(anchor.get('href'))
return CrawlerResult(page.src('.content-image-full img'))
|
<commit_before>from dateutil.parser import parse
from comics.aggregator.crawler import CrawlerBase, CrawlerResult
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = "KAL's Cartoon"
language = 'en'
url = 'http://www.economist.com'
start_date = '2006-01-05'
rights = 'Kevin Kallaugher'
class Crawler(CrawlerBase):
history_capable_days = 1000
schedule = 'Th'
def crawl(self, pub_date):
article_list = self.parse_page('http://www.economist.com/research/articlesBySubject/display.cfm?id=8717275&startRow=1&endrow=500')
article_list.remove('.web-only')
for block in article_list.root.cssselect('.article-list .block'):
date = block.cssselect('.date')[0]
if pub_date != parse(date.text_content()).date():
continue
anchor = blockdate.cssselect('h2 a')[0]
if "KAL's cartoon" not in anchor.text_content():
continue
page = self.parse_page(anchor.get('href'))
return CrawlerResult(page.src('.content-image-full img'))
<commit_msg>Switch to regexp based matching of date instead of dateutil<commit_after>import re
from comics.aggregator.crawler import CrawlerBase, CrawlerResult
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = "KAL's Cartoon"
language = 'en'
url = 'http://www.economist.com'
start_date = '2006-01-05'
rights = 'Kevin Kallaugher'
class Crawler(CrawlerBase):
history_capable_days = 1000
schedule = 'Th'
def crawl(self, pub_date):
article_list = self.parse_page('http://www.economist.com/research/articlesBySubject/display.cfm?id=8717275&startRow=1&endrow=500')
article_list.remove('.web-only')
for block in article_list.root.cssselect('.article-list .block'):
date = block.cssselect('.date')[0].text_content()
regexp = pub_date.strftime('%b %d(st|nd|rd|th) %Y')
if not re.match(regexp, date):
continue
anchor = block.cssselect('h2 a')[0]
if "KAL's cartoon" not in anchor.text_content():
continue
page = self.parse_page(anchor.get('href'))
return CrawlerResult(page.src('.content-image-full img'))
|
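The crawler above turns the publication date into a regular expression and matches it against the page's date text, sidestepping dateutil parsing entirely. That matching step in isolation, as a sketch with an assumed sample date:

import datetime
import re

pub_date = datetime.date(2006, 1, 26)  # assumed example publication date
# %d is zero-padded, so the pattern is exact for two-digit days; the
# (st|nd|rd|th) group absorbs the ordinal suffix in the page text.
pattern = pub_date.strftime('%b %d(st|nd|rd|th) %Y')
print(bool(re.match(pattern, 'Jan 26th 2006')))  # True
|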