Dataset schema (string length ranges and class counts per column):

| column | type | lengths / values |
|---|---|---|
| commit | stringlengths | 40–40 |
| old_file | stringlengths | 4–118 |
| new_file | stringlengths | 4–118 |
| old_contents | stringlengths | 0–2.94k |
| new_contents | stringlengths | 1–4.43k |
| subject | stringlengths | 15–444 |
| message | stringlengths | 16–3.45k |
| lang | stringclasses | 1 value |
| license | stringclasses | 13 values |
| repos | stringlengths | 5–43.2k |
| prompt | stringlengths | 17–4.58k |
| response | stringlengths | 1–4.43k |
| prompt_tagged | stringlengths | 58–4.62k |
| response_tagged | stringlengths | 1–4.43k |
| text | stringlengths | 132–7.29k |
| text_tagged | stringlengths | 173–7.33k |
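Rows with this schema can be loaded directly with the `datasets` library; a minimal sketch, where the dataset id `user/commit-dataset` is a placeholder rather than the real hub path:

```python
from datasets import load_dataset

# Placeholder dataset id; substitute the actual hub path of this commit corpus.
ds = load_dataset("user/commit-dataset", split="train")
row = ds[0]
print(row["commit"], row["old_file"], row["subject"])
```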
c89e30d1a33df2d9d8c5ceb03df98d29b3b08724
|
spacy/tests/en/test_exceptions.py
|
spacy/tests/en/test_exceptions.py
|
# coding: utf-8
"""Test that tokenizer exceptions are handled correctly."""
from __future__ import unicode_literals
import pytest
@pytest.mark.parametrize('text', ["e.g.", "p.m.", "Jan.", "Dec.", "Inc."])
def test_tokenizer_handles_abbr(en_tokenizer, text):
tokens = en_tokenizer(text)
assert len(tokens) == 1
def test_tokenizer_handles_exc_in_text(en_tokenizer):
text = "It's mediocre i.e. bad."
tokens = en_tokenizer(text)
assert len(tokens) == 6
assert tokens[3].text == "i.e."
|
# coding: utf-8
"""Test that tokenizer exceptions are handled correctly."""
from __future__ import unicode_literals
import pytest
@pytest.mark.parametrize('text', ["e.g.", "p.m.", "Jan.", "Dec.", "Inc."])
def test_tokenizer_handles_abbr(en_tokenizer, text):
tokens = en_tokenizer(text)
assert len(tokens) == 1
def test_tokenizer_handles_exc_in_text(en_tokenizer):
text = "It's mediocre i.e. bad."
tokens = en_tokenizer(text)
assert len(tokens) == 6
assert tokens[3].text == "i.e."
@pytest.mark.parametrize('text', ["1am", "12a.m.", "11p.m.", "4pm"])
def test_tokenizer_handles_times(en_tokenizer, text):
tokens = en_tokenizer(text)
assert len(tokens) == 2
assert tokens[1].lemma_ in ["a.m.", "p.m."]
|
Add test for English time exceptions ("1a.m." etc.)
|
Add test for English time exceptions ("1a.m." etc.)
|
Python
|
mit
|
honnibal/spaCy,spacy-io/spaCy,Gregory-Howard/spaCy,recognai/spaCy,raphael0202/spaCy,aikramer2/spaCy,explosion/spaCy,aikramer2/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,honnibal/spaCy,explosion/spaCy,raphael0202/spaCy,spacy-io/spaCy,honnibal/spaCy,oroszgy/spaCy.hu,aikramer2/spaCy,Gregory-Howard/spaCy,spacy-io/spaCy,raphael0202/spaCy,recognai/spaCy,raphael0202/spaCy,recognai/spaCy,Gregory-Howard/spaCy,explosion/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,explosion/spaCy,recognai/spaCy,recognai/spaCy,recognai/spaCy,spacy-io/spaCy,honnibal/spaCy,aikramer2/spaCy,oroszgy/spaCy.hu,oroszgy/spaCy.hu,spacy-io/spaCy,explosion/spaCy,oroszgy/spaCy.hu,aikramer2/spaCy,raphael0202/spaCy,aikramer2/spaCy,explosion/spaCy,oroszgy/spaCy.hu,Gregory-Howard/spaCy,Gregory-Howard/spaCy
|
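The `prompt`, `response`, `text`, and tagged columns are deterministic concatenations of the core cells; a sketch of the derivation, mirroring the `<commit_before>`/`<commit_msg>`/`<commit_after>` templates visible in the raw rows (the helper name and the exact joining whitespace are assumptions):

```python
def derive_columns(core):
    # core: a row dict with old_contents, new_contents, and message.
    # Joining whitespace is approximate; the raw `text` cells show the
    # message fused directly between the old and new file contents.
    prompt = core["old_contents"] + core["message"]
    response = core["new_contents"]
    return {
        "prompt": prompt,
        "response": response,
        "text": prompt + response,
        "prompt_tagged": ("<commit_before>" + core["old_contents"]
                          + "<commit_msg>" + core["message"]
                          + "<commit_after>"),
        "response_tagged": response,
        "text_tagged": ("<commit_before>" + core["old_contents"]
                        + "<commit_msg>" + core["message"]
                        + "<commit_after>" + core["new_contents"]),
    }
```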
99b1610fad7224d2efe03547c5114d2f046f50ca
|
bin/cgroup-limits.py
|
bin/cgroup-limits.py
|
#!/usr/bin/python
env_vars = {}
def read_file(path):
try:
with open(path, 'r') as f:
return f.read().strip()
except IOError:
return None
def get_memory_limit():
limit = read_file('/sys/fs/cgroup/memory/memory.limit_in_bytes')
if limit:
env_vars['MEMORY_LIMIT_IN_BYTES'] = limit
def get_number_of_cores():
core_count = 0
line = read_file('/sys/fs/cgroup/cpuset/cpuset.cpus')
if line is None:
return
for group in line.split(','):
core_ids = list(map(int, group.split('-')))
if len(core_ids) == 2:
core_count += core_ids[1] - core_ids[0] + 1
else:
core_count += 1
env_vars['NUMBER_OF_CORES'] = str(core_count)
get_memory_limit()
get_number_of_cores()
print("MAX_MEMORY_LIMIT_IN_BYTES=9223372036854775807")
for item in env_vars.items():
print("=".join(item))
|
#!/usr/bin/python
from __future__ import print_function
import sys
env_vars = {}
def read_file(path):
try:
with open(path, 'r') as f:
return f.read().strip()
except IOError:
return None
def get_memory_limit():
limit = read_file('/sys/fs/cgroup/memory/memory.limit_in_bytes')
if limit is None:
print("Warning: Can't detect memory limit from cgroups",
file=sys.stderr)
return
env_vars['MEMORY_LIMIT_IN_BYTES'] = limit
def get_number_of_cores():
core_count = 0
line = read_file('/sys/fs/cgroup/cpuset/cpuset.cpus')
if line is None:
print("Warning: Can't detect number of CPU cores from cgroups",
file=sys.stderr)
return
for group in line.split(','):
core_ids = list(map(int, group.split('-')))
if len(core_ids) == 2:
core_count += core_ids[1] - core_ids[0] + 1
else:
core_count += 1
env_vars['NUMBER_OF_CORES'] = str(core_count)
get_memory_limit()
get_number_of_cores()
print("MAX_MEMORY_LIMIT_IN_BYTES=9223372036854775807")
for item in env_vars.items():
print("=".join(item))
|
Print warnings to standard error
|
Print warnings to standard error
|
Python
|
apache-2.0
|
soltysh/sti-base,mfojtik/sti-base,hhorak/sti-base,bparees/sti-base,openshift/sti-base,sclorg/s2i-base-container,openshift/sti-base,mfojtik/sti-base,bparees/sti-base
|
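The core-count logic in this row parses cpuset strings such as "0-3,5", where a dash denotes an inclusive range of core ids; the same range arithmetic extracted as a standalone sketch:

```python
def count_cores(cpuset: str) -> int:
    # "0-3,5" -> cores 0, 1, 2, 3 plus core 5 -> 5 cores in total.
    count = 0
    for group in cpuset.split(','):
        core_ids = list(map(int, group.split('-')))
        if len(core_ids) == 2:
            count += core_ids[1] - core_ids[0] + 1
        else:
            count += 1
    return count

assert count_cores("0-3,5") == 5
```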
e93dadc8215f3946e4e7b64ca8ab3481fcf3c197
|
froide/foirequestfollower/apps.py
|
froide/foirequestfollower/apps.py
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class FoiRequestFollowerConfig(AppConfig):
name = 'froide.foirequestfollower'
verbose_name = _('FOI Request Follower')
def ready(self):
from froide.account import account_canceled
import froide.foirequestfollower.signals # noqa
account_canceled.connect(cancel_user)
def cancel_user(sender, user=None, **kwargs):
from .models import FoiRequestFollower
if user is None:
return
FoiRequestFollower.objects.filter(user=user).delete()
|
import json
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class FoiRequestFollowerConfig(AppConfig):
name = 'froide.foirequestfollower'
verbose_name = _('FOI Request Follower')
def ready(self):
from froide.account import account_canceled
import froide.foirequestfollower.signals # noqa
from froide.account.export import registry
account_canceled.connect(cancel_user)
registry.register(export_user_data)
def cancel_user(sender, user=None, **kwargs):
from .models import FoiRequestFollower
if user is None:
return
FoiRequestFollower.objects.filter(user=user).delete()
def export_user_data(user):
from .models import FoiRequestFollower
from froide.foirequest.models.request import get_absolute_domain_short_url
following = FoiRequestFollower.objects.filter(
user=user
)
if not following:
return
yield ('followed_requests.json', json.dumps([
{
'timestamp': frf.timestamp.isoformat(),
'url': get_absolute_domain_short_url(frf.request_id),
}
for frf in following]).encode('utf-8')
)
|
Add user data export for foirequest follower
|
Add user data export for foirequest follower
|
Python
|
mit
|
fin/froide,fin/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide,stefanw/froide,stefanw/froide,fin/froide
|
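`export_user_data` is a generator yielding `(filename, bytes)` pairs and gets registered with froide's export registry; a sketch of how registered exporters could be drained into a zip archive (the consumer below is illustrative, not froide's actual export code):

```python
import io
import zipfile

def collect_exports(exporters, user):
    # Each exporter is a generator of (filename, bytes) pairs, like
    # export_user_data above; exporters that return early contribute nothing.
    buf = io.BytesIO()
    with zipfile.ZipFile(buf, "w") as archive:
        for exporter in exporters:
            for filename, payload in exporter(user):
                archive.writestr(filename, payload)
    return buf.getvalue()
```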
482205f4235f5a741e55fd560bab4a4d75cb5303
|
versailes2geojson.py
|
versailes2geojson.py
|
import pandas as pd
import geojson
from geojson import FeatureCollection, Feature, Point
# read in original data
csv = pd.read_csv("versaille_stock.csv", sep="\t")
# fetch properties, remove Long and Lat
props = list(csv.columns.values)
props = [p for p in props if p not in ["Longitude",
"Latitude"]]
features = []
for row in csv.iterrows():
accession = row[1]
lat = accession["Latitude"]
lon = accession["Longitude"]
# automatically populate accession properties
feature_props = {p: accession[p] for p in props}
f = Feature(
geometry = Point((lat,lon))
)
features.append(f)
fc = FeatureCollection(features)
with open("accession_locations.json", "w") as fh:
geojson.dump(fc, fh)
|
import pandas as pd
import geojson
from geojson import FeatureCollection, Feature, Point
# read in original data
csv = pd.read_csv("versaille_stock.csv", sep="\t")
# fetch properties, remove Long and Lat
props = list(csv.columns.values)
props = [p for p in props if p not in ["Longitude",
"Latitude"]]
features = []
for row in csv.iterrows():
accession = row[1]
lat = accession["Latitude"]
lon = accession["Longitude"]
# automatically populate accession properties
feature_props = {p: accession[p] for p in props}
f = Feature(
geometry = Point((lon,lat))
)
features.append(f)
fc = FeatureCollection(features)
with open("accession_locations.json", "w") as fh:
geojson.dump(fc, fh)
|
Correct the order of coordinates.
|
Correct the order of coordinates.
Geojson expects lon, lat.
|
Python
|
apache-2.0
|
mkuzak/atmap
|
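The fix in this row swaps `Point((lat, lon))` for `Point((lon, lat))`: GeoJSON positions are longitude first, latitude second (RFC 7946). A quick check with the same `geojson` package, using approximate coordinates for Versailles:

```python
from geojson import Point

# Longitude first, then latitude.
p = Point((2.12, 48.80))
print(p)  # emits GeoJSON with "coordinates": [2.12, 48.8]
```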
29ffe1df88927aa568d3e86b07e372e5ba589310
|
indra/sources/eidos/server.py
|
indra/sources/eidos/server.py
|
"""This is a Python-based web server that can be run to
read with Eidos. To run the server, do
python -m indra.sources.eidos.server
and then submit POST requests to the `localhost:5000/process_text` endpoint
with JSON content as `{'text': 'text to read'}`. The response will be the
Eidos JSON-LD output.
"""
import json
import requests
from flask import Flask, request
from indra.sources.eidos.reader import EidosReader
from indra.preassembler.make_wm_ontologies import wm_ont_url
wm_yml = requests.get(wm_ont_url).text
app = Flask(__name__)
@app.route('/process_text', methods=['POST'])
def process_text():
text = request.json.get('text')
if not text:
return {}
res = er.process_text(text, 'json_ld')
return json.dumps(res)
@app.route('/reground_text', methods=['POST'])
def reground_text():
text = request.json.get('text')
if not text:
return []
res = er.reground_texts([text], wm_yml)
return json.dumps(res)
if __name__ == '__main__':
er = EidosReader()
er.process_text('hello', 'json_ld')
app.run(host='0.0.0.0', port=6666)
|
"""This is a Python-based web server that can be run to
read with Eidos. To run the server, do
python -m indra.sources.eidos.server
and then submit POST requests to the `localhost:5000/process_text` endpoint
with JSON content as `{'text': 'text to read'}`. The response will be the
Eidos JSON-LD output.
"""
import json
import requests
from flask import Flask, request
from indra.sources.eidos.reader import EidosReader
from indra.preassembler.make_wm_ontologies import wm_ont_url
wm_yml = requests.get(wm_ont_url).text
app = Flask(__name__)
@app.route('/process_text', methods=['POST'])
def process_text():
text = request.json.get('text')
if not text:
return {}
res = er.process_text(text, 'json_ld')
return json.dumps(res)
@app.route('/reground_text', methods=['POST'])
def reground_text():
text = request.json.get('text')
if not text:
return []
if isinstance(text, str):
res = er.reground_texts([text], wm_yml)
elif isinstance(text, list):
res = er.reground_texts(text, wm_yml)
return json.dumps(res)
if __name__ == '__main__':
er = EidosReader()
er.process_text('hello', 'json_ld')
app.run(host='0.0.0.0', port=6666)
|
Allow one or multiple texts to reground
|
Allow one or multiple texts to reground
|
Python
|
bsd-2-clause
|
sorgerlab/belpy,johnbachman/indra,bgyori/indra,johnbachman/belpy,bgyori/indra,johnbachman/indra,sorgerlab/indra,johnbachman/indra,sorgerlab/indra,sorgerlab/belpy,johnbachman/belpy,johnbachman/belpy,bgyori/indra,sorgerlab/belpy,sorgerlab/indra
|
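After this change, `/reground_text` accepts either a single string or a list of strings in the `text` field; a client sketch against a locally running server (note the module binds port 6666 in `app.run`, despite the docstring mentioning port 5000):

```python
import requests

base = "http://localhost:6666"
# Single text and batch regrounding against the same endpoint.
single = requests.post(base + "/reground_text", json={"text": "rainfall"})
batch = requests.post(base + "/reground_text",
                      json={"text": ["rainfall", "food insecurity"]})
print(single.json(), batch.json())
```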
46b60e3bb2b84685e27035a270e8ae81551f3f72
|
silver/management/commands/generate_docs.py
|
silver/management/commands/generate_docs.py
|
from optparse import make_option
from datetime import datetime as dt
from django.core.management.base import BaseCommand
from django.utils import translation
from django.conf import settings
from dateutil.relativedelta import *
from silver.documents_generator import DocumentsGenerator
from silver.models import Subscription
class Command(BaseCommand):
help = 'Generates the billing documents (Invoices, Proformas).'
option_list = BaseCommand.option_list + (
make_option('--subscription',
action='store',
dest='subscription_id',
type="int"),
make_option('--date',
action='store',
dest='billing_date',
type="string"),
)
def handle(self, *args, **options):
translation.activate(settings.LANGUAGE_CODE)
date = None
if options['billing_date']:
billing_date = dt.strptime(options['billing_date'], '%Y-%m-%d').date()
docs_generator = DocumentsGenerator()
if options['subscription_id']:
try:
subscription = Subscription.objects.get(id=options['subscription_id'])
docs_generator.generate(subscription=subscription)
self.stdout.write('Done. You can have a Club-Mate now. :)')
except Subscription.DoesNotExist:
msg = 'The subscription with the provided id does not exist.'
self.stdout.write(msg)
else:
docs_generator.generate(billing_date=billing_date)
self.stdout.write('Done. You can have a Club-Mate now. :)')
|
from optparse import make_option
from datetime import datetime as dt
from django.core.management.base import BaseCommand
from django.utils import translation
from django.conf import settings
from dateutil.relativedelta import *
from silver.documents_generator import DocumentsGenerator
from silver.models import Subscription
class Command(BaseCommand):
help = 'Generates the billing documents (Invoices, Proformas).'
option_list = BaseCommand.option_list + (
make_option('--subscription',
action='store',
dest='subscription_id',
type="int"),
make_option('--date',
action='store',
dest='billing_date',
type="string"),
)
def handle(self, *args, **options):
translation.activate('en-us')
date = None
if options['billing_date']:
billing_date = dt.strptime(options['billing_date'], '%Y-%m-%d').date()
docs_generator = DocumentsGenerator()
if options['subscription_id']:
try:
subscription = Subscription.objects.get(id=options['subscription_id'])
docs_generator.generate(subscription=subscription)
self.stdout.write('Done. You can have a Club-Mate now. :)')
except Subscription.DoesNotExist:
msg = 'The subscription with the provided id does not exist.'
self.stdout.write(msg)
else:
docs_generator.generate(billing_date=billing_date)
self.stdout.write('Done. You can have a Club-Mate now. :)')
|
Add language code in the command
|
Add language code in the command
|
Python
|
apache-2.0
|
PressLabs/silver,PressLabs/silver,PressLabs/silver
|
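A quick illustration of the pattern in the record above: the command pins its locale with translation.activate('en-us') instead of reading settings.LANGUAGE_CODE. Below is a minimal sketch using only stock Django translation APIs; run_with_locale is a hypothetical helper, not part of the silver repo:

from django.utils import translation

def run_with_locale(func, locale='en-us'):
    """Call func with the given locale active, restoring the previous one after."""
    previous = translation.get_language()  # may be None if nothing is active
    translation.activate(locale)           # pin e.g. 'en-us' for document generation
    try:
        return func()
    finally:
        if previous:
            translation.activate(previous)  # put the old locale back
        else:
            translation.deactivate()        # nothing was active before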
13be198c8aec08f5738eecbb7da2bfdcafd57a48
|
pygraphc/clustering/MaxCliquesPercolationSA.py
|
pygraphc/clustering/MaxCliquesPercolationSA.py
|
from MaxCliquesPercolation import MaxCliquesPercolationWeighted
class MaxCliquesPercolationSA(MaxCliquesPercolationWeighted):
def __init__(self, graph, edges_weight, nodes_id, k, threshold):
super(MaxCliquesPercolationSA, self).__init__(graph, edges_weight, nodes_id, k, threshold)
def get_maxcliques_percolation_sa(self):
pass
|
from MaxCliquesPercolation import MaxCliquesPercolationWeighted
from pygraphc.optimization.SimulatedAnnealing import SimulatedAnnealing
from numpy import linspace
class MaxCliquesPercolationSA(MaxCliquesPercolationWeighted):
def __init__(self, graph, edges_weight, nodes_id, k, threshold, tmin, tmax, alpha, energy_type, max_iteration):
super(MaxCliquesPercolationSA, self).__init__(graph, edges_weight, nodes_id, k, threshold)
self.Tmin = tmin
self.Tmax = tmax
self.alpha = alpha
self.energy_type = energy_type
self.max_iteration = max_iteration
def get_maxcliques_percolation_sa(self):
# run max_clique
max_cliques = self._find_maxcliques()
# get maximal node for all maximal cliques to generate k
max_node = 0
for max_clique in max_cliques:
current_len = len(max_clique)
if max_node < current_len:
max_node = current_len
parameters = {
'k': list(xrange(2, max_node)),
'I': linspace(0.1, 0.9, 9)
}
sa = SimulatedAnnealing(self.Tmin, self.Tmax, self.alpha, parameters, self.energy_type, self.max_iteration)
initial_parameter = sa.get_parameter()
# get maximal clique percolation
|
Add constructor and get method with SA
|
Add constructor and get method with SA
|
Python
|
mit
|
studiawan/pygraphc
|
|
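For context on the record above, the parameter grid handed to SimulatedAnnealing is derived from the largest maximal clique found. A standalone sketch of that construction follows; build_sa_parameters is a hypothetical name, and range() stands in for the original's Python 2 xrange():

from numpy import linspace

def build_sa_parameters(max_cliques):
    # the largest clique size bounds the useful percolation parameter k
    max_node = max((len(clique) for clique in max_cliques), default=0)
    return {
        'k': list(range(2, max_node)),     # candidate k values: 2 .. max_node - 1
        'I': list(linspace(0.1, 0.9, 9)),  # intensity thresholds 0.1, 0.2, ..., 0.9
    }

print(build_sa_parameters([{1, 2, 3}, {2, 3, 4, 5}]))  # k candidates: [2, 3]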
33309df85823bde19fcdd2b21b73db9f1da131ab
|
requests_oauthlib/compliance_fixes/facebook.py
|
requests_oauthlib/compliance_fixes/facebook.py
|
from json import dumps
from oauthlib.common import urldecode
from urlparse import parse_qsl
def facebook_compliance_fix(session):
def _compliance_fix(r):
# if Facebook claims to be sending us json, let's trust them.
if 'application/json' in r.headers['content-type']:
return r
# Facebook returns a content-type of text/plain when sending their
# x-www-form-urlencoded responses, along with a 200. If not, let's
# assume we're getting JSON and bail on the fix.
if 'text/plain' in r.headers['content-type'] and r.status_code == 200:
token = dict(parse_qsl(r.text, keep_blank_values=True))
else:
return r
expires = token.get('expires')
if expires is not None:
token['expires_in'] = expires
token['token_type'] = 'Bearer'
r._content = dumps(token)
return r
session.register_compliance_hook('access_token_response', _compliance_fix)
return session
|
from json import dumps
try:
from urlparse import parse_qsl
except ImportError:
from urllib.parse import parse_qsl
def facebook_compliance_fix(session):
def _compliance_fix(r):
# if Facebook claims to be sending us json, let's trust them.
if 'application/json' in r.headers['content-type']:
return r
# Facebook returns a content-type of text/plain when sending their
# x-www-form-urlencoded responses, along with a 200. If not, let's
# assume we're getting JSON and bail on the fix.
if 'text/plain' in r.headers['content-type'] and r.status_code == 200:
token = dict(parse_qsl(r.text, keep_blank_values=True))
else:
return r
expires = token.get('expires')
if expires is not None:
token['expires_in'] = expires
token['token_type'] = 'Bearer'
r._content = dumps(token)
return r
session.register_compliance_hook('access_token_response', _compliance_fix)
return session
|
Remove unused import; make the Facebook compliance fix support Python 3
|
Remove unused import; make the Facebook compliance fix support Python 3
|
Python
|
isc
|
abhi931375/requests-oauthlib,gras100/asks-oauthlib,requests/requests-oauthlib,singingwolfboy/requests-oauthlib,jayvdb/requests-oauthlib,lucidbard/requests-oauthlib,dongguangming/requests-oauthlib,jsfan/requests-oauthlib,jayvdb/requests-oauthlib,sigmavirus24/requests-oauthlib,elafarge/requests-oauthlib
|
|
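The heart of this record's change is the version-agnostic parse_qsl import. Shown standalone below with a made-up token body (illustrative values only), runnable on either Python line:

try:
    from urlparse import parse_qsl        # Python 2 location
except ImportError:
    from urllib.parse import parse_qsl    # Python 3 location

body = "access_token=abc123&expires=5183999"   # illustrative values only
token = dict(parse_qsl(body, keep_blank_values=True))
expires = token.get('expires')
if expires is not None:
    token['expires_in'] = expires   # same key normalization as the fix
token['token_type'] = 'Bearer'
print(token)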
60a9ace22f219f7b125b3a618090c4dd36cded4c
|
api/base/exceptions.py
|
api/base/exceptions.py
|
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
"""
Custom exception handler that returns errors object as an array
"""
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Title removed to avoid clash with node "title" errors
acceptable_members = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response is not None:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in acceptable_members:
errors.append({key: value})
else:
errors.append({'detail': {key: value}})
elif isinstance(message, list):
for error in message:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
# Return 401 instead of 403 during unauthorized requests without having user log in with Basic Auth
if response is not None and response.data['errors'][0].get('detail') == "Authentication credentials were not provided.":
response.status_code = 401
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
|
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
"""
Custom exception handler that returns errors object as an array
"""
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Title removed to avoid clash with node "title" errors
acceptable_members = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response is not None:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in acceptable_members:
errors.append({key: value})
else:
errors.append({'detail': {key: value}})
elif isinstance(message, list):
for error in message:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
# Return 401 instead of 403 during unauthorized requests without having user log in with Basic Auth
error_message = response.data['errors'][0].get('detail')
errors_401 = ["Authentication credentials were not provided.", 'Incorrect authentication credentials.']
if response is not None and error_message in errors_401:
response.status_code = 401
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
|
Add additional error details to cover other circumstances that should return 401
|
Add additional error details to cover other circumstances that should return 401
|
Python
|
apache-2.0
|
abought/osf.io,mfraezz/osf.io,rdhyee/osf.io,GageGaskins/osf.io,TomHeatwole/osf.io,Ghalko/osf.io,pattisdr/osf.io,HalcyonChimera/osf.io,billyhunt/osf.io,brianjgeiger/osf.io,cosenal/osf.io,danielneis/osf.io,caseyrygt/osf.io,haoyuchen1992/osf.io,cslzchen/osf.io,acshi/osf.io,emetsger/osf.io,Nesiehr/osf.io,samanehsan/osf.io,saradbowman/osf.io,GageGaskins/osf.io,ZobairAlijan/osf.io,chrisseto/osf.io,kwierman/osf.io,ticklemepierce/osf.io,kch8qx/osf.io,cwisecarver/osf.io,CenterForOpenScience/osf.io,billyhunt/osf.io,chennan47/osf.io,njantrania/osf.io,samchrisinger/osf.io,asanfilippo7/osf.io,chrisseto/osf.io,cslzchen/osf.io,mluo613/osf.io,monikagrabowska/osf.io,acshi/osf.io,samanehsan/osf.io,alexschiller/osf.io,doublebits/osf.io,danielneis/osf.io,DanielSBrown/osf.io,hmoco/osf.io,mluke93/osf.io,njantrania/osf.io,abought/osf.io,leb2dg/osf.io,mluo613/osf.io,kch8qx/osf.io,ZobairAlijan/osf.io,kwierman/osf.io,doublebits/osf.io,monikagrabowska/osf.io,chrisseto/osf.io,danielneis/osf.io,icereval/osf.io,adlius/osf.io,Johnetordoff/osf.io,sbt9uc/osf.io,ticklemepierce/osf.io,jnayak1/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,adlius/osf.io,monikagrabowska/osf.io,felliott/osf.io,Ghalko/osf.io,kch8qx/osf.io,SSJohns/osf.io,zachjanicki/osf.io,billyhunt/osf.io,SSJohns/osf.io,RomanZWang/osf.io,haoyuchen1992/osf.io,mluo613/osf.io,ZobairAlijan/osf.io,caseyrygt/osf.io,mfraezz/osf.io,cslzchen/osf.io,baylee-d/osf.io,samanehsan/osf.io,abought/osf.io,cosenal/osf.io,cwisecarver/osf.io,leb2dg/osf.io,HalcyonChimera/osf.io,pattisdr/osf.io,mfraezz/osf.io,wearpants/osf.io,mattclark/osf.io,cosenal/osf.io,jnayak1/osf.io,sloria/osf.io,Nesiehr/osf.io,mluke93/osf.io,brianjgeiger/osf.io,erinspace/osf.io,cwisecarver/osf.io,cosenal/osf.io,crcresearch/osf.io,TomHeatwole/osf.io,rdhyee/osf.io,ZobairAlijan/osf.io,alexschiller/osf.io,chennan47/osf.io,acshi/osf.io,laurenrevere/osf.io,jnayak1/osf.io,asanfilippo7/osf.io,sbt9uc/osf.io,samchrisinger/osf.io,pattisdr/osf.io,SSJohns/osf.io,caseyrollins/osf.io,amyshi188/osf.io,alexschiller/osf.io,chennan47/osf.io,arpitar/osf.io,njantrania/osf.io,brianjgeiger/osf.io,hmoco/osf.io,GageGaskins/osf.io,RomanZWang/osf.io,brandonPurvis/osf.io,samchrisinger/osf.io,arpitar/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,rdhyee/osf.io,monikagrabowska/osf.io,amyshi188/osf.io,caseyrygt/osf.io,caneruguz/osf.io,felliott/osf.io,danielneis/osf.io,doublebits/osf.io,zachjanicki/osf.io,DanielSBrown/osf.io,GageGaskins/osf.io,acshi/osf.io,petermalcolm/osf.io,cwisecarver/osf.io,brandonPurvis/osf.io,binoculars/osf.io,mattclark/osf.io,zamattiac/osf.io,adlius/osf.io,ticklemepierce/osf.io,arpitar/osf.io,mattclark/osf.io,RomanZWang/osf.io,hmoco/osf.io,petermalcolm/osf.io,alexschiller/osf.io,amyshi188/osf.io,wearpants/osf.io,zachjanicki/osf.io,chrisseto/osf.io,wearpants/osf.io,kwierman/osf.io,wearpants/osf.io,caseyrollins/osf.io,doublebits/osf.io,zachjanicki/osf.io,kch8qx/osf.io,brandonPurvis/osf.io,felliott/osf.io,crcresearch/osf.io,leb2dg/osf.io,samchrisinger/osf.io,asanfilippo7/osf.io,KAsante95/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,laurenrevere/osf.io,RomanZWang/osf.io,binoculars/osf.io,Johnetordoff/osf.io,kch8qx/osf.io,alexschiller/osf.io,haoyuchen1992/osf.io,KAsante95/osf.io,petermalcolm/osf.io,brandonPurvis/osf.io,mluke93/osf.io,crcresearch/osf.io,KAsante95/osf.io,cslzchen/osf.io,adlius/osf.io,KAsante95/osf.io,sloria/osf.io,Nesiehr/osf.io,CenterForOpenScience/osf.io,DanielSBrown/osf.io,CenterForOpenScience/osf.io,binoculars/osf.io,DanielSBrown/osf.io,caneruguz/osf.io,aaxelb/osf.io,GageGaskins/osf.io,emetsger/osf.io,emetsger
/osf.io,zamattiac/osf.io,aaxelb/osf.io,asanfilippo7/osf.io,abought/osf.io,baylee-d/osf.io,sbt9uc/osf.io,mluke93/osf.io,HalcyonChimera/osf.io,laurenrevere/osf.io,icereval/osf.io,icereval/osf.io,amyshi188/osf.io,TomBaxter/osf.io,petermalcolm/osf.io,erinspace/osf.io,baylee-d/osf.io,TomHeatwole/osf.io,monikagrabowska/osf.io,felliott/osf.io,erinspace/osf.io,zamattiac/osf.io,caneruguz/osf.io,billyhunt/osf.io,rdhyee/osf.io,ticklemepierce/osf.io,kwierman/osf.io,mluo613/osf.io,arpitar/osf.io,Ghalko/osf.io,caseyrygt/osf.io,emetsger/osf.io,jnayak1/osf.io,hmoco/osf.io,saradbowman/osf.io,billyhunt/osf.io,mluo613/osf.io,TomBaxter/osf.io,njantrania/osf.io,leb2dg/osf.io,TomBaxter/osf.io,SSJohns/osf.io,mfraezz/osf.io,brandonPurvis/osf.io,aaxelb/osf.io,caseyrollins/osf.io,sbt9uc/osf.io,Nesiehr/osf.io,acshi/osf.io,samanehsan/osf.io,CenterForOpenScience/osf.io,haoyuchen1992/osf.io,TomHeatwole/osf.io,Ghalko/osf.io,zamattiac/osf.io,doublebits/osf.io,RomanZWang/osf.io,sloria/osf.io,KAsante95/osf.io
|
|
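A self-contained sketch of the 401 remapping this record adds; FakeResponse is a stand-in object, not a DRF class. If the error_message lookup sits outside the response is not None guard (indentation is not preserved in this dump), it would raise on a None response, so the sketch guards first:

ERRORS_401 = [
    "Authentication credentials were not provided.",
    "Incorrect authentication credentials.",
]

class FakeResponse(object):
    """Minimal stand-in exposing the attributes the handler touches."""
    def __init__(self, detail, status_code=403):
        self.data = {'errors': [{'detail': detail}]}
        self.status_code = status_code

def remap_to_401(response):
    if response is not None and response.data['errors'][0].get('detail') in ERRORS_401:
        response.status_code = 401  # lets Basic Auth clients re-prompt for credentials
    return response

print(remap_to_401(FakeResponse("Incorrect authentication credentials.")).status_code)  # 401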
18d06379a2dd89ef3d8db0d045f563b8f38f57db
|
badgekit_webhooks/urls.py
|
badgekit_webhooks/urls.py
|
from __future__ import unicode_literals
from django.conf.urls import patterns, url
from . import views
from django.contrib.admin.views.decorators import staff_member_required
urlpatterns = patterns(
"",
url(r"^hello/$", "badgekit_webhooks.views.hello", name="badgekit_webhooks_hello"),
url(r"^issued/$", "badgekit_webhooks.views.badge_issued_hook",
name="badge_issued_hook"),
url(r"^instances/$", views.InstanceListView.as_view()),
url(r"^claim/([-A-Za-z0-9_]+)/$", 'badgekit_webhooks.views.claim_page'),
url(r"^claim/([-A-Za-z0-9_]+)/email/(html|text)$", 'badgekit_webhooks.views.show_claim_email',
name="show_claim_email"),
url(r"^issue/$", staff_member_required(views.SendClaimCodeView.as_view()),
name="badge_issue_form"),
url(r"^claimcode/([-A-Za-z.0-9_]+)/$",
views.ClaimCodeClaimView.as_view(), name='claimcode_claim'),
url(r"^badges/$", "badgekit_webhooks.views.list_badges_view", name="badges_list"),
)
|
from __future__ import unicode_literals
from django.conf.urls import patterns, url
from . import views
from django.contrib.admin.views.decorators import staff_member_required
urlpatterns = patterns(
"",
url(r"^hello/$", "badgekit_webhooks.views.hello", name="badgekit_webhooks_hello"),
url(r"^issued/$", "badgekit_webhooks.views.badge_issued_hook",
name="badge_issued_hook"),
url(r"^instances/$", staff_member_required(views.InstanceListView.as_view()),
name="badge_instance_list"),
url(r"^claim/([-A-Za-z0-9_]+)/$", 'badgekit_webhooks.views.claim_page'),
url(r"^claim/([-A-Za-z0-9_]+)/email/(html|text)$", 'badgekit_webhooks.views.show_claim_email',
name="show_claim_email"),
url(r"^issue/$", staff_member_required(views.SendClaimCodeView.as_view()),
name="badge_issue_form"),
url(r"^claimcode/([-A-Za-z.0-9_]+)/$",
views.ClaimCodeClaimView.as_view(), name='claimcode_claim'),
url(r"^badges/$", "badgekit_webhooks.views.list_badges_view", name="badges_list"),
)
|
Mark instance list as staff-only, and give it a view name
|
Mark instance list as staff-only, and give it a view name
|
Python
|
mit
|
tgs/django-badgekit-webhooks
|
|
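The record above applies the same staff_member_required wrapper to the instance list that the issue form already uses; this works because as_view() returns a plain view callable. A minimal sketch with an illustrative view class (ExampleListView is not from the package):

from django.contrib.admin.views.decorators import staff_member_required
from django.views.generic import ListView

class ExampleListView(ListView):
    queryset = []                         # substitute a real queryset or model
    template_name = "example_list.html"   # illustrative template name

# Non-staff users are redirected to the admin login instead of seeing the list.
protected_view = staff_member_required(ExampleListView.as_view())
# urls.py usage: url(r"^instances/$", protected_view, name="badge_instance_list")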
fb15b0735a8d2710baa33ac4e74d1dc88de209bc
|
suplemon/lexer.py
|
suplemon/lexer.py
|
# -*- encoding: utf-8
import pygments
import pygments.lexers
class Lexer:
def __init__(self, app):
self.app = app
def lex(self, code, lex):
"""Return tokenified code.
Return a list of tuples (scope, word) where word is the word to be
printed and scope the scope name representing the context.
:param str code: Code to tokenify.
:param lex: Lexer to use.
:return:
"""
if lex is None:
return (("global", code),)
words = pygments.lex(code, lex)
scopes = []
for word in words:
token = word[0]
scope = "global"
if token in pygments.token.Keyword:
scope = "keyword"
elif token == pygments.token.Comment:
scope = "comment"
elif token in pygments.token.Literal.String:
scope = "string"
elif token in pygments.token.Literal.Number:
scope = "constant.numeric"
elif token == pygments.token.Name.Function:
scope = "entity.name.function"
elif token == pygments.token.Name.Class:
scope = "entity.name.class"
elif token == pygments.token.Operator:
scope = "keyword"
elif token == pygments.token.Name.Builtin.Pseudo:
scope = "constant.language"
scopes.append((scope, word[1]))
return scopes
|
# -*- encoding: utf-8
import pygments
import pygments.lexers
class Lexer:
def __init__(self, app):
self.app = app
def lex(self, code, lex):
"""Return tokenified code.
Return a list of tuples (scope, word) where word is the word to be
printed and scope the scope name representing the context.
:param str code: Code to tokenify.
:param lex: Lexer to use.
:return:
"""
if lex is None:
if not type(code) is str:
# if no suitable lexer is found, return the decoded code
code = code.decode("utf-8")
return (("global", code),)
words = pygments.lex(code, lex)
scopes = []
for word in words:
token = word[0]
scope = "global"
if token in pygments.token.Keyword:
scope = "keyword"
elif token == pygments.token.Comment:
scope = "comment"
elif token in pygments.token.Literal.String:
scope = "string"
elif token in pygments.token.Literal.Number:
scope = "constant.numeric"
elif token == pygments.token.Name.Function:
scope = "entity.name.function"
elif token == pygments.token.Name.Class:
scope = "entity.name.class"
elif token == pygments.token.Operator:
scope = "keyword"
elif token == pygments.token.Name.Builtin.Pseudo:
scope = "constant.language"
scopes.append((scope, word[1]))
return scopes
|
Make sure that Lexer.lex() returns str instead of bytes
|
Make sure that Lexer.lex() returns str instead of bytes
|
Python
|
mit
|
twolfson/suplemon,richrd/suplemon,richrd/suplemon,severin31/suplemon,twolfson/suplemon,trylle/suplemon
|
# -*- encoding: utf-8
import pygments
import pygments.lexers
class Lexer:
def __init__(self, app):
self.app = app
def lex(self, code, lex):
"""Return tokenified code.
Return a list of tuples (scope, word) where word is the word to be
printed and scope the scope name representing the context.
:param str code: Code to tokenify.
:param lex: Lexer to use.
:return:
"""
if lex is None:
return (("global", code),)
words = pygments.lex(code, lex)
scopes = []
for word in words:
token = word[0]
scope = "global"
if token in pygments.token.Keyword:
scope = "keyword"
elif token == pygments.token.Comment:
scope = "comment"
elif token in pygments.token.Literal.String:
scope = "string"
elif token in pygments.token.Literal.Number:
scope = "constant.numeric"
elif token == pygments.token.Name.Function:
scope = "entity.name.function"
elif token == pygments.token.Name.Class:
scope = "entity.name.class"
elif token == pygments.token.Operator:
scope = "keyword"
elif token == pygments.token.Name.Builtin.Pseudo:
scope = "constant.language"
scopes.append((scope, word[1]))
return scopes
Make sure that Lexer.lex() returns str instead of bytes
|
# -*- encoding: utf-8
import pygments
import pygments.lexers
class Lexer:
def __init__(self, app):
self.app = app
def lex(self, code, lex):
"""Return tokenified code.
Return a list of tuples (scope, word) where word is the word to be
printed and scope the scope name representing the context.
:param str code: Code to tokenify.
:param lex: Lexer to use.
:return:
"""
if lex is None:
if not type(code) is str:
# if not suitable lexer is found, return decoded code
code = code.decode("utf-8")
return (("global", code),)
words = pygments.lex(code, lex)
scopes = []
for word in words:
token = word[0]
scope = "global"
if token in pygments.token.Keyword:
scope = "keyword"
elif token == pygments.token.Comment:
scope = "comment"
elif token in pygments.token.Literal.String:
scope = "string"
elif token in pygments.token.Literal.Number:
scope = "constant.numeric"
elif token == pygments.token.Name.Function:
scope = "entity.name.function"
elif token == pygments.token.Name.Class:
scope = "entity.name.class"
elif token == pygments.token.Operator:
scope = "keyword"
elif token == pygments.token.Name.Builtin.Pseudo:
scope = "constant.language"
scopes.append((scope, word[1]))
return scopes
|
<commit_before># -*- encoding: utf-8
import pygments
import pygments.lexers
class Lexer:
def __init__(self, app):
self.app = app
def lex(self, code, lex):
"""Return tokenified code.
Return a list of tuples (scope, word) where word is the word to be
printed and scope the scope name representing the context.
:param str code: Code to tokenify.
:param lex: Lexer to use.
:return:
"""
if lex is None:
return (("global", code),)
words = pygments.lex(code, lex)
scopes = []
for word in words:
token = word[0]
scope = "global"
if token in pygments.token.Keyword:
scope = "keyword"
elif token == pygments.token.Comment:
scope = "comment"
elif token in pygments.token.Literal.String:
scope = "string"
elif token in pygments.token.Literal.Number:
scope = "constant.numeric"
elif token == pygments.token.Name.Function:
scope = "entity.name.function"
elif token == pygments.token.Name.Class:
scope = "entity.name.class"
elif token == pygments.token.Operator:
scope = "keyword"
elif token == pygments.token.Name.Builtin.Pseudo:
scope = "constant.language"
scopes.append((scope, word[1]))
return scopes
<commit_msg>Make sure that Lexer.lex() returns str instead of bytes<commit_after>
|
# -*- encoding: utf-8
import pygments
import pygments.lexers
class Lexer:
def __init__(self, app):
self.app = app
def lex(self, code, lex):
"""Return tokenified code.
Return a list of tuples (scope, word) where word is the word to be
printed and scope the scope name representing the context.
:param str code: Code to tokenify.
:param lex: Lexer to use.
:return:
"""
if lex is None:
if not type(code) is str:
# if not suitable lexer is found, return decoded code
code = code.decode("utf-8")
return (("global", code),)
words = pygments.lex(code, lex)
scopes = []
for word in words:
token = word[0]
scope = "global"
if token in pygments.token.Keyword:
scope = "keyword"
elif token == pygments.token.Comment:
scope = "comment"
elif token in pygments.token.Literal.String:
scope = "string"
elif token in pygments.token.Literal.Number:
scope = "constant.numeric"
elif token == pygments.token.Name.Function:
scope = "entity.name.function"
elif token == pygments.token.Name.Class:
scope = "entity.name.class"
elif token == pygments.token.Operator:
scope = "keyword"
elif token == pygments.token.Name.Builtin.Pseudo:
scope = "constant.language"
scopes.append((scope, word[1]))
return scopes
|
# -*- encoding: utf-8
import pygments
import pygments.lexers
class Lexer:
def __init__(self, app):
self.app = app
def lex(self, code, lex):
"""Return tokenified code.
Return a list of tuples (scope, word) where word is the word to be
printed and scope the scope name representing the context.
:param str code: Code to tokenify.
:param lex: Lexer to use.
:return:
"""
if lex is None:
return (("global", code),)
words = pygments.lex(code, lex)
scopes = []
for word in words:
token = word[0]
scope = "global"
if token in pygments.token.Keyword:
scope = "keyword"
elif token == pygments.token.Comment:
scope = "comment"
elif token in pygments.token.Literal.String:
scope = "string"
elif token in pygments.token.Literal.Number:
scope = "constant.numeric"
elif token == pygments.token.Name.Function:
scope = "entity.name.function"
elif token == pygments.token.Name.Class:
scope = "entity.name.class"
elif token == pygments.token.Operator:
scope = "keyword"
elif token == pygments.token.Name.Builtin.Pseudo:
scope = "constant.language"
scopes.append((scope, word[1]))
return scopes
Make sure that Lexer.lex() returns str instead of bytes# -*- encoding: utf-8
import pygments
import pygments.lexers
class Lexer:
def __init__(self, app):
self.app = app
def lex(self, code, lex):
"""Return tokenified code.
Return a list of tuples (scope, word) where word is the word to be
printed and scope the scope name representing the context.
:param str code: Code to tokenify.
:param lex: Lexer to use.
:return:
"""
if lex is None:
if not type(code) is str:
# if not suitable lexer is found, return decoded code
code = code.decode("utf-8")
return (("global", code),)
words = pygments.lex(code, lex)
scopes = []
for word in words:
token = word[0]
scope = "global"
if token in pygments.token.Keyword:
scope = "keyword"
elif token == pygments.token.Comment:
scope = "comment"
elif token in pygments.token.Literal.String:
scope = "string"
elif token in pygments.token.Literal.Number:
scope = "constant.numeric"
elif token == pygments.token.Name.Function:
scope = "entity.name.function"
elif token == pygments.token.Name.Class:
scope = "entity.name.class"
elif token == pygments.token.Operator:
scope = "keyword"
elif token == pygments.token.Name.Builtin.Pseudo:
scope = "constant.language"
scopes.append((scope, word[1]))
return scopes
|
<commit_before># -*- encoding: utf-8
import pygments
import pygments.lexers
class Lexer:
def __init__(self, app):
self.app = app
def lex(self, code, lex):
"""Return tokenified code.
Return a list of tuples (scope, word) where word is the word to be
printed and scope the scope name representing the context.
:param str code: Code to tokenify.
:param lex: Lexer to use.
:return:
"""
if lex is None:
return (("global", code),)
words = pygments.lex(code, lex)
scopes = []
for word in words:
token = word[0]
scope = "global"
if token in pygments.token.Keyword:
scope = "keyword"
elif token == pygments.token.Comment:
scope = "comment"
elif token in pygments.token.Literal.String:
scope = "string"
elif token in pygments.token.Literal.Number:
scope = "constant.numeric"
elif token == pygments.token.Name.Function:
scope = "entity.name.function"
elif token == pygments.token.Name.Class:
scope = "entity.name.class"
elif token == pygments.token.Operator:
scope = "keyword"
elif token == pygments.token.Name.Builtin.Pseudo:
scope = "constant.language"
scopes.append((scope, word[1]))
return scopes
<commit_msg>Make sure that Lexer.lex() returns str instead of bytes<commit_after># -*- encoding: utf-8
import pygments
import pygments.lexers
class Lexer:
def __init__(self, app):
self.app = app
def lex(self, code, lex):
"""Return tokenified code.
Return a list of tuples (scope, word) where word is the word to be
printed and scope the scope name representing the context.
:param str code: Code to tokenify.
:param lex: Lexer to use.
:return:
"""
if lex is None:
if not type(code) is str:
# if no suitable lexer is found, return the decoded code
code = code.decode("utf-8")
return (("global", code),)
words = pygments.lex(code, lex)
scopes = []
for word in words:
token = word[0]
scope = "global"
if token in pygments.token.Keyword:
scope = "keyword"
elif token == pygments.token.Comment:
scope = "comment"
elif token in pygments.token.Literal.String:
scope = "string"
elif token in pygments.token.Literal.Number:
scope = "constant.numeric"
elif token == pygments.token.Name.Function:
scope = "entity.name.function"
elif token == pygments.token.Name.Class:
scope = "entity.name.class"
elif token == pygments.token.Operator:
scope = "keyword"
elif token == pygments.token.Name.Builtin.Pseudo:
scope = "constant.language"
scopes.append((scope, word[1]))
return scopes
|
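A minimal sketch of the idiom behind the fix above: normalize possibly-bytes input to str before lexing. This is not part of the record; the UTF-8 encoding is carried over from the patch, everything else is illustrative.
def ensure_text(code):
    # Bytes read from a file opened in binary mode must be decoded first.
    if isinstance(code, bytes):
        return code.decode("utf-8")
    return code
assert ensure_text(b"print(1)") == "print(1)"
assert ensure_text("print(1)") == "print(1)"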
85d6a10891e8c8c3f800a28db28a8d6a1c5684be
|
src/texas_choropleth/settings/production.py
|
src/texas_choropleth/settings/production.py
|
from .base import *
# Debug Settings
DEBUG = False
TEMPLATE_DEBUG = False
# Media Settings
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
# Staticfile Settings
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
STATIC_ROOT = os.path.join(BASE_DIR, 'static_final')
# TMP Dir for Choropleth Screenshots
IMAGE_EXPORT_TMP_DIR = os.path.join('/', 'tmp')
# Enable Pipeline
PIPELINE_ENABLED = True
# Database Settings
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': '',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Email Settings
EMAIL_HOST = ''
EMAIL_PORT = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_USE_TLS = ''
EMAIL_USE_SSL = ''
|
from .base import *
# Debug Settings
DEBUG = False
TEMPLATE_DEBUG = False
# Media Settings
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
# Staticfile Settings
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
STATIC_ROOT = os.path.join(BASE_DIR, 'static_final')
# TMP Dir for Choropleth Screenshots
IMAGE_EXPORT_TMP_DIR = os.path.join('/', 'tmp')
# Enable Pipeline
PIPELINE_ENABLED = True
# Database Settings
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': get_secret('DB_NAME'),
'USER': get_secret('DB_USER'),
'PASSWORD': get_secret('DB_PASSWORD'),
'HOST': get_secret('DB_HOST'),
'PORT': get_secret('DB_PORT')
}
}
# Email Settings
EMAIL_HOST = ''
EMAIL_PORT = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_USE_TLS = ''
EMAIL_USE_SSL = ''
|
Use the get_secret function to grab the database configuration.
|
Use the get_secret function to grab the database configuration.
git-svn-id: d73fdb991549f9d1a0affa567d55bb0fdbd453f3@8412 f04a3889-0f81-4131-97fb-bc517d1f583d
|
Python
|
bsd-3-clause
|
unt-libraries/texas-choropleth,unt-libraries/texas-choropleth,damonkelley/texas-choropleth,damonkelley/texas-choropleth,unt-libraries/texas-choropleth,damonkelley/texas-choropleth,damonkelley/texas-choropleth,unt-libraries/texas-choropleth
|
from .base import *
# Debug Settings
DEBUG = False
TEMPLATE_DEBUG = False
# Media Settings
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
# Staticfile Settings
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
STATIC_ROOT = os.path.join(BASE_DIR, 'static_final')
# TMP Dir for Choropleth Screenshots
IMAGE_EXPORT_TMP_DIR = os.path.join('/', 'tmp')
# Enable Pipeline
PIPELINE_ENABLED = True
# Database Settings
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': '',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Email Settings
EMAIL_HOST = ''
EMAIL_PORT = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_USE_TLS = ''
EMAIL_USE_SSL = ''
Use the get_secret function to grab the database configuration.
git-svn-id: d73fdb991549f9d1a0affa567d55bb0fdbd453f3@8412 f04a3889-0f81-4131-97fb-bc517d1f583d
|
from .base import *
# Debug Settings
DEBUG = False
TEMPLATE_DEBUG = False
# Media Settings
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
# Staticfile Settings
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
STATIC_ROOT = os.path.join(BASE_DIR, 'static_final')
# TMP Dir for Choropleth Screenshots
IMAGE_EXPORT_TMP_DIR = os.path.join('/', 'tmp')
# Enable Pipeline
PIPELINE_ENABLED = True
# Database Settings
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': get_secret('DB_NAME'),
'USER': get_secret('DB_USER'),
'PASSWORD': get_secret('DB_PASSWORD'),
'HOST': get_secret('DB_HOST'),
'PORT': get_secret('DB_PORT')
}
}
# Email Settings
EMAIL_HOST = ''
EMAIL_PORT = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_USE_TLS = ''
EMAIL_USE_SSL = ''
|
<commit_before>from .base import *
# Debug Settings
DEBUG = False
TEMPLATE_DEBUG = False
# Media Settings
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
# Staticfile Settings
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
STATIC_ROOT = os.path.join(BASE_DIR, 'static_final')
# TMP Dir for Choropleth Screenshots
IMAGE_EXPORT_TMP_DIR = os.path.join('/', 'tmp')
# Enable Pipeline
PIPELINE_ENABLED = True
# Database Settings
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': '',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Email Settings
EMAIL_HOST = ''
EMAIL_PORT = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_USE_TLS = ''
EMAIL_USE_SSL = ''
<commit_msg>Use the get_secret function to grab the database configuration.
git-svn-id: d73fdb991549f9d1a0affa567d55bb0fdbd453f3@8412 f04a3889-0f81-4131-97fb-bc517d1f583d<commit_after>
|
from .base import *
# Debug Settings
DEBUG = False
TEMPLATE_DEBUG = False
# Media Settings
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
# Staticfile Settings
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
STATIC_ROOT = os.path.join(BASE_DIR, 'static_final')
# TMP Dir for Choropleth Screenshots
IMAGE_EXPORT_TMP_DIR = os.path.join('/', 'tmp')
# Enable Pipeline
PIPELINE_ENABLED = True
# Database Settings
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': get_secret('DB_NAME'),
'USER': get_secret('DB_USER'),
'PASSWORD': get_secret('DB_PASSWORD'),
'HOST': get_secret('DB_HOST'),
'PORT': get_secret('DB_PORT')
}
}
# Email Settings
EMAIL_HOST = ''
EMAIL_PORT = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_USE_TLS = ''
EMAIL_USE_SSL = ''
|
from .base import *
# Debug Settings
DEBUG = False
TEMPLATE_DEBUG = False
# Media Settings
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
# Staticfile Settings
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
STATIC_ROOT = os.path.join(BASE_DIR, 'static_final')
# TMP Dir for Choropleth Screenshots
IMAGE_EXPORT_TMP_DIR = os.path.join('/', 'tmp')
# Enable Pipeline
PIPELINE_ENABLED = True
# Database Settings
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': '',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Email Settings
EMAIL_HOST = ''
EMAIL_PORT = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_USE_TLS = ''
EMAIL_USE_SSL = ''
Use the get_secret function to grab the database configuration.
git-svn-id: d73fdb991549f9d1a0affa567d55bb0fdbd453f3@8412 f04a3889-0f81-4131-97fb-bc517d1f583d
from .base import *
# Debug Settings
DEBUG = False
TEMPLATE_DEBUG = False
# Media Settings
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
# Staticfile Settings
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
STATIC_ROOT = os.path.join(BASE_DIR, 'static_final')
# TMP Dir for Choropleth Screenshots
IMAGE_EXPORT_TMP_DIR = os.path.join('/', 'tmp')
# Enable Pipeline
PIPELINE_ENABLED = True
# Database Settings
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': get_secret('DB_NAME'),
'USER': get_secret('DB_USER'),
'PASSWORD': get_secret('DB_PASSWORD'),
'HOST': get_secret('DB_HOST'),
'PORT': get_secret('DB_PORT')
}
}
# Email Settings
EMAIL_HOST = ''
EMAIL_PORT = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_USE_TLS = ''
EMAIL_USE_SSL = ''
|
<commit_before>from .base import *
# Debug Settings
DEBUG = False
TEMPLATE_DEBUG = False
# Media Settings
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
# Staticfile Settings
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
STATIC_ROOT = os.path.join(BASE_DIR, 'static_final')
# TMP Dir for Choropleth Screenshots
IMAGE_EXPORT_TMP_DIR = os.path.join('/', 'tmp')
# Enable Pipeline
PIPELINE_ENABLED = True
# Database Settings
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': '',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Email Settings
EMAIL_HOST = ''
EMAIL_PORT = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_USE_TLS = ''
EMAIL_USE_SSL = ''
<commit_msg>Use the get_secret function to grab the database configuration.
git-svn-id: d73fdb991549f9d1a0affa567d55bb0fdbd453f3@8412 f04a3889-0f81-4131-97fb-bc517d1f583d<commit_after>from .base import *
# Debug Settings
DEBUG = False
TEMPLATE_DEBUG = False
# Media Settings
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
# Staticfile Settings
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
STATIC_ROOT = os.path.join(BASE_DIR, 'static_final')
# TMP Dir for Choropleth Screenshots
IMAGE_EXPORT_TMP_DIR = os.path.join('/', 'tmp')
# Enable Pipeline
PIPELINE_ENABLED = True
# Database Settings
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': get_secret('DB_NAME'),
'USER': get_secret('DB_USER'),
'PASSWORD': get_secret('DB_PASSWORD'),
'HOST': get_secret('DB_HOST'),
'PORT': get_secret('DB_PORT')
}
}
# Email Settings
EMAIL_HOST = ''
EMAIL_PORT = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_USE_TLS = ''
EMAIL_USE_SSL = ''
|
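The new settings call a get_secret helper that is defined in the project's base settings and not shown in this record. A rough sketch under that assumption (the secrets.json filename and the error type are guesses, not the project's actual code):
import json
import os
# Hypothetical helper: load deployment secrets from a JSON file next to the
# settings module and look them up by name.
_SECRETS_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'secrets.json')
with open(_SECRETS_PATH) as fd:
    _SECRETS = json.load(fd)
def get_secret(name):
    try:
        return _SECRETS[name]
    except KeyError:
        raise RuntimeError('Missing secret setting: {0}'.format(name))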
d8247d43c8026a8de39b09856a3f7beb235dc4f6
|
antxetamedia/multimedia/handlers.py
|
antxetamedia/multimedia/handlers.py
|
from boto.s3.connection import S3Connection
from boto.exception import S3ResponseError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
while bucket.endswith('-'):
bucket = bucket[:-1]
try:
bucket = conn.get_bucket(bucket)
except S3ResponseError:
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except (S3ResponseError, UnicodeDecodeError):
bucket = conn.create_bucket(bucket)
key = bucket.new_key(key)
try:
key.set_contents_from_file(fd)
except S3ResponseError:
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
|
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3ResponseError, S3CreateError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
while bucket.endswith('-'):
bucket = bucket[:-1]
try:
bucket = conn.get_bucket(bucket)
except S3ResponseError:
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except (S3ResponseError, UnicodeDecodeError):
bucket = conn.create_bucket(bucket)
except S3CreateError as e:
if e.status == 409:
bucket = Bucket(conn, bucket)
key = bucket.new_key(key)
try:
key.set_contents_from_file(fd)
except S3ResponseError:
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
|
Handle the case where the bucket already exists
|
Handle the case where the bucket already exists
|
Python
|
agpl-3.0
|
GISAElkartea/antxetamedia,GISAElkartea/antxetamedia,GISAElkartea/antxetamedia
|
from boto.s3.connection import S3Connection
from boto.exception import S3ResponseError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
while bucket.endswith('-'):
bucket = bucket[:-1]
try:
bucket = conn.get_bucket(bucket)
except S3ResponseError:
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except (S3ResponseError, UnicodeDecodeError):
bucket = conn.create_bucket(bucket)
key = bucket.new_key(key)
try:
key.set_contents_from_file(fd)
except S3ResponseError:
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
Handle the case where the bucket already exists
|
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3ResponseError, S3CreateError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
while bucket.endswith('-'):
bucket = bucket[:-1]
try:
bucket = conn.get_bucket(bucket)
except S3ResponseError:
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except (S3ResponseError, UnicodeDecodeError):
bucket = conn.create_bucket(bucket)
except S3CreateError as e:
if e.status == 409:
bucket = Bucket(conn, bucket)
key = bucket.new_key(key)
try:
key.set_contents_from_file(fd)
except S3ResponseError:
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
|
<commit_before>from boto.s3.connection import S3Connection
from boto.exception import S3ResponseError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
while bucket.endswith('-'):
bucket = bucket[:-1]
try:
bucket = conn.get_bucket(bucket)
except S3ResponseError:
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except (S3ResponseError, UnicodeDecodeError):
bucket = conn.create_bucket(bucket)
key = bucket.new_key(key)
try:
key.set_contents_from_file(fd)
except S3ResponseError:
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
<commit_msg>Handle the case where the bucket already exists<commit_after>
|
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3ResponseError, S3CreateError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
while bucket.endswith('-'):
bucket = bucket[:-1]
try:
bucket = conn.get_bucket(bucket)
except S3ResponseError:
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except (S3ResponseError, UnicodeDecodeError):
bucket = conn.create_bucket(bucket)
except S3CreateError as e:
if e.status == 409:
bucket = Bucket(conn, bucket)
key = bucket.new_key(key)
try:
key.set_contents_from_file(fd)
except S3ResponseError:
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
|
from boto.s3.connection import S3Connection
from boto.exception import S3ResponseError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
while bucket.endswith('-'):
bucket = bucket[:-1]
try:
bucket = conn.get_bucket(bucket)
except S3ResponseError:
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except (S3ResponseError, UnicodeDecodeError):
bucket = conn.create_bucket(bucket)
key = bucket.new_key(key)
try:
key.set_contents_from_file(fd)
except S3ResponseError:
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
Handle the case where the bucket already exists
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3ResponseError, S3CreateError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
while bucket.endswith('-'):
bucket = bucket[:-1]
try:
bucket = conn.get_bucket(bucket)
except S3ResponseError:
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except (S3ResponseError, UnicodeDecodeError):
bucket = conn.create_bucket(bucket)
except S3CreateError as e:
if e.status == 409:
bucket = Bucket(conn, bucket)
key = bucket.new_key(key)
try:
key.set_contents_from_file(fd)
except S3ResponseError:
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
|
<commit_before>from boto.s3.connection import S3Connection
from boto.exception import S3ResponseError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
while bucket.endswith('-'):
bucket = bucket[:-1]
try:
bucket = conn.get_bucket(bucket)
except S3ResponseError:
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except (S3ResponseError, UnicodeDecodeError):
bucket = conn.create_bucket(bucket)
key = bucket.new_key(key)
try:
key.set_contents_from_file(fd)
except S3ResponseError:
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
<commit_msg>Handle the case where the bucket already exists<commit_after>from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3ResponseError, S3CreateError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
while bucket.endswith('-'):
bucket = bucket[:-1]
try:
bucket = conn.get_bucket(bucket)
except S3ResponseError:
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except (S3ResponseError, UnicodeDecodeError):
bucket = conn.create_bucket(bucket)
except S3CreateError as e:
if e.status == 409:
bucket = Bucket(conn, bucket)
key = bucket.new_key(key)
try:
key.set_contents_from_file(fd)
except S3ResponseError:
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
|
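One caveat in the patched handler above: when S3CreateError carries a status other than 409, the exception is swallowed and bucket is still a plain string, so the later bucket.new_key(...) fails with an AttributeError rather than a useful error. A sketch of the same get-or-create idiom (boto 2 API) that re-raises in that case:
from boto.s3.bucket import Bucket
from boto.exception import S3CreateError, S3ResponseError
def get_or_create_bucket(conn, name, headers=None):
    try:
        return conn.get_bucket(name)
    except S3ResponseError:
        pass  # bucket not found (or not readable); try to create it
    try:
        return conn.create_bucket(name, headers=headers)
    except S3CreateError as e:
        if e.status == 409:  # already exists; wrap it without another round trip
            return Bucket(conn, name)
        raise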
2da190d0a6b6f8b6acc70b7e2b6e903283a1e735
|
taggit_bootstrap/widgets.py
|
taggit_bootstrap/widgets.py
|
from django import forms
from django.template.loader import render_to_string
from django.utils.safestring import mark_safe
from django.forms.util import flatatt
from django.utils.encoding import force_text
from taggit.utils import parse_tags, edit_string_for_tags
from django.utils import six
class TagsInput(forms.TextInput):
class Media:
css = { 'all': ('css/bootstrap-tagsinput.css','css/typeahead.css') }
js = ('js/typeahead.jquery.min.js', 'js/bootstrap-tagsinput.min.js')
def render(self, name, value, attrs={}):
if value is not None and not isinstance(value, six.string_types):
value = edit_string_for_tags([o.tag for o in value.select_related("tag")])
final_attrs = self.build_attrs(attrs, name=name)
return mark_safe(render_to_string('taggit_bootstrap/widget.html', {
'final_attrs': flatatt(final_attrs),
'value': value if value else '',
'id': final_attrs['id']
}))
|
from django import forms
from django.template.loader import render_to_string
from django.utils.safestring import mark_safe
from django.forms.utils import flatatt
from django.utils.encoding import force_text
from taggit.utils import parse_tags, edit_string_for_tags
from django.utils import six
class TagsInput(forms.TextInput):
class Media:
css = { 'all': ('css/bootstrap-tagsinput.css','css/typeahead.css') }
js = ('js/typeahead.jquery.min.js', 'js/bootstrap-tagsinput.min.js')
def render(self, name, value, attrs={}):
if value is not None and not isinstance(value, six.string_types):
value = edit_string_for_tags([o.tag for o in value.select_related("tag")])
final_attrs = self.build_attrs(attrs, name=name)
return mark_safe(render_to_string('taggit_bootstrap/widget.html', {
'final_attrs': flatatt(final_attrs),
'value': value if value else '',
'id': final_attrs['id']
}))
|
Update the django path to flatatt module
|
Update the django path to flatatt module
|
Python
|
mit
|
mi6gan/django-taggit-bootstrap,mi6gan/django-taggit-bootstrap
|
from django import forms
from django.template.loader import render_to_string
from django.utils.safestring import mark_safe
from django.forms.util import flatatt
from django.utils.encoding import force_text
from taggit.utils import parse_tags, edit_string_for_tags
from django.utils import six
class TagsInput(forms.TextInput):
class Media:
css = { 'all': ('css/bootstrap-tagsinput.css','css/typeahead.css') }
js = ('js/typeahead.jquery.min.js', 'js/bootstrap-tagsinput.min.js')
def render(self, name, value, attrs={}):
if value is not None and not isinstance(value, six.string_types):
value = edit_string_for_tags([o.tag for o in value.select_related("tag")])
final_attrs = self.build_attrs(attrs, name=name)
return mark_safe(render_to_string('taggit_bootstrap/widget.html', {
'final_attrs': flatatt(final_attrs),
'value': value if value else '',
'id': final_attrs['id']
}))
Update the django path to flatatt module
|
from django import forms
from django.template.loader import render_to_string
from django.utils.safestring import mark_safe
from django.forms.utils import flatatt
from django.utils.encoding import force_text
from taggit.utils import parse_tags, edit_string_for_tags
from django.utils import six
class TagsInput(forms.TextInput):
class Media:
css = { 'all': ('css/bootstrap-tagsinput.css','css/typeahead.css') }
js = ('js/typeahead.jquery.min.js', 'js/bootstrap-tagsinput.min.js')
def render(self, name, value, attrs={}):
if value is not None and not isinstance(value, six.string_types):
value = edit_string_for_tags([o.tag for o in value.select_related("tag")])
final_attrs = self.build_attrs(attrs, name=name)
return mark_safe(render_to_string('taggit_bootstrap/widget.html', {
'final_attrs': flatatt(final_attrs),
'value': value if value else '',
'id': final_attrs['id']
}))
|
<commit_before>from django import forms
from django.template.loader import render_to_string
from django.utils.safestring import mark_safe
from django.forms.util import flatatt
from django.utils.encoding import force_text
from taggit.utils import parse_tags, edit_string_for_tags
from django.utils import six
class TagsInput(forms.TextInput):
class Media:
css = { 'all': ('css/bootstrap-tagsinput.css','css/typeahead.css') }
js = ('js/typeahead.jquery.min.js', 'js/bootstrap-tagsinput.min.js')
def render(self, name, value, attrs={}):
if value is not None and not isinstance(value, six.string_types):
value = edit_string_for_tags([o.tag for o in value.select_related("tag")])
final_attrs = self.build_attrs(attrs, name=name)
return mark_safe(render_to_string('taggit_bootstrap/widget.html', {
'final_attrs': flatatt(final_attrs),
'value': value if value else '',
'id': final_attrs['id']
}))
<commit_msg>Update the django path to flatatt module<commit_after>
|
from django import forms
from django.template.loader import render_to_string
from django.utils.safestring import mark_safe
from django.forms.utils import flatatt
from django.utils.encoding import force_text
from taggit.utils import parse_tags, edit_string_for_tags
from django.utils import six
class TagsInput(forms.TextInput):
class Media:
css = { 'all': ('css/bootstrap-tagsinput.css','css/typeahead.css') }
js = ('js/typeahead.jquery.min.js', 'js/bootstrap-tagsinput.min.js')
def render(self, name, value, attrs={}):
if value is not None and not isinstance(value, six.string_types):
value = edit_string_for_tags([o.tag for o in value.select_related("tag")])
final_attrs = self.build_attrs(attrs, name=name)
return mark_safe(render_to_string('taggit_bootstrap/widget.html', {
'final_attrs': flatatt(final_attrs),
'value': value if value else '',
'id': final_attrs['id']
}))
|
from django import forms
from django.template.loader import render_to_string
from django.utils.safestring import mark_safe
from django.forms.util import flatatt
from django.utils.encoding import force_text
from taggit.utils import parse_tags, edit_string_for_tags
from django.utils import six
class TagsInput(forms.TextInput):
class Media:
css = { 'all': ('css/bootstrap-tagsinput.css','css/typeahead.css') }
js = ('js/typeahead.jquery.min.js', 'js/bootstrap-tagsinput.min.js')
def render(self, name, value, attrs={}):
if value is not None and not isinstance(value, six.string_types):
value = edit_string_for_tags([o.tag for o in value.select_related("tag")])
final_attrs = self.build_attrs(attrs, name=name)
return mark_safe(render_to_string('taggit_bootstrap/widget.html', {
'final_attrs': flatatt(final_attrs),
'value': value if value else '',
'id': final_attrs['id']
}))
Update the django path to flatatt module
from django import forms
from django.template.loader import render_to_string
from django.utils.safestring import mark_safe
from django.forms.utils import flatatt
from django.utils.encoding import force_text
from taggit.utils import parse_tags, edit_string_for_tags
from django.utils import six
class TagsInput(forms.TextInput):
class Media:
css = { 'all': ('css/bootstrap-tagsinput.css','css/typeahead.css') }
js = ('js/typeahead.jquery.min.js', 'js/bootstrap-tagsinput.min.js')
def render(self, name, value, attrs={}):
if value is not None and not isinstance(value, six.string_types):
value = edit_string_for_tags([o.tag for o in value.select_related("tag")])
final_attrs = self.build_attrs(attrs, name=name)
return mark_safe(render_to_string('taggit_bootstrap/widget.html', {
'final_attrs': flatatt(final_attrs),
'value': value if value else '',
'id': final_attrs['id']
}))
|
<commit_before>from django import forms
from django.template.loader import render_to_string
from django.utils.safestring import mark_safe
from django.forms.util import flatatt
from django.utils.encoding import force_text
from taggit.utils import parse_tags, edit_string_for_tags
from django.utils import six
class TagsInput(forms.TextInput):
class Media:
css = { 'all': ('css/bootstrap-tagsinput.css','css/typeahead.css') }
js = ('js/typeahead.jquery.min.js', 'js/bootstrap-tagsinput.min.js')
def render(self, name, value, attrs={}):
if value is not None and not isinstance(value, six.string_types):
value = edit_string_for_tags([o.tag for o in value.select_related("tag")])
final_attrs = self.build_attrs(attrs, name=name)
return mark_safe(render_to_string('taggit_bootstrap/widget.html', {
'final_attrs': flatatt(final_attrs),
'value': value if value else '',
'id': final_attrs['id']
}))
<commit_msg>Update the django path to flatatt module<commit_after>from django import forms
from django.template.loader import render_to_string
from django.utils.safestring import mark_safe
from django.forms.utils import flatatt
from django.utils.encoding import force_text
from taggit.utils import parse_tags, edit_string_for_tags
from django.utils import six
class TagsInput(forms.TextInput):
class Media:
css = { 'all': ('css/bootstrap-tagsinput.css','css/typeahead.css') }
js = ('js/typeahead.jquery.min.js', 'js/bootstrap-tagsinput.min.js')
def render(self, name, value, attrs={}):
if value is not None and not isinstance(value, six.string_types):
value = edit_string_for_tags([o.tag for o in value.select_related("tag")])
final_attrs = self.build_attrs(attrs, name=name)
return mark_safe(render_to_string('taggit_bootstrap/widget.html', {
'final_attrs': flatatt(final_attrs),
'value': value if value else '',
'id': final_attrs['id']
}))
|
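django.forms.util was renamed to django.forms.utils (around Django 1.7, with the old path dropped in a later release; exact versions from memory). Code that must import flatatt on both sides of the rename can fall back, as in this sketch:
# Prefer the new module path, fall back to the pre-rename one.
try:
    from django.forms.utils import flatatt
except ImportError:
    from django.forms.util import flatatt
# flatatt renders a dict of HTML attributes as a string:
# flatatt({'id': 'tags'}) -> ' id="tags"'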
e61bd9a56b31dde461ad0cb82e3140bd0dbfa958
|
ckanext/tayside/logic/action/update.py
|
ckanext/tayside/logic/action/update.py
|
from ckan.logic.action import update as update_core
import ckan.lib.uploader as uploader
def config_option_update(context, data_dict):
upload = uploader.get_uploader('admin')
upload.update_data_dict(data_dict, 'hero_image_url', 'hero_image_upload',
'clear_hero_image_upload')
upload.update_data_dict(data_dict, 'site_symbol_url', 'site_symbol_upload',
'clear_site_symbol_upload')
upload.upload(uploader.get_max_image_size())
return update_core.config_option_update(context, data_dict)
|
from ckan.logic.action import update as update_core
import ckan.lib.uploader as uploader
def config_option_update(context, data_dict):
upload = uploader.get_uploader('admin')
upload.update_data_dict(data_dict, 'hero_image_url', 'hero_image_upload',
'clear_hero_image_upload')
upload.upload(uploader.get_max_image_size())
upload.update_data_dict(data_dict, 'site_symbol_url', 'site_symbol_upload',
'clear_site_symbol_upload')
upload.upload(uploader.get_max_image_size())
return update_core.config_option_update(context, data_dict)
|
Fix bug for saving images in config
|
Fix bug for saving images in config
|
Python
|
agpl-3.0
|
ViderumGlobal/ckanext-tayside,ViderumGlobal/ckanext-tayside,ViderumGlobal/ckanext-tayside,ViderumGlobal/ckanext-tayside
|
from ckan.logic.action import update as update_core
import ckan.lib.uploader as uploader
def config_option_update(context, data_dict):
upload = uploader.get_uploader('admin')
upload.update_data_dict(data_dict, 'hero_image_url', 'hero_image_upload',
'clear_hero_image_upload')
upload.update_data_dict(data_dict, 'site_symbol_url', 'site_symbol_upload',
'clear_site_symbol_upload')
upload.upload(uploader.get_max_image_size())
return update_core.config_option_update(context, data_dict)
Fix bug for saving images in config
|
from ckan.logic.action import update as update_core
import ckan.lib.uploader as uploader
def config_option_update(context, data_dict):
upload = uploader.get_uploader('admin')
upload.update_data_dict(data_dict, 'hero_image_url', 'hero_image_upload',
'clear_hero_image_upload')
upload.upload(uploader.get_max_image_size())
upload.update_data_dict(data_dict, 'site_symbol_url', 'site_symbol_upload',
'clear_site_symbol_upload')
upload.upload(uploader.get_max_image_size())
return update_core.config_option_update(context, data_dict)
|
<commit_before>from ckan.logic.action import update as update_core
import ckan.lib.uploader as uploader
def config_option_update(context, data_dict):
upload = uploader.get_uploader('admin')
upload.update_data_dict(data_dict, 'hero_image_url', 'hero_image_upload',
'clear_hero_image_upload')
upload.update_data_dict(data_dict, 'site_symbol_url', 'site_symbol_upload',
'clear_site_symbol_upload')
upload.upload(uploader.get_max_image_size())
return update_core.config_option_update(context, data_dict)
<commit_msg>Fix bug for saving images in config<commit_after>
|
from ckan.logic.action import update as update_core
import ckan.lib.uploader as uploader
def config_option_update(context, data_dict):
upload = uploader.get_uploader('admin')
upload.update_data_dict(data_dict, 'hero_image_url', 'hero_image_upload',
'clear_hero_image_upload')
upload.upload(uploader.get_max_image_size())
upload.update_data_dict(data_dict, 'site_symbol_url', 'site_symbol_upload',
'clear_site_symbol_upload')
upload.upload(uploader.get_max_image_size())
return update_core.config_option_update(context, data_dict)
|
from ckan.logic.action import update as update_core
import ckan.lib.uploader as uploader
def config_option_update(context, data_dict):
upload = uploader.get_uploader('admin')
upload.update_data_dict(data_dict, 'hero_image_url', 'hero_image_upload',
'clear_hero_image_upload')
upload.update_data_dict(data_dict, 'site_symbol_url', 'site_symbol_upload',
'clear_site_symbol_upload')
upload.upload(uploader.get_max_image_size())
return update_core.config_option_update(context, data_dict)
Fix bug for saving images in config
from ckan.logic.action import update as update_core
import ckan.lib.uploader as uploader
def config_option_update(context, data_dict):
upload = uploader.get_uploader('admin')
upload.update_data_dict(data_dict, 'hero_image_url', 'hero_image_upload',
'clear_hero_image_upload')
upload.upload(uploader.get_max_image_size())
upload.update_data_dict(data_dict, 'site_symbol_url', 'site_symbol_upload',
'clear_site_symbol_upload')
upload.upload(uploader.get_max_image_size())
return update_core.config_option_update(context, data_dict)
|
<commit_before>from ckan.logic.action import update as update_core
import ckan.lib.uploader as uploader
def config_option_update(context, data_dict):
upload = uploader.get_uploader('admin')
upload.update_data_dict(data_dict, 'hero_image_url', 'hero_image_upload',
'clear_hero_image_upload')
upload.update_data_dict(data_dict, 'site_symbol_url', 'site_symbol_upload',
'clear_site_symbol_upload')
upload.upload(uploader.get_max_image_size())
return update_core.config_option_update(context, data_dict)
<commit_msg>Fix bug for saving images in config<commit_after>from ckan.logic.action import update as update_core
import ckan.lib.uploader as uploader
def config_option_update(context, data_dict):
upload = uploader.get_uploader('admin')
upload.update_data_dict(data_dict, 'hero_image_url', 'hero_image_upload',
'clear_hero_image_upload')
upload.upload(uploader.get_max_image_size())
upload.update_data_dict(data_dict, 'site_symbol_url', 'site_symbol_upload',
'clear_site_symbol_upload')
upload.upload(uploader.get_max_image_size())
return update_core.config_option_update(context, data_dict)
|
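The fix works because the uploader apparently tracks one pending file at a time, so each update_data_dict registration has to be flushed with upload() before the next registration overwrites it. The same pattern written as a loop (field names come from the record; the wrapper function itself is illustrative):
def upload_config_images(upload, data_dict, max_size):
    # Flush each registered upload before registering the next one.
    fields = [
        ('hero_image_url', 'hero_image_upload', 'clear_hero_image_upload'),
        ('site_symbol_url', 'site_symbol_upload', 'clear_site_symbol_upload'),
    ]
    for url_field, file_field, clear_field in fields:
        upload.update_data_dict(data_dict, url_field, file_field, clear_field)
        upload.upload(max_size)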
b0fd983269fca4c514a8a21d0bb17d47d46780c3
|
system_maintenance/tests/functional/base.py
|
system_maintenance/tests/functional/base.py
|
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from system_maintenance.tests.utilities import populate_test_db
class FunctionalTest(StaticLiveServerTestCase):
def setUp(self):
populate_test_db()
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
self.username_inputbox = None
self.password_inputbox = None
self.login_button = None
def tearDown(self):
self.browser.quit()
def find_authentication_elements(self):
self.username_inputbox = self.browser.find_element_by_id('id_username')
self.password_inputbox = self.browser.find_element_by_id('id_password')
self.login_button = self.browser.find_element_by_tag_name('button')
def login_as(self, username):
self.find_authentication_elements()
self.username_inputbox.send_keys(username)
self.password_inputbox.send_keys(username)
self.password_inputbox.send_keys(Keys.ENTER)
def system_maintenance_url(self, url_stem=''):
return '{}/system_maintenance/{}'.format(
self.live_server_url, url_stem)
|
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from system_maintenance.tests.utilities import populate_test_db
class FunctionalTest(StaticLiveServerTestCase):
def setUp(self):
populate_test_db()
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
self.username_inputbox = None
self.password_inputbox = None
self.login_button = None
def tearDown(self):
self.browser.quit()
def find_authentication_elements(self):
self.username_inputbox = self.browser.find_element_by_id('id_username')
self.password_inputbox = self.browser.find_element_by_id('id_password')
self.login_button = self.browser.find_element_by_tag_name('button')
def login_as(self, username):
self.find_authentication_elements()
self.username_inputbox.send_keys(username)
self.password_inputbox.send_keys(username)
self.login_button.click()
def system_maintenance_url(self, url_stem=''):
return '{}/system_maintenance/{}'.format(
self.live_server_url, url_stem)
|
Make functional testing compatible with selenium 3.141.0
|
Make functional testing compatible with selenium 3.141.0
|
Python
|
bsd-3-clause
|
mfcovington/django-system-maintenance,mfcovington/django-system-maintenance,mfcovington/django-system-maintenance
|
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from system_maintenance.tests.utilities import populate_test_db
class FunctionalTest(StaticLiveServerTestCase):
def setUp(self):
populate_test_db()
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
self.username_inputbox = None
self.password_inputbox = None
self.login_button = None
def tearDown(self):
self.browser.quit()
def find_authentication_elements(self):
self.username_inputbox = self.browser.find_element_by_id('id_username')
self.password_inputbox = self.browser.find_element_by_id('id_password')
self.login_button = self.browser.find_element_by_tag_name('button')
def login_as(self, username):
self.find_authentication_elements()
self.username_inputbox.send_keys(username)
self.password_inputbox.send_keys(username)
self.password_inputbox.send_keys(Keys.ENTER)
def system_maintenance_url(self, url_stem=''):
return '{}/system_maintenance/{}'.format(
self.live_server_url, url_stem)
Make functional testing compatible with selenium 3.141.0
|
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from system_maintenance.tests.utilities import populate_test_db
class FunctionalTest(StaticLiveServerTestCase):
def setUp(self):
populate_test_db()
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
self.username_inputbox = None
self.password_inputbox = None
self.login_button = None
def tearDown(self):
self.browser.quit()
def find_authentication_elements(self):
self.username_inputbox = self.browser.find_element_by_id('id_username')
self.password_inputbox = self.browser.find_element_by_id('id_password')
self.login_button = self.browser.find_element_by_tag_name('button')
def login_as(self, username):
self.find_authentication_elements()
self.username_inputbox.send_keys(username)
self.password_inputbox.send_keys(username)
self.login_button.click()
def system_maintenance_url(self, url_stem=''):
return '{}/system_maintenance/{}'.format(
self.live_server_url, url_stem)
|
<commit_before>from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from system_maintenance.tests.utilities import populate_test_db
class FunctionalTest(StaticLiveServerTestCase):
def setUp(self):
populate_test_db()
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
self.username_inputbox = None
self.password_inputbox = None
self.login_button = None
def tearDown(self):
self.browser.quit()
def find_authentication_elements(self):
self.username_inputbox = self.browser.find_element_by_id('id_username')
self.password_inputbox = self.browser.find_element_by_id('id_password')
self.login_button = self.browser.find_element_by_tag_name('button')
def login_as(self, username):
self.find_authentication_elements()
self.username_inputbox.send_keys(username)
self.password_inputbox.send_keys(username)
self.password_inputbox.send_keys(Keys.ENTER)
def system_maintenance_url(self, url_stem=''):
return '{}/system_maintenance/{}'.format(
self.live_server_url, url_stem)
<commit_msg>Make functional testing compatible with selenium 3.141.0<commit_after>
|
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from system_maintenance.tests.utilities import populate_test_db
class FunctionalTest(StaticLiveServerTestCase):
def setUp(self):
populate_test_db()
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
self.username_inputbox = None
self.password_inputbox = None
self.login_button = None
def tearDown(self):
self.browser.quit()
def find_authentication_elements(self):
self.username_inputbox = self.browser.find_element_by_id('id_username')
self.password_inputbox = self.browser.find_element_by_id('id_password')
self.login_button = self.browser.find_element_by_tag_name('button')
def login_as(self, username):
self.find_authentication_elements()
self.username_inputbox.send_keys(username)
self.password_inputbox.send_keys(username)
self.login_button.click()
def system_maintenance_url(self, url_stem=''):
return '{}/system_maintenance/{}'.format(
self.live_server_url, url_stem)
|
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from system_maintenance.tests.utilities import populate_test_db
class FunctionalTest(StaticLiveServerTestCase):
def setUp(self):
populate_test_db()
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
self.username_inputbox = None
self.password_inputbox = None
self.login_button = None
def tearDown(self):
self.browser.quit()
def find_authentication_elements(self):
self.username_inputbox = self.browser.find_element_by_id('id_username')
self.password_inputbox = self.browser.find_element_by_id('id_password')
self.login_button = self.browser.find_element_by_tag_name('button')
def login_as(self, username):
self.find_authentication_elements()
self.username_inputbox.send_keys(username)
self.password_inputbox.send_keys(username)
self.password_inputbox.send_keys(Keys.ENTER)
def system_maintenance_url(self, url_stem=''):
return '{}/system_maintenance/{}'.format(
self.live_server_url, url_stem)
Make functional testing compatible with selenium 3.141.0
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from system_maintenance.tests.utilities import populate_test_db
class FunctionalTest(StaticLiveServerTestCase):
def setUp(self):
populate_test_db()
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
self.username_inputbox = None
self.password_inputbox = None
self.login_button = None
def tearDown(self):
self.browser.quit()
def find_authentication_elements(self):
self.username_inputbox = self.browser.find_element_by_id('id_username')
self.password_inputbox = self.browser.find_element_by_id('id_password')
self.login_button = self.browser.find_element_by_tag_name('button')
def login_as(self, username):
self.find_authentication_elements()
self.username_inputbox.send_keys(username)
self.password_inputbox.send_keys(username)
self.login_button.click()
def system_maintenance_url(self, url_stem=''):
return '{}/system_maintenance/{}'.format(
self.live_server_url, url_stem)
|
<commit_before>from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from system_maintenance.tests.utilities import populate_test_db
class FunctionalTest(StaticLiveServerTestCase):
def setUp(self):
populate_test_db()
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
self.username_inputbox = None
self.password_inputbox = None
self.login_button = None
def tearDown(self):
self.browser.quit()
def find_authentication_elements(self):
self.username_inputbox = self.browser.find_element_by_id('id_username')
self.password_inputbox = self.browser.find_element_by_id('id_password')
self.login_button = self.browser.find_element_by_tag_name('button')
def login_as(self, username):
self.find_authentication_elements()
self.username_inputbox.send_keys(username)
self.password_inputbox.send_keys(username)
self.password_inputbox.send_keys(Keys.ENTER)
def system_maintenance_url(self, url_stem=''):
return '{}/system_maintenance/{}'.format(
self.live_server_url, url_stem)
<commit_msg>Make functional testing compatible with selenium 3.141.0<commit_after>from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from system_maintenance.tests.utilities import populate_test_db
class FunctionalTest(StaticLiveServerTestCase):
def setUp(self):
populate_test_db()
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
self.username_inputbox = None
self.password_inputbox = None
self.login_button = None
def tearDown(self):
self.browser.quit()
def find_authentication_elements(self):
self.username_inputbox = self.browser.find_element_by_id('id_username')
self.password_inputbox = self.browser.find_element_by_id('id_password')
self.login_button = self.browser.find_element_by_tag_name('button')
def login_as(self, username):
self.find_authentication_elements()
self.username_inputbox.send_keys(username)
self.password_inputbox.send_keys(username)
self.login_button.click()
def system_maintenance_url(self, url_stem=''):
return '{}/system_maintenance/{}'.format(
self.live_server_url, url_stem)
|
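Under geckodriver with selenium 3.141, submitting a form by sending Keys.ENTER to an input reportedly became unreliable, which is presumably why the test switches to clicking the button. A standalone sketch of the click-based login (the URL and credentials are placeholders):
from selenium import webdriver
browser = webdriver.Firefox()
try:
    browser.get('http://localhost:8000/system_maintenance/')
    browser.find_element_by_id('id_username').send_keys('admin')
    browser.find_element_by_id('id_password').send_keys('admin')
    browser.find_element_by_tag_name('button').click()
finally:
    browser.quit()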
1ec98e066eda1faa212816abf1de99c4ed87f8a0
|
mysite/multitenancy/models.py
|
mysite/multitenancy/models.py
|
import os
from django.conf import settings
from django.db import models
from . import tenant
if 'DJANGO_TENANT' in os.environ:
tenant._set_for_tenant(os.environ['DJANGO_TENANT'])
else:
tenant._set_default()
tenant._patch_table_names()
class Tenant(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL)
network_name = models.CharField(max_length=20)
slug = models.SlugField()
@property
def network_url(self):
if settings.DEBUG:
return 'http://%s.localhost:8080' % self.slug
else:
# Do the Site import here to avoid messing up the
# monkeypatching of _meta.db_table
from django.contrib.sites.models import Site
return 'http://%s.%s' % (self.slug, Site.objects.get_current())
|
import os
from django.conf import settings
from django.db import models
from . import tenant
if 'DJANGO_TENANT' in os.environ:
tenant._set_for_tenant(os.environ['DJANGO_TENANT'])
else:
tenant._set_default()
tenant._patch_table_names()
class Tenant(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL)
network_name = models.CharField(max_length=20, unique=True)
slug = models.SlugField(unique=True)
@property
def network_url(self):
if settings.DEBUG:
return 'http://%s.localhost:8080' % self.slug
else:
# Do the Site import here to avoid messing up the
# monkeypatching of _meta.db_table
from django.contrib.sites.models import Site
return 'http://%s.%s' % (self.slug, Site.objects.get_current())
|
Make network name and slug unique
|
Make network name and slug unique
|
Python
|
bsd-3-clause
|
Kvoti/ditto,Kvoti/ditto,Kvoti/ditto,Kvoti/ditto,Kvoti/ditto
|
import os
from django.conf import settings
from django.db import models
from . import tenant
if 'DJANGO_TENANT' in os.environ:
tenant._set_for_tenant(os.environ['DJANGO_TENANT'])
else:
tenant._set_default()
tenant._patch_table_names()
class Tenant(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL)
network_name = models.CharField(max_length=20)
slug = models.SlugField()
@property
def network_url(self):
if settings.DEBUG:
return 'http://%s.localhost:8080' % self.slug
else:
# Do the Site import here to avoid messing up the
# monkeypatching of _meta.db_table
from django.contrib.sites.models import Site
return 'http://%s.%s' % (self.slug, Site.objects.get_current())
Make network name and slug unique
|
import os
from django.conf import settings
from django.db import models
from . import tenant
if 'DJANGO_TENANT' in os.environ:
tenant._set_for_tenant(os.environ['DJANGO_TENANT'])
else:
tenant._set_default()
tenant._patch_table_names()
class Tenant(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL)
network_name = models.CharField(max_length=20, unique=True)
slug = models.SlugField(unique=True)
@property
def network_url(self):
if settings.DEBUG:
return 'http://%s.localhost:8080' % self.slug
else:
# Do the Site import here to avoid messing up the
# monkeypatching of _meta.db_table
from django.contrib.sites.models import Site
return 'http://%s.%s' % (self.slug, Site.objects.get_current())
|
<commit_before>import os
from django.conf import settings
from django.db import models
from . import tenant
if 'DJANGO_TENANT' in os.environ:
tenant._set_for_tenant(os.environ['DJANGO_TENANT'])
else:
tenant._set_default()
tenant._patch_table_names()
class Tenant(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL)
network_name = models.CharField(max_length=20)
slug = models.SlugField()
@property
def network_url(self):
if settings.DEBUG:
return 'http://%s.localhost:8080' % self.slug
else:
# Do the Site import here to avoid messing up the
# monkeypatching of _meta.db_table
from django.contrib.sites.models import Site
return 'http://%s.%s' % (self.slug, Site.objects.get_current())
<commit_msg>Make network name and slug unique<commit_after>
|
import os
from django.conf import settings
from django.db import models
from . import tenant
if 'DJANGO_TENANT' in os.environ:
tenant._set_for_tenant(os.environ['DJANGO_TENANT'])
else:
tenant._set_default()
tenant._patch_table_names()
class Tenant(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL)
network_name = models.CharField(max_length=20, unique=True)
slug = models.SlugField(unique=True)
@property
def network_url(self):
if settings.DEBUG:
return 'http://%s.localhost:8080' % self.slug
else:
# Do the Site import here to avoid messing up the
# monkeypatching of _meta.db_table
from django.contrib.sites.models import Site
return 'http://%s.%s' % (self.slug, Site.objects.get_current())
|
import os
from django.conf import settings
from django.db import models
from . import tenant
if 'DJANGO_TENANT' in os.environ:
tenant._set_for_tenant(os.environ['DJANGO_TENANT'])
else:
tenant._set_default()
tenant._patch_table_names()
class Tenant(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL)
network_name = models.CharField(max_length=20)
slug = models.SlugField()
@property
def network_url(self):
if settings.DEBUG:
return 'http://%s.localhost:8080' % self.slug
else:
# Do the Site import here to avoid messing up the
# monkeypatching of _meta.db_table
from django.contrib.sites.models import Site
return 'http://%s.%s' % (self.slug, Site.objects.get_current())
Make network name and slug unique
import os
from django.conf import settings
from django.db import models
from . import tenant
if 'DJANGO_TENANT' in os.environ:
tenant._set_for_tenant(os.environ['DJANGO_TENANT'])
else:
tenant._set_default()
tenant._patch_table_names()
class Tenant(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL)
network_name = models.CharField(max_length=20, unique=True)
slug = models.SlugField(unique=True)
@property
def network_url(self):
if settings.DEBUG:
return 'http://%s.localhost:8080' % self.slug
else:
# Do the Site import here to avoid messing up the
# monkeypatching of _meta.db_table
from django.contrib.sites.models import Site
return 'http://%s.%s' % (self.slug, Site.objects.get_current())
|
<commit_before>import os
from django.conf import settings
from django.db import models
from . import tenant
if 'DJANGO_TENANT' in os.environ:
tenant._set_for_tenant(os.environ['DJANGO_TENANT'])
else:
tenant._set_default()
tenant._patch_table_names()
class Tenant(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL)
network_name = models.CharField(max_length=20)
slug = models.SlugField()
@property
def network_url(self):
if settings.DEBUG:
return 'http://%s.localhost:8080' % self.slug
else:
# Do the Site import here to avoid messing up the
# monkeypatching of _meta.db_table
from django.contrib.sites.models import Site
return 'http://%s.%s' % (self.slug, Site.objects.get_current())
<commit_msg>Make network name and slug unique<commit_after>import os
from django.conf import settings
from django.db import models
from . import tenant
if 'DJANGO_TENANT' in os.environ:
tenant._set_for_tenant(os.environ['DJANGO_TENANT'])
else:
tenant._set_default()
tenant._patch_table_names()
class Tenant(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL)
network_name = models.CharField(max_length=20, unique=True)
slug = models.SlugField(unique=True)
@property
def network_url(self):
if settings.DEBUG:
return 'http://%s.localhost:8080' % self.slug
else:
# Do the Site import here to avoid messing up the
# monkeypatching of _meta.db_table
from django.contrib.sites.models import Site
return 'http://%s.%s' % (self.slug, Site.objects.get_current())
|
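Because unique=True changes the database schema, the field change above also needs a migration. A sketch of what makemigrations would generate (the app label and dependency name are assumptions):
from django.db import migrations, models
class Migration(migrations.Migration):
    dependencies = [
        ('multitenancy', '0001_initial'),
    ]
    operations = [
        migrations.AlterField('tenant', 'network_name',
                              models.CharField(max_length=20, unique=True)),
        migrations.AlterField('tenant', 'slug',
                              models.SlugField(unique=True)),
    ]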
7408862af1a6dc618e9dd78ece2120533466ab75
|
test/settings/gyptest-settings.py
|
test/settings/gyptest-settings.py
|
#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Smoke-tests 'settings' blocks.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('settings.gyp')
test.build('test.gyp', test.ALL)
test.pass_test()
|
#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Smoke-tests 'settings' blocks.
"""
import TestGyp
# 'settings' is only supported for make and scons (and will be removed there as
# well eventually).
test = TestGyp.TestGyp(formats=['make', 'scons'])
test.run_gyp('settings.gyp')
test.build('test.gyp', test.ALL)
test.pass_test()
|
Make new settings test not run for xcode generator.
|
Make new settings test not run for xcode generator.
TBR=evan
Review URL: http://codereview.chromium.org/7472006
|
Python
|
bsd-3-clause
|
csulmone/gyp,csulmone/gyp,csulmone/gyp,csulmone/gyp
|
#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Smoke-tests 'settings' blocks.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('settings.gyp')
test.build('test.gyp', test.ALL)
test.pass_test()
Make new settings test not run for xcode generator.
TBR=evan
Review URL: http://codereview.chromium.org/7472006
|
#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Smoke-tests 'settings' blocks.
"""
import TestGyp
# 'settings' is only supported for make and scons (and will be removed there as
# well eventually).
test = TestGyp.TestGyp(formats=['make', 'scons'])
test.run_gyp('settings.gyp')
test.build('test.gyp', test.ALL)
test.pass_test()
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Smoke-tests 'settings' blocks.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('settings.gyp')
test.build('test.gyp', test.ALL)
test.pass_test()
<commit_msg>Make new settings test not run for xcode generator.
TBR=evan
Review URL: http://codereview.chromium.org/7472006<commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Smoke-tests 'settings' blocks.
"""
import TestGyp
# 'settings' is only supported for make and scons (and will be removed there as
# well eventually).
test = TestGyp.TestGyp(formats=['make', 'scons'])
test.run_gyp('settings.gyp')
test.build('test.gyp', test.ALL)
test.pass_test()
|
#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Smoke-tests 'settings' blocks.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('settings.gyp')
test.build('test.gyp', test.ALL)
test.pass_test()
Make new settings test not run for xcode generator.
TBR=evan
Review URL: http://codereview.chromium.org/7472006
#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Smoke-tests 'settings' blocks.
"""
import TestGyp
# 'settings' is only supported for make and scons (and will be removed there as
# well eventually).
test = TestGyp.TestGyp(formats=['make', 'scons'])
test.run_gyp('settings.gyp')
test.build('test.gyp', test.ALL)
test.pass_test()
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Smoke-tests 'settings' blocks.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('settings.gyp')
test.build('test.gyp', test.ALL)
test.pass_test()
<commit_msg>Make new settings test not run for xcode generator.
TBR=evan
Review URL: http://codereview.chromium.org/7472006<commit_after>#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Smoke-tests 'settings' blocks.
"""
import TestGyp
# 'settings' is only supported for make and scons (and will be removed there as
# well eventually).
test = TestGyp.TestGyp(formats=['make', 'scons'])
test.run_gyp('settings.gyp')
test.build('test.gyp', test.ALL)
test.pass_test()
|
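TestGyp's formats argument restricts a test to the named generators; when the active generator is not listed, the harness skips the test rather than failing it. A rough standalone sketch of that skip-by-format pattern (the environment variable is an assumption, not TestGyp's real mechanism):
import os
import sys
def require_formats(supported):
    fmt = os.environ.get('TESTGYP_FORMAT', 'make')
    if fmt not in supported:
        print('skipping: generator %r is not one of %r' % (fmt, supported))
        sys.exit(0)  # exit cleanly so the runner records a skip, not a failure
require_formats(['make', 'scons'])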
ff2def37816fbf1a8cf726914368036c0081e869
|
tests/integration/shared.py
|
tests/integration/shared.py
|
class ServiceTests(object):
def test_bash(self):
return self.check(
input='bc -q\n1+1\nquit()',
type='org.tyrion.service.bash',
output='2',
error='',
code='0',
)
def test_python(self):
return self.check(
input='print 1+1',
type='org.tyrion.service.python',
output='2',
error='',
code='0',
)
def test_ruby(self):
return self.check(
input='puts 1+1',
type='org.tyrion.service.ruby',
output='2',
error='',
code='0',
)
def test_timeout_error(self):
return self.check(
input='sleep 10',
type='org.tyrion.service.bash',
output='',
error=None,
code='15',
timeout=2,
)
|
class ServiceTests(object):
def test_bash(self):
return self.check(
input='bc -q\n1+1\nquit()',
type='org.tyrion.service.bash',
output='2',
error='',
code='0',
)
def test_python(self):
return self.check(
input='print 1+1',
type='org.tyrion.service.python',
output='2',
error='',
code='0',
)
def test_ruby(self):
return self.check(
input='puts 1+1',
type='org.tyrion.service.ruby',
output='2',
error='',
code='0',
)
def test_timeout_error(self):
return self.check(
input='echo test\nsleep 10',
type='org.tyrion.service.bash',
output='test',
error=None,
code='15',
timeout=1,
)
|
Tweak integration timeout test to match gtest
|
Tweak integration timeout test to match gtest
|
Python
|
mit
|
silas/tyrion,silas/tyrion,silas/tyrion,silas/tyrion,silas/tyrion
|
class ServiceTests(object):
def test_bash(self):
return self.check(
input='bc -q\n1+1\nquit()',
type='org.tyrion.service.bash',
output='2',
error='',
code='0',
)
def test_python(self):
return self.check(
input='print 1+1',
type='org.tyrion.service.python',
output='2',
error='',
code='0',
)
def test_ruby(self):
return self.check(
input='puts 1+1',
type='org.tyrion.service.ruby',
output='2',
error='',
code='0',
)
def test_timeout_error(self):
return self.check(
input='sleep 10',
type='org.tyrion.service.bash',
output='',
error=None,
code='15',
timeout=2,
)
Tweak integration timeout test to match gtest
|
class ServiceTests(object):
def test_bash(self):
return self.check(
input='bc -q\n1+1\nquit()',
type='org.tyrion.service.bash',
output='2',
error='',
code='0',
)
def test_python(self):
return self.check(
input='print 1+1',
type='org.tyrion.service.python',
output='2',
error='',
code='0',
)
def test_ruby(self):
return self.check(
input='puts 1+1',
type='org.tyrion.service.ruby',
output='2',
error='',
code='0',
)
def test_timeout_error(self):
return self.check(
input='echo test\nsleep 10',
type='org.tyrion.service.bash',
output='test',
error=None,
code='15',
timeout=1,
)
|
<commit_before>
class ServiceTests(object):
def test_bash(self):
return self.check(
input='bc -q\n1+1\nquit()',
type='org.tyrion.service.bash',
output='2',
error='',
code='0',
)
def test_python(self):
return self.check(
input='print 1+1',
type='org.tyrion.service.python',
output='2',
error='',
code='0',
)
def test_ruby(self):
return self.check(
input='puts 1+1',
type='org.tyrion.service.ruby',
output='2',
error='',
code='0',
)
def test_timeout_error(self):
return self.check(
input='sleep 10',
type='org.tyrion.service.bash',
output='',
error=None,
code='15',
timeout=2,
)
<commit_msg>Tweak integration timeout test to match gtest<commit_after>
|
class ServiceTests(object):
def test_bash(self):
return self.check(
input='bc -q\n1+1\nquit()',
type='org.tyrion.service.bash',
output='2',
error='',
code='0',
)
def test_python(self):
return self.check(
input='print 1+1',
type='org.tyrion.service.python',
output='2',
error='',
code='0',
)
def test_ruby(self):
return self.check(
input='puts 1+1',
type='org.tyrion.service.ruby',
output='2',
error='',
code='0',
)
def test_timeout_error(self):
return self.check(
input='echo test\nsleep 10',
type='org.tyrion.service.bash',
output='test',
error=None,
code='15',
timeout=1,
)
|
class ServiceTests(object):
def test_bash(self):
return self.check(
input='bc -q\n1+1\nquit()',
type='org.tyrion.service.bash',
output='2',
error='',
code='0',
)
def test_python(self):
return self.check(
input='print 1+1',
type='org.tyrion.service.python',
output='2',
error='',
code='0',
)
def test_ruby(self):
return self.check(
input='puts 1+1',
type='org.tyrion.service.ruby',
output='2',
error='',
code='0',
)
def test_timeout_error(self):
return self.check(
input='sleep 10',
type='org.tyrion.service.bash',
output='',
error=None,
code='15',
timeout=2,
)
Tweak integration timeout test to match gtest
class ServiceTests(object):
def test_bash(self):
return self.check(
input='bc -q\n1+1\nquit()',
type='org.tyrion.service.bash',
output='2',
error='',
code='0',
)
def test_python(self):
return self.check(
input='print 1+1',
type='org.tyrion.service.python',
output='2',
error='',
code='0',
)
def test_ruby(self):
return self.check(
input='puts 1+1',
type='org.tyrion.service.ruby',
output='2',
error='',
code='0',
)
def test_timeout_error(self):
return self.check(
input='echo test\nsleep 10',
type='org.tyrion.service.bash',
output='test',
error=None,
code='15',
timeout=1,
)
|
<commit_before>
class ServiceTests(object):
def test_bash(self):
return self.check(
input='bc -q\n1+1\nquit()',
type='org.tyrion.service.bash',
output='2',
error='',
code='0',
)
def test_python(self):
return self.check(
input='print 1+1',
type='org.tyrion.service.python',
output='2',
error='',
code='0',
)
def test_ruby(self):
return self.check(
input='puts 1+1',
type='org.tyrion.service.ruby',
output='2',
error='',
code='0',
)
def test_timeout_error(self):
return self.check(
input='sleep 10',
type='org.tyrion.service.bash',
output='',
error=None,
code='15',
timeout=2,
)
<commit_msg>Tweak integration timeout test to match gtest<commit_after>
class ServiceTests(object):
def test_bash(self):
return self.check(
input='bc -q\n1+1\nquit()',
type='org.tyrion.service.bash',
output='2',
error='',
code='0',
)
def test_python(self):
return self.check(
input='print 1+1',
type='org.tyrion.service.python',
output='2',
error='',
code='0',
)
def test_ruby(self):
return self.check(
input='puts 1+1',
type='org.tyrion.service.ruby',
output='2',
error='',
code='0',
)
def test_timeout_error(self):
return self.check(
input='echo test\nsleep 10',
type='org.tyrion.service.bash',
output='test',
error=None,
code='15',
timeout=1,
)
|
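Editor's note: the tweak above makes the timed-out command print before it hangs, so the test can assert that output produced before the kill is preserved. A minimal sketch of that behaviour using only the standard library, assuming a POSIX shell is available; the 1-second timeout mirrors the test.

import subprocess

# A command that prints, then hangs: its partial output should survive the kill.
proc = subprocess.Popen(["bash", "-c", "echo test; sleep 10"],
                        stdout=subprocess.PIPE)
try:
    out, _ = proc.communicate(timeout=1)
except subprocess.TimeoutExpired:
    proc.terminate()                  # SIGTERM, matching the exit code 15 above
    out, _ = proc.communicate()       # drain what was written before the kill
print(out.decode().strip())           # -> "test", despite the timeout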
9aaf3bd6c376f608911b232d5f811e0b7964022f
|
tests/django_mysql_tests/tests.py
|
tests/django_mysql_tests/tests.py
|
# -*- coding:utf-8 -*-
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from django.test import TestCase
from django_mysql_tests.models import MyModel
class SimpleTests(TestCase):
def test_simple(self):
MyModel.objects.create()
|
# -*- coding:utf-8 -*-
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from django.test import TestCase
from django_mysql_tests.models import MyModel
class SimpleTests(TestCase):
def test_simple(self):
MyModel.objects.create()
def test_two(self):
MyModel.objects.create()
MyModel.objects.create()
|
Add second test, trying to trigger travis
|
Add second test, trying to trigger travis
|
Python
|
mit
|
nickmeharry/django-mysql,nickmeharry/django-mysql,arnau126/django-mysql,adamchainz/django-mysql,arnau126/django-mysql,graingert/django-mysql,graingert/django-mysql
|
# -*- coding:utf-8 -*-
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from django.test import TestCase
from django_mysql_tests.models import MyModel
class SimpleTests(TestCase):
def test_simple(self):
MyModel.objects.create()
Add second test, trying to trigger travis
|
# -*- coding:utf-8 -*-
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from django.test import TestCase
from django_mysql_tests.models import MyModel
class SimpleTests(TestCase):
def test_simple(self):
MyModel.objects.create()
def test_two(self):
MyModel.objects.create()
MyModel.objects.create()
|
<commit_before># -*- coding:utf-8 -*-
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from django.test import TestCase
from django_mysql_tests.models import MyModel
class SimpleTests(TestCase):
def test_simple(self):
MyModel.objects.create()
<commit_msg>Add second test, trying to trigger travis<commit_after>
|
# -*- coding:utf-8 -*-
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from django.test import TestCase
from django_mysql_tests.models import MyModel
class SimpleTests(TestCase):
def test_simple(self):
MyModel.objects.create()
def test_two(self):
MyModel.objects.create()
MyModel.objects.create()
|
# -*- coding:utf-8 -*-
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from django.test import TestCase
from django_mysql_tests.models import MyModel
class SimpleTests(TestCase):
def test_simple(self):
MyModel.objects.create()
Add second test, trying to trigger travis# -*- coding:utf-8 -*-
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from django.test import TestCase
from django_mysql_tests.models import MyModel
class SimpleTests(TestCase):
def test_simple(self):
MyModel.objects.create()
def test_two(self):
MyModel.objects.create()
MyModel.objects.create()
|
<commit_before># -*- coding:utf-8 -*-
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from django.test import TestCase
from django_mysql_tests.models import MyModel
class SimpleTests(TestCase):
def test_simple(self):
MyModel.objects.create()
<commit_msg>Add second test, trying to trigger travis<commit_after># -*- coding:utf-8 -*-
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from django.test import TestCase
from django_mysql_tests.models import MyModel
class SimpleTests(TestCase):
def test_simple(self):
MyModel.objects.create()
def test_two(self):
MyModel.objects.create()
MyModel.objects.create()
|
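Editor's note: the duplicated test works as an independent probe because Django's TestCase wraps each test method in a transaction that is rolled back afterwards. A minimal sketch of that isolation, runnable only inside the same test project since it reuses the suite's MyModel.

from django.test import TestCase
from django_mysql_tests.models import MyModel

class IsolationTests(TestCase):
    def test_first(self):
        MyModel.objects.create()
        self.assertEqual(MyModel.objects.count(), 1)

    def test_second(self):
        # The row created in test_first was rolled back with its transaction.
        self.assertEqual(MyModel.objects.count(), 0)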
b0dd95950058d174e50589ceeb18c6a0e2a16ec8
|
docs/source/_static/export_all_data.py
|
docs/source/_static/export_all_data.py
|
#!/usr/bin/env python
"""export_all_data.py - script for exporting all available data"""
import os
from collectionbatchtool import *
def export_all_data(output_dir=None):
"""
Export table data to CSV files.
Parameters
----------
output_dir : str
Path to the output directory.
"""
output_dir = output_dir if output_dir else ''
for tabledataset_subclass in TableDataset.__subclasses__():
instance = tabledataset_subclass()
if instance.database_query.count() > 0: # no files without data
instance.from_database(quiet=False)
filename = instance.model.__name__.lower() + '.csv'
filepath = os.path.join(output_dir, filename)
instance.to_csv(filepath, update_sourceid=True, quiet=False)
if __name__ == '__main__':
apply_user_settings('settings.cfg') # change to your own config-file!
export_all_data() # call the export function
|
#!/usr/bin/env python
"""export_all_data.py - script for exporting all available data"""
import os
from collectionbatchtool import *
def export_all_data(output_dir=None, quiet=True):
"""
Export table data to CSV files.
Parameters
----------
output_dir : str
Path to the output directory.
"""
output_dir = output_dir if output_dir else ''
for tabledataset_subclass in TableDataset.__subclasses__():
instance = tabledataset_subclass()
if instance.database_query.count() > 0: # no files without data
instance.from_database(quiet=quiet)
filename = instance.model.__name__.lower() + '.csv'
filepath = os.path.join(output_dir, filename)
instance.to_csv(
filepath, update_sourceid=True, quiet=quiet)
if __name__ == '__main__':
apply_user_settings('settings.cfg') # change to your own config-file!
export_all_data(quiet=False) # call the export function
|
Add parameter "quiet" to export function
|
Add parameter "quiet" to export function
|
Python
|
mit
|
jmenglund/CollectionBatchTool
|
#!/usr/bin/env python
"""export_all_data.py - script for exporting all available data"""
import os
from collectionbatchtool import *
def export_all_data(output_dir=None):
"""
Export table data to CSV files.
Parameters
----------
output_dir : str
Path to the output directory.
"""
output_dir = output_dir if output_dir else ''
for tabledataset_subclass in TableDataset.__subclasses__():
instance = tabledataset_subclass()
if instance.database_query.count() > 0: # no files without data
instance.from_database(quiet=False)
filename = instance.model.__name__.lower() + '.csv'
filepath = os.path.join(output_dir, filename)
instance.to_csv(filepath, update_sourceid=True, quiet=False)
if __name__ == '__main__':
apply_user_settings('settings.cfg') # change to your own config-file!
export_all_data() # call the export function
Add parameter "quiet" to export function
|
#!/usr/bin/env python
"""export_all_data.py - script for exporting all available data"""
import os
from collectionbatchtool import *
def export_all_data(output_dir=None, quiet=True):
"""
Export table data to CSV files.
Parameters
----------
output_dir : str
Path to the output directory.
"""
output_dir = output_dir if output_dir else ''
for tabledataset_subclass in TableDataset.__subclasses__():
instance = tabledataset_subclass()
if instance.database_query.count() > 0: # no files without data
instance.from_database(quiet=quiet)
filename = instance.model.__name__.lower() + '.csv'
filepath = os.path.join(output_dir, filename)
instance.to_csv(
filepath, update_sourceid=True, quiet=quiet)
if __name__ == '__main__':
apply_user_settings('settings.cfg') # change to your own config-file!
export_all_data(quiet=False) # call the export function
|
<commit_before>#!/usr/bin/env python
"""export_all_data.py - script for exporting all available data"""
import os
from collectionbatchtool import *
def export_all_data(output_dir=None):
"""
Export table data to CSV files.
Parameters
----------
output_dir : str
Path to the output directory.
"""
output_dir = output_dir if output_dir else ''
for tabledataset_subclass in TableDataset.__subclasses__():
instance = tabledataset_subclass()
if instance.database_query.count() > 0: # no files without data
instance.from_database(quiet=False)
filename = instance.model.__name__.lower() + '.csv'
filepath = os.path.join(output_dir, filename)
instance.to_csv(filepath, update_sourceid=True, quiet=False)
if __name__ == '__main__':
apply_user_settings('settings.cfg') # change to your own config-file!
export_all_data() # call the export function
<commit_msg>Add parameter "quiet" to export function<commit_after>
|
#!/usr/bin/env python
"""export_all_data.py - script for exporting all available data"""
import os
from collectionbatchtool import *
def export_all_data(output_dir=None, quiet=True):
"""
Export table data to CSV files.
Parameters
----------
output_dir : str
Path to the output directory.
"""
output_dir = output_dir if output_dir else ''
for tabledataset_subclass in TableDataset.__subclasses__():
instance = tabledataset_subclass()
if instance.database_query.count() > 0: # no files without data
instance.from_database(quiet=quiet)
filename = instance.model.__name__.lower() + '.csv'
filepath = os.path.join(output_dir, filename)
instance.to_csv(
filepath, update_sourceid=True, quiet=quiet)
if __name__ == '__main__':
apply_user_settings('settings.cfg') # change to your own config-file!
export_all_data(quiet=False) # call the export function
|
#!/usr/bin/env python
"""export_all_data.py - script for exporting all available data"""
import os
from collectionbatchtool import *
def export_all_data(output_dir=None):
"""
Export table data to CSV files.
Parameters
----------
output_dir : str
Path to the output directory.
"""
output_dir = output_dir if output_dir else ''
for tabledataset_subclass in TableDataset.__subclasses__():
instance = tabledataset_subclass()
if instance.database_query.count() > 0: # no files without data
instance.from_database(quiet=False)
filename = instance.model.__name__.lower() + '.csv'
filepath = os.path.join(output_dir, filename)
instance.to_csv(filepath, update_sourceid=True, quiet=False)
if __name__ == '__main__':
apply_user_settings('settings.cfg') # change to your own config-file!
export_all_data() # call the export function
Add parameter "quiet" to export function#!/usr/bin/env python
"""export_all_data.py - script for exporting all available data"""
import os
from collectionbatchtool import *
def export_all_data(output_dir=None, quiet=True):
"""
Export table data to CSV files.
Parameters
----------
output_dir : str
Path to the output directory.
"""
output_dir = output_dir if output_dir else ''
for tabledataset_subclass in TableDataset.__subclasses__():
instance = tabledataset_subclass()
if instance.database_query.count() > 0: # no files without data
instance.from_database(quiet=quiet)
filename = instance.model.__name__.lower() + '.csv'
filepath = os.path.join(output_dir, filename)
instance.to_csv(
filepath, update_sourceid=True, quiet=quiet)
if __name__ == '__main__':
apply_user_settings('settings.cfg') # change to your own config-file!
export_all_data(quiet=False) # call the export function
|
<commit_before>#!/usr/bin/env python
"""export_all_data.py - script for exporting all available data"""
import os
from collectionbatchtool import *
def export_all_data(output_dir=None):
"""
Export table data to CSV files.
Parameters
----------
output_dir : str
Path to the output directory.
"""
output_dir = output_dir if output_dir else ''
for tabledataset_subclass in TableDataset.__subclasses__():
instance = tabledataset_subclass()
if instance.database_query.count() > 0: # no files without data
instance.from_database(quiet=False)
filename = instance.model.__name__.lower() + '.csv'
filepath = os.path.join(output_dir, filename)
instance.to_csv(filepath, update_sourceid=True, quiet=False)
if __name__ == '__main__':
apply_user_settings('settings.cfg') # change to your own config-file!
export_all_data() # call the export function
<commit_msg>Add parameter "quiet" to export function<commit_after>#!/usr/bin/env python
"""export_all_data.py - script for exporting all available data"""
import os
from collectionbatchtool import *
def export_all_data(output_dir=None, quiet=True):
"""
Export table data to CSV files.
Parameters
----------
output_dir : str
Path to the output directory.
"""
output_dir = output_dir if output_dir else ''
for tabledataset_subclass in TableDataset.__subclasses__():
instance = tabledataset_subclass()
if instance.database_query.count() > 0: # no files without data
instance.from_database(quiet=quiet)
filename = instance.model.__name__.lower() + '.csv'
filepath = os.path.join(output_dir, filename)
instance.to_csv(
filepath, update_sourceid=True, quiet=quiet)
if __name__ == '__main__':
apply_user_settings('settings.cfg') # change to your own config-file!
export_all_data(quiet=False) # call the export function
|
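Editor's note: the diff threads a single quiet flag from the entry point down into every from_database()/to_csv() call. A minimal sketch of that flag-forwarding pattern; the (name, rows) datasets here are hypothetical stand-ins for the real TableDataset subclasses.

import csv
import os

def export_tables(datasets, output_dir="", quiet=True):
    # The caller decides verbosity once; the flag is forwarded everywhere.
    for name, rows in datasets:
        if not rows:
            continue                           # no files without data
        path = os.path.join(output_dir, name.lower() + ".csv")
        if not quiet:
            print("writing", path)
        with open(path, "w", newline="") as fh:
            csv.writer(fh).writerows(rows)

if __name__ == "__main__":
    export_tables([("Demo", [[1, 2], [3, 4]])], quiet=False)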
1a575129299985471c69cef083224f939c68052f
|
tests/test-empty-results.py
|
tests/test-empty-results.py
|
#!/usr/bin/env python
import unittest
import waybackpack
import sys, os
# via https://github.com/jsvine/waybackpack/issues/39
URL = "https://indianexpress.com/section/lifestyle/health/feed/"
class Test(unittest.TestCase):
def test_empty_result(self):
timestamps = waybackpack.search(URL, from_date = "2020")
assert(len(timestamps) == 0)
pack = waybackpack.Pack(
URL,
timestamps=timestamps,
)
assert(len(pack.timestamps) == 0)
|
#!/usr/bin/env python
import unittest
import waybackpack
import sys, os
# via https://github.com/jsvine/waybackpack/issues/39
URL = "https://indianexpress.com/section/lifestyle/health/feed/"
class Test(unittest.TestCase):
def test_empty_result(self):
timestamps = waybackpack.search(URL, from_date = "2080")
assert(len(timestamps) == 0)
pack = waybackpack.Pack(
URL,
timestamps=timestamps,
)
assert(len(pack.timestamps) == 0)
|
Update test (endpoint was no longer result-less)
|
Update test (endpoint was no longer result-less)
|
Python
|
mit
|
jsvine/waybackpack
|
#!/usr/bin/env python
import unittest
import waybackpack
import sys, os
# via https://github.com/jsvine/waybackpack/issues/39
URL = "https://indianexpress.com/section/lifestyle/health/feed/"
class Test(unittest.TestCase):
def test_empty_result(self):
timestamps = waybackpack.search(URL, from_date = "2020")
assert(len(timestamps) == 0)
pack = waybackpack.Pack(
URL,
timestamps=timestamps,
)
assert(len(pack.timestamps) == 0)
Update test (endpoint was no longer result-less)
|
#!/usr/bin/env python
import unittest
import waybackpack
import sys, os
# via https://github.com/jsvine/waybackpack/issues/39
URL = "https://indianexpress.com/section/lifestyle/health/feed/"
class Test(unittest.TestCase):
def test_empty_result(self):
timestamps = waybackpack.search(URL, from_date = "2080")
assert(len(timestamps) == 0)
pack = waybackpack.Pack(
URL,
timestamps=timestamps,
)
assert(len(pack.timestamps) == 0)
|
<commit_before>#!/usr/bin/env python
import unittest
import waybackpack
import sys, os
# via https://github.com/jsvine/waybackpack/issues/39
URL = "https://indianexpress.com/section/lifestyle/health/feed/"
class Test(unittest.TestCase):
def test_empty_result(self):
timestamps = waybackpack.search(URL, from_date = "2020")
assert(len(timestamps) == 0)
pack = waybackpack.Pack(
URL,
timestamps=timestamps,
)
assert(len(pack.timestamps) == 0)
<commit_msg>Update test (endpoint was no longer result-less)<commit_after>
|
#!/usr/bin/env python
import unittest
import waybackpack
import sys, os
# via https://github.com/jsvine/waybackpack/issues/39
URL = "https://indianexpress.com/section/lifestyle/health/feed/"
class Test(unittest.TestCase):
def test_empty_result(self):
timestamps = waybackpack.search(URL, from_date = "2080")
assert(len(timestamps) == 0)
pack = waybackpack.Pack(
URL,
timestamps=timestamps,
)
assert(len(pack.timestamps) == 0)
|
#!/usr/bin/env python
import unittest
import waybackpack
import sys, os
# via https://github.com/jsvine/waybackpack/issues/39
URL = "https://indianexpress.com/section/lifestyle/health/feed/"
class Test(unittest.TestCase):
def test_empty_result(self):
timestamps = waybackpack.search(URL, from_date = "2020")
assert(len(timestamps) == 0)
pack = waybackpack.Pack(
URL,
timestamps=timestamps,
)
assert(len(pack.timestamps) == 0)
Update test (endpoint was no longer result-less)#!/usr/bin/env python
import unittest
import waybackpack
import sys, os
# via https://github.com/jsvine/waybackpack/issues/39
URL = "https://indianexpress.com/section/lifestyle/health/feed/"
class Test(unittest.TestCase):
def test_empty_result(self):
timestamps = waybackpack.search(URL, from_date = "2080")
assert(len(timestamps) == 0)
pack = waybackpack.Pack(
URL,
timestamps=timestamps,
)
assert(len(pack.timestamps) == 0)
|
<commit_before>#!/usr/bin/env python
import unittest
import waybackpack
import sys, os
# via https://github.com/jsvine/waybackpack/issues/39
URL = "https://indianexpress.com/section/lifestyle/health/feed/"
class Test(unittest.TestCase):
def test_empty_result(self):
timestamps = waybackpack.search(URL, from_date = "2020")
assert(len(timestamps) == 0)
pack = waybackpack.Pack(
URL,
timestamps=timestamps,
)
assert(len(pack.timestamps) == 0)
<commit_msg>Update test (endpoint was no longer result-less)<commit_after>#!/usr/bin/env python
import unittest
import waybackpack
import sys, os
# via https://github.com/jsvine/waybackpack/issues/39
URL = "https://indianexpress.com/section/lifestyle/health/feed/"
class Test(unittest.TestCase):
def test_empty_result(self):
timestamps = waybackpack.search(URL, from_date = "2080")
assert(len(timestamps) == 0)
pack = waybackpack.Pack(
URL,
timestamps=timestamps,
)
assert(len(pack.timestamps) == 0)
|
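Editor's note: the fix replaces a from_date that the archive eventually filled in ("2020") with one far in the future ("2080"). One way to keep such an empty-results fixture from rotting again is to derive the cutoff from the clock; this helper is an assumption, not part of waybackpack.

import datetime

def future_from_date(years_ahead=50):
    # Compute the cutoff at runtime so the "no snapshots yet" assumption
    # cannot expire the way a hard-coded year does.
    return str(datetime.date.today().year + years_ahead)

print(future_from_date())   # e.g. "2075"; pass as from_date=... to search()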
3d027b8d4d39fcdbc839bd0e186ea225e1c7b976
|
tests/__init__.py
|
tests/__init__.py
|
from .test_great_expectations import *
from .test_util import *
from .test_dataset import *
from .test_pandas_dataset import *
from tests.pandas.test_pandas_dataset_distributional_expectations import *
from .test_expectation_decorators import *
from .test_cli import *
|
# from .test_great_expectations import *
# from .test_util import *
# from .test_dataset import *
# from .test_pandas_dataset import *
# from tests.pandas.test_pandas_dataset_distributional_expectations import *
# from .test_expectation_decorators import *
# from .test_cli import *
|
Remove explicit import in tests module.
|
Remove explicit import in tests module.
|
Python
|
apache-2.0
|
great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations
|
from .test_great_expectations import *
from .test_util import *
from .test_dataset import *
from .test_pandas_dataset import *
from tests.pandas.test_pandas_dataset_distributional_expectations import *
from .test_expectation_decorators import *
from .test_cli import *
Remove explicit import in tests module.
|
# from .test_great_expectations import *
# from .test_util import *
# from .test_dataset import *
# from .test_pandas_dataset import *
# from tests.pandas.test_pandas_dataset_distributional_expectations import *
# from .test_expectation_decorators import *
# from .test_cli import *
|
<commit_before>from .test_great_expectations import *
from .test_util import *
from .test_dataset import *
from .test_pandas_dataset import *
from tests.pandas.test_pandas_dataset_distributional_expectations import *
from .test_expectation_decorators import *
from .test_cli import *
<commit_msg>Remove explicit import in tests module.<commit_after>
|
# from .test_great_expectations import *
# from .test_util import *
# from .test_dataset import *
# from .test_pandas_dataset import *
# from tests.pandas.test_pandas_dataset_distributional_expectations import *
# from .test_expectation_decorators import *
# from .test_cli import *
|
from .test_great_expectations import *
from .test_util import *
from .test_dataset import *
from .test_pandas_dataset import *
from tests.pandas.test_pandas_dataset_distributional_expectations import *
from .test_expectation_decorators import *
from .test_cli import *
Remove explicit import in tests module.# from .test_great_expectations import *
# from .test_util import *
# from .test_dataset import *
# from .test_pandas_dataset import *
# from tests.pandas.test_pandas_dataset_distributional_expectations import *
# from .test_expectation_decorators import *
# from .test_cli import *
|
<commit_before>from .test_great_expectations import *
from .test_util import *
from .test_dataset import *
from .test_pandas_dataset import *
from tests.pandas.test_pandas_dataset_distributional_expectations import *
from .test_expectation_decorators import *
from .test_cli import *
<commit_msg>Remove explicit import in tests module.<commit_after># from .test_great_expectations import *
# from .test_util import *
# from .test_dataset import *
# from .test_pandas_dataset import *
# from tests.pandas.test_pandas_dataset_distributional_expectations import *
# from .test_expectation_decorators import *
# from .test_cli import *
|
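Editor's note: with the star imports commented out, discovery is left to the test runner, which avoids executing every test module at package-import time. Assuming pytest is the runner in use (an assumption, not stated in the record), the equivalent invocation can be sketched as:

import subprocess
import sys

# Let the runner walk tests/test_*.py itself instead of importing eagerly
# from tests/__init__.py.
subprocess.run([sys.executable, "-m", "pytest", "tests/"], check=False)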
fa1b111e63ebd069c027a3b969f679b2de54949f
|
tests/conftest.py
|
tests/conftest.py
|
import pytest
from sanic import Sanic
from sanic_openapi import swagger_blueprint
@pytest.fixture()
def app():
app = Sanic('test')
app.blueprint(swagger_blueprint)
return app
|
import pytest
from sanic import Sanic
import sanic_openapi
@pytest.fixture()
def app():
app = Sanic("test")
app.blueprint(sanic_openapi.swagger_blueprint)
yield app
# Clean up
sanic_openapi.swagger.definitions = {}
sanic_openapi.swagger._spec = {}
|
Add clean up in app fixture
|
Test: Add clean up in app fixture
|
Python
|
mit
|
channelcat/sanic-openapi,channelcat/sanic-openapi
|
import pytest
from sanic import Sanic
from sanic_openapi import swagger_blueprint
@pytest.fixture()
def app():
app = Sanic('test')
app.blueprint(swagger_blueprint)
return app
Test: Add clean up in app fixture
|
import pytest
from sanic import Sanic
import sanic_openapi
@pytest.fixture()
def app():
app = Sanic("test")
app.blueprint(sanic_openapi.swagger_blueprint)
yield app
# Clean up
sanic_openapi.swagger.definitions = {}
sanic_openapi.swagger._spec = {}
|
<commit_before>import pytest
from sanic import Sanic
from sanic_openapi import swagger_blueprint
@pytest.fixture()
def app():
app = Sanic('test')
app.blueprint(swagger_blueprint)
return app
<commit_msg>Test: Add clean up in app fixture<commit_after>
|
import pytest
from sanic import Sanic
import sanic_openapi
@pytest.fixture()
def app():
app = Sanic("test")
app.blueprint(sanic_openapi.swagger_blueprint)
yield app
# Clean up
sanic_openapi.swagger.definitions = {}
sanic_openapi.swagger._spec = {}
|
import pytest
from sanic import Sanic
from sanic_openapi import swagger_blueprint
@pytest.fixture()
def app():
app = Sanic('test')
app.blueprint(swagger_blueprint)
return app
Test: Add clean up in app fixtureimport pytest
from sanic import Sanic
import sanic_openapi
@pytest.fixture()
def app():
app = Sanic("test")
app.blueprint(sanic_openapi.swagger_blueprint)
yield app
# Clean up
sanic_openapi.swagger.definitions = {}
sanic_openapi.swagger._spec = {}
|
<commit_before>import pytest
from sanic import Sanic
from sanic_openapi import swagger_blueprint
@pytest.fixture()
def app():
app = Sanic('test')
app.blueprint(swagger_blueprint)
return app
<commit_msg>Test: Add clean up in app fixture<commit_after>import pytest
from sanic import Sanic
import sanic_openapi
@pytest.fixture()
def app():
app = Sanic("test")
app.blueprint(sanic_openapi.swagger_blueprint)
yield app
# Clean up
sanic_openapi.swagger.definitions = {}
sanic_openapi.swagger._spec = {}
|
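Editor's note: switching the fixture from return to yield lets teardown code run after each test, resetting the module-level swagger state so specs cannot leak between tests. A minimal sketch of that yield-fixture shape, with a hypothetical state dict standing in for sanic_openapi's globals.

import pytest

@pytest.fixture()
def registry():
    state = {"definitions": {}, "_spec": {}}
    yield state                    # the test body runs here
    # Teardown: runs after every test, even failing ones.
    state["definitions"].clear()
    state["_spec"].clear()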
04e5083006ee1faffbbdc73bd71b4601ff1db3ae
|
tests/workers/test_merge.py
|
tests/workers/test_merge.py
|
import pytest
from mock import patch, MagicMock
from gitfs.worker.merge import MergeWorker
class TestMergeWorker(object):
def test_run(self):
mocked_queue = MagicMock()
mocked_idle = MagicMock(side_effect=ValueError)
mocked_queue.get.side_effect = ValueError()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy", merge_queue=mocked_queue)
worker.on_idle = mocked_idle
worker.timeout = 1
with pytest.raises(ValueError):
worker.run()
mocked_queue.get.assert_called_once_with(timeout=1, block=True)
mocked_idle.assert_called_once_with([], [])
|
import pytest
from mock import patch, MagicMock
from gitfs.worker.merge import MergeWorker
class TestMergeWorker(object):
def test_run(self):
mocked_queue = MagicMock()
mocked_idle = MagicMock(side_effect=ValueError)
mocked_queue.get.side_effect = ValueError()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy", merge_queue=mocked_queue)
worker.on_idle = mocked_idle
worker.timeout = 1
with pytest.raises(ValueError):
worker.run()
mocked_queue.get.assert_called_once_with(timeout=1, block=True)
mocked_idle.assert_called_once_with([], [])
def test_on_idle_with_commits_and_merges(self):
mocked_want_to_merge = MagicMock()
mocked_commit = MagicMock()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy",
want_to_merge=mocked_want_to_merge)
worker.commit = mocked_commit
commits, merges = worker.on_idle("commits", "merges")
mocked_commit.assert_called_once_with("commits")
assert mocked_want_to_merge.set.call_count == 1
assert commits == []
assert merges == []
|
Test merge worker with commits and merges
|
test: Test merge worker with commits and merges
|
Python
|
apache-2.0
|
rowhit/gitfs,bussiere/gitfs,PressLabs/gitfs,PressLabs/gitfs,ksmaheshkumar/gitfs
|
import pytest
from mock import patch, MagicMock
from gitfs.worker.merge import MergeWorker
class TestMergeWorker(object):
def test_run(self):
mocked_queue = MagicMock()
mocked_idle = MagicMock(side_effect=ValueError)
mocked_queue.get.side_effect = ValueError()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy", merge_queue=mocked_queue)
worker.on_idle = mocked_idle
worker.timeout = 1
with pytest.raises(ValueError):
worker.run()
mocked_queue.get.assert_called_once_with(timeout=1, block=True)
mocked_idle.assert_called_once_with([], [])
test: Test merge worker with commits and merges
|
import pytest
from mock import patch, MagicMock
from gitfs.worker.merge import MergeWorker
class TestMergeWorker(object):
def test_run(self):
mocked_queue = MagicMock()
mocked_idle = MagicMock(side_effect=ValueError)
mocked_queue.get.side_effect = ValueError()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy", merge_queue=mocked_queue)
worker.on_idle = mocked_idle
worker.timeout = 1
with pytest.raises(ValueError):
worker.run()
mocked_queue.get.assert_called_once_with(timeout=1, block=True)
mocked_idle.assert_called_once_with([], [])
def test_on_idle_with_commits_and_merges(self):
mocked_want_to_merge = MagicMock()
mocked_commit = MagicMock()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy",
want_to_merge=mocked_want_to_merge)
worker.commit = mocked_commit
commits, merges = worker.on_idle("commits", "merges")
mocked_commit.assert_called_once_with("commits")
assert mocked_want_to_merge.set.call_count == 1
assert commits == []
assert merges == []
|
<commit_before>import pytest
from mock import patch, MagicMock
from gitfs.worker.merge import MergeWorker
class TestMergeWorker(object):
def test_run(self):
mocked_queue = MagicMock()
mocked_idle = MagicMock(side_effect=ValueError)
mocked_queue.get.side_effect = ValueError()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy", merge_queue=mocked_queue)
worker.on_idle = mocked_idle
worker.timeout = 1
with pytest.raises(ValueError):
worker.run()
mocked_queue.get.assert_called_once_with(timeout=1, block=True)
mocked_idle.assert_called_once_with([], [])
<commit_msg>test: Test merge worker with commits and merges<commit_after>
|
import pytest
from mock import patch, MagicMock
from gitfs.worker.merge import MergeWorker
class TestMergeWorker(object):
def test_run(self):
mocked_queue = MagicMock()
mocked_idle = MagicMock(side_effect=ValueError)
mocked_queue.get.side_effect = ValueError()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy", merge_queue=mocked_queue)
worker.on_idle = mocked_idle
worker.timeout = 1
with pytest.raises(ValueError):
worker.run()
mocked_queue.get.assert_called_once_with(timeout=1, block=True)
mocked_idle.assert_called_once_with([], [])
def test_on_idle_with_commits_and_merges(self):
mocked_want_to_merge = MagicMock()
mocked_commit = MagicMock()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy",
want_to_merge=mocked_want_to_merge)
worker.commit = mocked_commit
commits, merges = worker.on_idle("commits", "merges")
mocked_commit.assert_called_once_with("commits")
assert mocked_want_to_merge.set.call_count == 1
assert commits == []
assert merges == []
|
import pytest
from mock import patch, MagicMock
from gitfs.worker.merge import MergeWorker
class TestMergeWorker(object):
def test_run(self):
mocked_queue = MagicMock()
mocked_idle = MagicMock(side_effect=ValueError)
mocked_queue.get.side_effect = ValueError()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy", merge_queue=mocked_queue)
worker.on_idle = mocked_idle
worker.timeout = 1
with pytest.raises(ValueError):
worker.run()
mocked_queue.get.assert_called_once_with(timeout=1, block=True)
mocked_idle.assert_called_once_with([], [])
test: Test merge worker with commits and mergesimport pytest
from mock import patch, MagicMock
from gitfs.worker.merge import MergeWorker
class TestMergeWorker(object):
def test_run(self):
mocked_queue = MagicMock()
mocked_idle = MagicMock(side_effect=ValueError)
mocked_queue.get.side_effect = ValueError()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy", merge_queue=mocked_queue)
worker.on_idle = mocked_idle
worker.timeout = 1
with pytest.raises(ValueError):
worker.run()
mocked_queue.get.assert_called_once_with(timeout=1, block=True)
mocked_idle.assert_called_once_with([], [])
def test_on_idle_with_commits_and_merges(self):
mocked_want_to_merge = MagicMock()
mocked_commit = MagicMock()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy",
want_to_merge=mocked_want_to_merge)
worker.commit = mocked_commit
commits, merges = worker.on_idle("commits", "merges")
mocked_commit.assert_called_once_with("commits")
assert mocked_want_to_merge.set.call_count == 1
assert commits == []
assert merges == []
|
<commit_before>import pytest
from mock import patch, MagicMock
from gitfs.worker.merge import MergeWorker
class TestMergeWorker(object):
def test_run(self):
mocked_queue = MagicMock()
mocked_idle = MagicMock(side_effect=ValueError)
mocked_queue.get.side_effect = ValueError()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy", merge_queue=mocked_queue)
worker.on_idle = mocked_idle
worker.timeout = 1
with pytest.raises(ValueError):
worker.run()
mocked_queue.get.assert_called_once_with(timeout=1, block=True)
mocked_idle.assert_called_once_with([], [])
<commit_msg>test: Test merge worker with commits and merges<commit_after>import pytest
from mock import patch, MagicMock
from gitfs.worker.merge import MergeWorker
class TestMergeWorker(object):
def test_run(self):
mocked_queue = MagicMock()
mocked_idle = MagicMock(side_effect=ValueError)
mocked_queue.get.side_effect = ValueError()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy", merge_queue=mocked_queue)
worker.on_idle = mocked_idle
worker.timeout = 1
with pytest.raises(ValueError):
worker.run()
mocked_queue.get.assert_called_once_with(timeout=1, block=True)
mocked_idle.assert_called_once_with([], [])
def test_on_idle_with_commits_and_merges(self):
mocked_want_to_merge = MagicMock()
mocked_commit = MagicMock()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy",
want_to_merge=mocked_want_to_merge)
worker.commit = mocked_commit
commits, merges = worker.on_idle("commits", "merges")
mocked_commit.assert_called_once_with("commits")
assert mocked_want_to_merge.set.call_count == 1
assert commits == []
assert merges == []
|
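Editor's note: the new test stubs the worker's collaborators with MagicMock and then asserts on how they were called. The same assertion style can be sketched in isolation from gitfs; the two mocks below are illustrative stand-ins for want_to_merge and commit.

from unittest.mock import MagicMock

event = MagicMock()
commit = MagicMock()

commit("commits")   # the worker commits pending changes...
event.set()         # ...then raises the want-to-merge flag.

commit.assert_called_once_with("commits")
assert event.set.call_count == 1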
3a2936bf55019dfd9203031ebe73966846b6f041
|
tests/test_dpp.py
|
tests/test_dpp.py
|
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from agents.dpp import DPP
import replay_buffer
from test_dqn_like import _TestDQNLike
from chainer import testing
@testing.parameterize(
{'eta': 1e-2},
{'eta': 1e-1},
{'eta': 1e-0},
{'eta': 1e+1},
)
class TestDQN(_TestDQNLike):
def make_agent(self, gpu, q_func, explorer, opt):
rbuf = replay_buffer.ReplayBuffer(10 ** 5)
return DPP(q_func, opt, rbuf, gpu=gpu, gamma=0.9, explorer=explorer,
replay_start_size=100, target_update_frequency=100,
eta=self.eta)
|
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from agents.dpp import DPP
from agents.dpp import DPPL
from agents.dpp import DPPGreedy
import replay_buffer
from test_dqn_like import _TestDQNLike
class TestDPP(_TestDQNLike):
def make_agent(self, gpu, q_func, explorer, opt):
rbuf = replay_buffer.ReplayBuffer(10 ** 5)
return DPP(q_func, opt, rbuf, gpu=gpu, gamma=0.9, explorer=explorer,
replay_start_size=100, target_update_frequency=100)
def test_abc_continuous_gpu(self):
print("DPP doesn't support continuous action spaces.")
def test_abc_continuous_cpu(self):
print("DPP doesn't support continuous action spaces.")
class TestDPPL(_TestDQNLike):
def make_agent(self, gpu, q_func, explorer, opt):
rbuf = replay_buffer.ReplayBuffer(10 ** 5)
return DPPL(q_func, opt, rbuf, gpu=gpu, gamma=0.9, explorer=explorer,
replay_start_size=100, target_update_frequency=100)
def test_abc_continuous_gpu(self):
print("DPPL doesn't support continuous action spaces.")
def test_abc_continuous_cpu(self):
print("DPPL doesn't support continuous action spaces.")
class TestDPPGreedy(_TestDQNLike):
def make_agent(self, gpu, q_func, explorer, opt):
rbuf = replay_buffer.ReplayBuffer(10 ** 5)
return DPPGreedy(q_func, opt, rbuf, gpu=gpu, gamma=0.9,
explorer=explorer,
replay_start_size=100, target_update_frequency=100)
|
Add tests for DPPL and DPPGreedy.
|
Add tests for DPPL and DPPGreedy.
|
Python
|
mit
|
toslunar/chainerrl,toslunar/chainerrl
|
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from agents.dpp import DPP
import replay_buffer
from test_dqn_like import _TestDQNLike
from chainer import testing
@testing.parameterize(
{'eta': 1e-2},
{'eta': 1e-1},
{'eta': 1e-0},
{'eta': 1e+1},
)
class TestDQN(_TestDQNLike):
def make_agent(self, gpu, q_func, explorer, opt):
rbuf = replay_buffer.ReplayBuffer(10 ** 5)
return DPP(q_func, opt, rbuf, gpu=gpu, gamma=0.9, explorer=explorer,
replay_start_size=100, target_update_frequency=100,
eta=self.eta)
Add tests for DPPL and DPPGreedy.
|
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from agents.dpp import DPP
from agents.dpp import DPPL
from agents.dpp import DPPGreedy
import replay_buffer
from test_dqn_like import _TestDQNLike
class TestDPP(_TestDQNLike):
def make_agent(self, gpu, q_func, explorer, opt):
rbuf = replay_buffer.ReplayBuffer(10 ** 5)
return DPP(q_func, opt, rbuf, gpu=gpu, gamma=0.9, explorer=explorer,
replay_start_size=100, target_update_frequency=100)
def test_abc_continuous_gpu(self):
print("DPP doesn't support continuous action spaces.")
def test_abc_continuous_cpu(self):
print("DPP doesn't support continuous action spaces.")
class TestDPPL(_TestDQNLike):
def make_agent(self, gpu, q_func, explorer, opt):
rbuf = replay_buffer.ReplayBuffer(10 ** 5)
return DPPL(q_func, opt, rbuf, gpu=gpu, gamma=0.9, explorer=explorer,
replay_start_size=100, target_update_frequency=100)
def test_abc_continuous_gpu(self):
print("DPPL doesn't support continuous action spaces.")
def test_abc_continuous_cpu(self):
print("DPPL doesn't support continuous action spaces.")
class TestDPPGreedy(_TestDQNLike):
def make_agent(self, gpu, q_func, explorer, opt):
rbuf = replay_buffer.ReplayBuffer(10 ** 5)
return DPPGreedy(q_func, opt, rbuf, gpu=gpu, gamma=0.9,
explorer=explorer,
replay_start_size=100, target_update_frequency=100)
|
<commit_before>from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from agents.dpp import DPP
import replay_buffer
from test_dqn_like import _TestDQNLike
from chainer import testing
@testing.parameterize(
{'eta': 1e-2},
{'eta': 1e-1},
{'eta': 1e-0},
{'eta': 1e+1},
)
class TestDQN(_TestDQNLike):
def make_agent(self, gpu, q_func, explorer, opt):
rbuf = replay_buffer.ReplayBuffer(10 ** 5)
return DPP(q_func, opt, rbuf, gpu=gpu, gamma=0.9, explorer=explorer,
replay_start_size=100, target_update_frequency=100,
eta=self.eta)
<commit_msg>Add tests for DPPL and DPPGreedy.<commit_after>
|
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from agents.dpp import DPP
from agents.dpp import DPPL
from agents.dpp import DPPGreedy
import replay_buffer
from test_dqn_like import _TestDQNLike
class TestDPP(_TestDQNLike):
def make_agent(self, gpu, q_func, explorer, opt):
rbuf = replay_buffer.ReplayBuffer(10 ** 5)
return DPP(q_func, opt, rbuf, gpu=gpu, gamma=0.9, explorer=explorer,
replay_start_size=100, target_update_frequency=100)
def test_abc_continuous_gpu(self):
print("DPP doesn't support continuous action spaces.")
def test_abc_continuous_cpu(self):
print("DPP doesn't support continuous action spaces.")
class TestDPPL(_TestDQNLike):
def make_agent(self, gpu, q_func, explorer, opt):
rbuf = replay_buffer.ReplayBuffer(10 ** 5)
return DPPL(q_func, opt, rbuf, gpu=gpu, gamma=0.9, explorer=explorer,
replay_start_size=100, target_update_frequency=100)
def test_abc_continuous_gpu(self):
print("DPPL doesn't support continuous action spaces.")
def test_abc_continuous_cpu(self):
print("DPPL doesn't support continuous action spaces.")
class TestDPPGreedy(_TestDQNLike):
def make_agent(self, gpu, q_func, explorer, opt):
rbuf = replay_buffer.ReplayBuffer(10 ** 5)
return DPPGreedy(q_func, opt, rbuf, gpu=gpu, gamma=0.9,
explorer=explorer,
replay_start_size=100, target_update_frequency=100)
|
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from agents.dpp import DPP
import replay_buffer
from test_dqn_like import _TestDQNLike
from chainer import testing
@testing.parameterize(
{'eta': 1e-2},
{'eta': 1e-1},
{'eta': 1e-0},
{'eta': 1e+1},
)
class TestDQN(_TestDQNLike):
def make_agent(self, gpu, q_func, explorer, opt):
rbuf = replay_buffer.ReplayBuffer(10 ** 5)
return DPP(q_func, opt, rbuf, gpu=gpu, gamma=0.9, explorer=explorer,
replay_start_size=100, target_update_frequency=100,
eta=self.eta)
Add tests for DPPL and DPPGreedy.from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from agents.dpp import DPP
from agents.dpp import DPPL
from agents.dpp import DPPGreedy
import replay_buffer
from test_dqn_like import _TestDQNLike
class TestDPP(_TestDQNLike):
def make_agent(self, gpu, q_func, explorer, opt):
rbuf = replay_buffer.ReplayBuffer(10 ** 5)
return DPP(q_func, opt, rbuf, gpu=gpu, gamma=0.9, explorer=explorer,
replay_start_size=100, target_update_frequency=100)
def test_abc_continuous_gpu(self):
print("DPP doesn't support continuous action spaces.")
def test_abc_continuous_cpu(self):
print("DPP doesn't support continuous action spaces.")
class TestDPPL(_TestDQNLike):
def make_agent(self, gpu, q_func, explorer, opt):
rbuf = replay_buffer.ReplayBuffer(10 ** 5)
return DPPL(q_func, opt, rbuf, gpu=gpu, gamma=0.9, explorer=explorer,
replay_start_size=100, target_update_frequency=100)
def test_abc_continuous_gpu(self):
print("DPPL doesn't support continuous action spaces.")
def test_abc_continuous_cpu(self):
print("DPPL doesn't support continuous action spaces.")
class TestDPPGreedy(_TestDQNLike):
def make_agent(self, gpu, q_func, explorer, opt):
rbuf = replay_buffer.ReplayBuffer(10 ** 5)
return DPPGreedy(q_func, opt, rbuf, gpu=gpu, gamma=0.9,
explorer=explorer,
replay_start_size=100, target_update_frequency=100)
|
<commit_before>from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from agents.dpp import DPP
import replay_buffer
from test_dqn_like import _TestDQNLike
from chainer import testing
@testing.parameterize(
{'eta': 1e-2},
{'eta': 1e-1},
{'eta': 1e-0},
{'eta': 1e+1},
)
class TestDQN(_TestDQNLike):
def make_agent(self, gpu, q_func, explorer, opt):
rbuf = replay_buffer.ReplayBuffer(10 ** 5)
return DPP(q_func, opt, rbuf, gpu=gpu, gamma=0.9, explorer=explorer,
replay_start_size=100, target_update_frequency=100,
eta=self.eta)
<commit_msg>Add tests for DPPL and DPPGreedy.<commit_after>from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from agents.dpp import DPP
from agents.dpp import DPPL
from agents.dpp import DPPGreedy
import replay_buffer
from test_dqn_like import _TestDQNLike
class TestDPP(_TestDQNLike):
def make_agent(self, gpu, q_func, explorer, opt):
rbuf = replay_buffer.ReplayBuffer(10 ** 5)
return DPP(q_func, opt, rbuf, gpu=gpu, gamma=0.9, explorer=explorer,
replay_start_size=100, target_update_frequency=100)
def test_abc_continuous_gpu(self):
print("DPP doesn't support continuous action spaces.")
def test_abc_continuous_cpu(self):
print("DPP doesn't support continuous action spaces.")
class TestDPPL(_TestDQNLike):
def make_agent(self, gpu, q_func, explorer, opt):
rbuf = replay_buffer.ReplayBuffer(10 ** 5)
return DPPL(q_func, opt, rbuf, gpu=gpu, gamma=0.9, explorer=explorer,
replay_start_size=100, target_update_frequency=100)
def test_abc_continuous_gpu(self):
print("DPPL doesn't support continuous action spaces.")
def test_abc_continuous_cpu(self):
print("DPPL doesn't support continuous action spaces.")
class TestDPPGreedy(_TestDQNLike):
def make_agent(self, gpu, q_func, explorer, opt):
rbuf = replay_buffer.ReplayBuffer(10 ** 5)
return DPPGreedy(q_func, opt, rbuf, gpu=gpu, gamma=0.9,
explorer=explorer,
replay_start_size=100, target_update_frequency=100)
|
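Editor's note: the DPP tests opt out of inherited continuous-action cases by overriding them with print() bodies, which makes them pass silently. A sketch of the same opt-out pattern that reports a proper skip instead; the class and agent names here are illustrative, not from chainerrl.

import unittest

class ActionSpaceTests(unittest.TestCase):
    agent_name = "base"

    def test_discrete(self):
        self.assertTrue(True)   # stand-in for the shared scenario

    def test_continuous(self):
        self.assertTrue(True)

class DiscreteOnlyAgentTests(ActionSpaceTests):
    agent_name = "DPP-like"

    def test_continuous(self):
        # Reported as "skipped" by the runner rather than silently passing.
        self.skipTest(self.agent_name + " doesn't support continuous action spaces.")

if __name__ == "__main__":
    unittest.main()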
7c1b539436b1f27896bc0e193b52838e2323519b
|
tutorials/urls.py
|
tutorials/urls.py
|
from django.conf.urls import include, url
from tutorials import views
urlpatterns = [
url(r'^$', views.ListTutorials.as_view()),
url(r'add/', views.NewTutorial.as_view(), name='add_tutorial'),
url(r'(?P<tutorial_id>[\w\-]+)/edit/', views.EditTutorials.as_view(), name='edit_tutorial'),
# This must be last, otherwise it will match anything
url(r'^(?P<tutorial_id>[\w\-]+)/$', views.TutorialDetail.as_view(), name='detail_tutorial'),
]
|
from django.conf.urls import include, url
from tutorials import views
urlpatterns = [
url(r'^$', views.ListTutorials.as_view(), name='list_tutorials'),
url(r'add/', views.CreateNewTutorial.as_view(), name='add_tutorial'),
url(r'(?P<tutorial_id>[\w\-]+)/edit/', views.EditTutorials.as_view(), name='edit_tutorial'),
url(r'(?P<tutorial_id>[\w\-]+)/delete/', views.DeleteTutorial.as_view(), name='delete_tutorial'),
# This must be last, otherwise it will match anything
url(r'^(?P<tutorial_id>[\w\-]+)/$', views.TutorialDetail.as_view(), name='detail_tutorial'),
]
|
Add url name to ListView, New url for delete view, Refactor ViewClass name for NewTutorials to CreateNewTutorials
|
Add url name to ListView, New url for delete view, Refactor ViewClass name for NewTutorials to CreateNewTutorials
|
Python
|
agpl-3.0
|
openego/oeplatform,openego/oeplatform,openego/oeplatform,openego/oeplatform
|
from django.conf.urls import include, url
from tutorials import views
urlpatterns = [
url(r'^$', views.ListTutorials.as_view()),
url(r'add/', views.NewTutorial.as_view(), name='add_tutorial'),
url(r'(?P<tutorial_id>[\w\-]+)/edit/', views.EditTutorials.as_view(), name='edit_tutorial'),
# This must be last, otherwise it will match anything
url(r'^(?P<tutorial_id>[\w\-]+)/$', views.TutorialDetail.as_view(), name='detail_tutorial'),
]Add url name to ListView, New url for delete view, Refactor ViewClass name for NewTutorials to CreateNewTutorials
|
from django.conf.urls import include, url
from tutorials import views
urlpatterns = [
url(r'^$', views.ListTutorials.as_view(), name='list_tutorials'),
url(r'add/', views.CreateNewTutorial.as_view(), name='add_tutorial'),
url(r'(?P<tutorial_id>[\w\-]+)/edit/', views.EditTutorials.as_view(), name='edit_tutorial'),
url(r'(?P<tutorial_id>[\w\-]+)/delete/', views.DeleteTutorial.as_view(), name='delete_tutorial'),
# This must be last, otherwise it will match anything
url(r'^(?P<tutorial_id>[\w\-]+)/$', views.TutorialDetail.as_view(), name='detail_tutorial'),
]
|
<commit_before>from django.conf.urls import include, url
from tutorials import views
urlpatterns = [
url(r'^$', views.ListTutorials.as_view()),
url(r'add/', views.NewTutorial.as_view(), name='add_tutorial'),
url(r'(?P<tutorial_id>[\w\-]+)/edit/', views.EditTutorials.as_view(), name='edit_tutorial'),
# This must be last, otherwise it will match anything
url(r'^(?P<tutorial_id>[\w\-]+)/$', views.TutorialDetail.as_view(), name='detail_tutorial'),
]<commit_msg>Add url name to ListView, New url for delete view, Refactor ViewClass name for NewTutorials to CreateNewTutorials<commit_after>
|
from django.conf.urls import include, url
from tutorials import views
urlpatterns = [
url(r'^$', views.ListTutorials.as_view(), name='list_tutorials'),
url(r'add/', views.CreateNewTutorial.as_view(), name='add_tutorial'),
url(r'(?P<tutorial_id>[\w\-]+)/edit/', views.EditTutorials.as_view(), name='edit_tutorial'),
url(r'(?P<tutorial_id>[\w\-]+)/delete/', views.DeleteTutorial.as_view(), name='delete_tutorial'),
# This must be last, otherwise it will match anything
url(r'^(?P<tutorial_id>[\w\-]+)/$', views.TutorialDetail.as_view(), name='detail_tutorial'),
]
|
from django.conf.urls import include, url
from tutorials import views
urlpatterns = [
url(r'^$', views.ListTutorials.as_view()),
url(r'add/', views.NewTutorial.as_view(), name='add_tutorial'),
url(r'(?P<tutorial_id>[\w\-]+)/edit/', views.EditTutorials.as_view(), name='edit_tutorial'),
# This must be last, otherwise it will match anything
url(r'^(?P<tutorial_id>[\w\-]+)/$', views.TutorialDetail.as_view(), name='detail_tutorial'),
]Add url name to ListView, New url for delete view, Refactor ViewClass name for NewTutorials to CreateNewTutorialsfrom django.conf.urls import include, url
from tutorials import views
urlpatterns = [
url(r'^$', views.ListTutorials.as_view(), name='list_tutorials'),
url(r'add/', views.CreateNewTutorial.as_view(), name='add_tutorial'),
url(r'(?P<tutorial_id>[\w\-]+)/edit/', views.EditTutorials.as_view(), name='edit_tutorial'),
url(r'(?P<tutorial_id>[\w\-]+)/delete/', views.DeleteTutorial.as_view(), name='delete_tutorial'),
# This must be last, otherwise it will match anything
url(r'^(?P<tutorial_id>[\w\-]+)/$', views.TutorialDetail.as_view(), name='detail_tutorial'),
]
|
<commit_before>from django.conf.urls import include, url
from tutorials import views
urlpatterns = [
url(r'^$', views.ListTutorials.as_view()),
url(r'add/', views.NewTutorial.as_view(), name='add_tutorial'),
url(r'(?P<tutorial_id>[\w\-]+)/edit/', views.EditTutorials.as_view(), name='edit_tutorial'),
# This must be last, otherwise it will match anything
url(r'^(?P<tutorial_id>[\w\-]+)/$', views.TutorialDetail.as_view(), name='detail_tutorial'),
]<commit_msg>Add url name to ListView, New url for delete view, Refactor ViewClass name for NewTutorials to CreateNewTutorials<commit_after>from django.conf.urls import include, url
from tutorials import views
urlpatterns = [
url(r'^$', views.ListTutorials.as_view(), name='list_tutorials'),
url(r'add/', views.CreateNewTutorial.as_view(), name='add_tutorial'),
url(r'(?P<tutorial_id>[\w\-]+)/edit/', views.EditTutorials.as_view(), name='edit_tutorial'),
url(r'(?P<tutorial_id>[\w\-]+)/delete/', views.DeleteTutorial.as_view(), name='delete_tutorial'),
# This must be last, otherwise it will match anything
url(r'^(?P<tutorial_id>[\w\-]+)/$', views.TutorialDetail.as_view(), name='detail_tutorial'),
]
|
835b8adfb610cdac0233840497f3a1cf9860f946
|
cerebro/tests/core/test_usecases.py
|
cerebro/tests/core/test_usecases.py
|
import unittest
import cerebro.core.entities as en
import cerebro.core.usecases as uc
class TestUseCases(unittest.TestCase):
def setUp(self):
self.neurons_path = ["./cerebro/neurons"]
self.neuron_test = ("system check")
self.neuron_test_response = "All working properly."
self.command_args = ("arg1", "arg2")
self.test_command = en.Command(self.neuron_test, self.command_args)
self.error_test = ("asd asdasd ")
self.error_test_response = "Sorry, I could not process that."
self.error_command = en.Command(self.error_test, self.command_args)
self.total_neurons = 2
uc.get_all_neurons(self.neurons_path)
def test_get_all_neurons(self):
assert len(uc.NEURONS) == self.total_neurons
def test_neuron_execution(self):
assert uc.NEURONS[self.neuron_test]() == self.neuron_test_response
def test_command_execution(self):
response = uc.process_command(self.test_command)
assert response == self.neuron_test_response
def test_command_execution_faliure(self):
response = uc.process_command(self.error_command)
assert response == self.error_test_response
|
import unittest
import cerebro.core.entities as en
import cerebro.core.usecases as uc
class TestUseCases(unittest.TestCase):
def setUp(self):
self.neurons_path = ["./cerebro/neurons"]
self.neuron_test = ("system check")
self.neuron_test_response = "All working properly."
self.command_args = ("arg1", "arg2")
self.test_command = en.Command(self.neuron_test, self.command_args)
self.total_neurons = 2
uc.get_all_neurons(self.neurons_path)
def test_get_all_neurons(self):
assert len(uc.NEURONS) == self.total_neurons
def test_neuron_execution(self):
assert uc.NEURONS[self.neuron_test]() == self.neuron_test_response
def test_command_execution(self):
response = uc.process_command(self.test_command)
assert response == self.neuron_test_response
def test_command_execution_faliure(self):
error_test = ("asd asdasd ")
error_test_response = "Sorry, I could not process that."
error_command = en.Command(error_test, self.command_args)
response = uc.process_command(error_command)
assert response == error_test_response
|
Test cases changed and minor optimization
|
Test cases changed and minor optimization
|
Python
|
mit
|
Le-Bot/cerebro
|
import unittest
import cerebro.core.entities as en
import cerebro.core.usecases as uc
class TestUseCases(unittest.TestCase):
def setUp(self):
self.neurons_path = ["./cerebro/neurons"]
self.neuron_test = ("system check")
self.neuron_test_response = "All working properly."
self.command_args = ("arg1", "arg2")
self.test_command = en.Command(self.neuron_test, self.command_args)
self.error_test = ("asd asdasd ")
self.error_test_response = "Sorry, I could not process that."
self.error_command = en.Command(self.error_test, self.command_args)
self.total_neurons = 2
uc.get_all_neurons(self.neurons_path)
def test_get_all_neurons(self):
assert len(uc.NEURONS) == self.total_neurons
def test_neuron_execution(self):
assert uc.NEURONS[self.neuron_test]() == self.neuron_test_response
def test_command_execution(self):
response = uc.process_command(self.test_command)
assert response == self.neuron_test_response
def test_command_execution_faliure(self):
response = uc.process_command(self.error_command)
assert response == self.error_test_responseTest cases changed and minor optimization
|
import unittest
import cerebro.core.entities as en
import cerebro.core.usecases as uc
class TestUseCases(unittest.TestCase):
def setUp(self):
self.neurons_path = ["./cerebro/neurons"]
self.neuron_test = ("system check")
self.neuron_test_response = "All working properly."
self.command_args = ("arg1", "arg2")
self.test_command = en.Command(self.neuron_test, self.command_args)
self.total_neurons = 2
uc.get_all_neurons(self.neurons_path)
def test_get_all_neurons(self):
assert len(uc.NEURONS) == self.total_neurons
def test_neuron_execution(self):
assert uc.NEURONS[self.neuron_test]() == self.neuron_test_response
def test_command_execution(self):
response = uc.process_command(self.test_command)
assert response == self.neuron_test_response
def test_command_execution_faliure(self):
error_test = ("asd asdasd ")
error_test_response = "Sorry, I could not process that."
error_command = en.Command(error_test, self.command_args)
response = uc.process_command(error_command)
assert response == error_test_response
|
<commit_before>import unittest
import cerebro.core.entities as en
import cerebro.core.usecases as uc
class TestUseCases(unittest.TestCase):
def setUp(self):
self.neurons_path = ["./cerebro/neurons"]
self.neuron_test = ("system check")
self.neuron_test_response = "All working properly."
self.command_args = ("arg1", "arg2")
self.test_command = en.Command(self.neuron_test, self.command_args)
self.error_test = ("asd asdasd ")
self.error_test_response = "Sorry, I could not process that."
self.error_command = en.Command(self.error_test, self.command_args)
self.total_neurons = 2
uc.get_all_neurons(self.neurons_path)
def test_get_all_neurons(self):
assert len(uc.NEURONS) == self.total_neurons
def test_neuron_execution(self):
assert uc.NEURONS[self.neuron_test]() == self.neuron_test_response
def test_command_execution(self):
response = uc.process_command(self.test_command)
assert response == self.neuron_test_response
def test_command_execution_faliure(self):
response = uc.process_command(self.error_command)
assert response == self.error_test_response<commit_msg>Test cases changed and minor optimization<commit_after>
|
import unittest
import cerebro.core.entities as en
import cerebro.core.usecases as uc
class TestUseCases(unittest.TestCase):
def setUp(self):
self.neurons_path = ["./cerebro/neurons"]
self.neuron_test = ("system check")
self.neuron_test_response = "All working properly."
self.command_args = ("arg1", "arg2")
self.test_command = en.Command(self.neuron_test, self.command_args)
self.total_neurons = 2
uc.get_all_neurons(self.neurons_path)
def test_get_all_neurons(self):
assert len(uc.NEURONS) == self.total_neurons
def test_neuron_execution(self):
assert uc.NEURONS[self.neuron_test]() == self.neuron_test_response
def test_command_execution(self):
response = uc.process_command(self.test_command)
assert response == self.neuron_test_response
def test_command_execution_faliure(self):
error_test = ("asd asdasd ")
error_test_response = "Sorry, I could not process that."
error_command = en.Command(error_test, self.command_args)
response = uc.process_command(error_command)
assert response == error_test_response
|
import unittest
import cerebro.core.entities as en
import cerebro.core.usecases as uc
class TestUseCases(unittest.TestCase):
def setUp(self):
self.neurons_path = ["./cerebro/neurons"]
self.neuron_test = ("system check")
self.neuron_test_response = "All working properly."
self.command_args = ("arg1", "arg2")
self.test_command = en.Command(self.neuron_test, self.command_args)
self.error_test = ("asd asdasd ")
self.error_test_response = "Sorry, I could not process that."
self.error_command = en.Command(self.error_test, self.command_args)
self.total_neurons = 2
uc.get_all_neurons(self.neurons_path)
def test_get_all_neurons(self):
assert len(uc.NEURONS) == self.total_neurons
def test_neuron_execution(self):
assert uc.NEURONS[self.neuron_test]() == self.neuron_test_response
def test_command_execution(self):
response = uc.process_command(self.test_command)
assert response == self.neuron_test_response
def test_command_execution_faliure(self):
response = uc.process_command(self.error_command)
assert response == self.error_test_responseTest cases changed and minor optimizationimport unittest
import cerebro.core.entities as en
import cerebro.core.usecases as uc
class TestUseCases(unittest.TestCase):
def setUp(self):
self.neurons_path = ["./cerebro/neurons"]
self.neuron_test = ("system check")
self.neuron_test_response = "All working properly."
self.command_args = ("arg1", "arg2")
self.test_command = en.Command(self.neuron_test, self.command_args)
self.total_neurons = 2
uc.get_all_neurons(self.neurons_path)
def test_get_all_neurons(self):
assert len(uc.NEURONS) == self.total_neurons
def test_neuron_execution(self):
assert uc.NEURONS[self.neuron_test]() == self.neuron_test_response
def test_command_execution(self):
response = uc.process_command(self.test_command)
assert response == self.neuron_test_response
def test_command_execution_faliure(self):
error_test = ("asd asdasd ")
error_test_response = "Sorry, I could not process that."
error_command = en.Command(error_test, self.command_args)
response = uc.process_command(error_command)
assert response == error_test_response
|
<commit_before>import unittest
import cerebro.core.entities as en
import cerebro.core.usecases as uc
class TestUseCases(unittest.TestCase):
def setUp(self):
self.neurons_path = ["./cerebro/neurons"]
self.neuron_test = ("system check")
self.neuron_test_response = "All working properly."
self.command_args = ("arg1", "arg2")
self.test_command = en.Command(self.neuron_test, self.command_args)
self.error_test = ("asd asdasd ")
self.error_test_response = "Sorry, I could not process that."
self.error_command = en.Command(self.error_test, self.command_args)
self.total_neurons = 2
uc.get_all_neurons(self.neurons_path)
def test_get_all_neurons(self):
assert len(uc.NEURONS) == self.total_neurons
def test_neuron_execution(self):
assert uc.NEURONS[self.neuron_test]() == self.neuron_test_response
def test_command_execution(self):
response = uc.process_command(self.test_command)
assert response == self.neuron_test_response
def test_command_execution_faliure(self):
response = uc.process_command(self.error_command)
assert response == self.error_test_response<commit_msg>Test cases changed and minor optimization<commit_after>import unittest
import cerebro.core.entities as en
import cerebro.core.usecases as uc
class TestUseCases(unittest.TestCase):
def setUp(self):
self.neurons_path = ["./cerebro/neurons"]
self.neuron_test = ("system check")
self.neuron_test_response = "All working properly."
self.command_args = ("arg1", "arg2")
self.test_command = en.Command(self.neuron_test, self.command_args)
self.total_neurons = 2
uc.get_all_neurons(self.neurons_path)
def test_get_all_neurons(self):
assert len(uc.NEURONS) == self.total_neurons
def test_neuron_execution(self):
assert uc.NEURONS[self.neuron_test]() == self.neuron_test_response
def test_command_execution(self):
response = uc.process_command(self.test_command)
assert response == self.neuron_test_response
def test_command_execution_faliure(self):
error_test = ("asd asdasd ")
error_test_response = "Sorry, I could not process that."
error_command = en.Command(error_test, self.command_args)
response = uc.process_command(error_command)
assert response == error_test_response
|
8aabbacd06e0b634b40a77270e6bf20257289f56
|
bin/combine_results.py
|
bin/combine_results.py
|
#!/usr/bin/env python
"""
Simple script to combine JUnit test results into a single XML file.
Useful for Jenkins.
TODO: Pretty indentation
"""
import os
from xml.etree import cElementTree as ET
def find_all(name, path):
result = []
for root, dirs, files in os.walk(path):
if name in files:
yield os.path.join(root, name)
def main(path, output):
testsuite = ET.Element("testsuite", name="all", package="all", tests="0")
for fname in find_all("results.xml", path):
tree = ET.parse(fname)
for element in tree.iter("testcase"):
testsuite.append(element)
result = ET.Element("testsuites", name="results")
result.append(testsuite)
ET.ElementTree(result).write(output, encoding="UTF-8")
if __name__ == "__main__":
main(".", "results.xml")
|
#!/usr/bin/env python
"""
Simple script to combine JUnit test results into a single XML file.
Useful for Jenkins.
TODO: Pretty indentation
"""
import os
from xml.etree import cElementTree as ET
def find_all(name, path):
result = []
for root, dirs, files in os.walk(path):
if name in files:
yield os.path.join(root, name)
def main(path, output):
testsuite = ET.Element("testsuite", name="all", package="all", tests="0")
for fname in find_all("results.xml", path):
tree = ET.parse(fname)
for element in tree.iter("testcase"):
testsuite.append(element)
result = ET.Element("testsuites", name="results")
result.append(testsuite)
ET.ElementTree(result).write(output, encoding="UTF-8")
if __name__ == "__main__":
main(".", "combined_results.xml")
|
Change output filename for combined results to avoid recursive accumulation
|
Change output filename for combined results to avoid recursive accumulation
|
Python
|
bsd-3-clause
|
stuarthodgson/cocotb,mkreider/cocotb2,stuarthodgson/cocotb,mkreider/cocotb2,mkreider/cocotb2,stuarthodgson/cocotb
|
#!/usr/bin/env python
"""
Simple script to combine JUnit test results into a single XML file.
Useful for Jenkins.
TODO: Pretty indentation
"""
import os
from xml.etree import cElementTree as ET
def find_all(name, path):
result = []
for root, dirs, files in os.walk(path):
if name in files:
yield os.path.join(root, name)
def main(path, output):
testsuite = ET.Element("testsuite", name="all", package="all", tests="0")
for fname in find_all("results.xml", path):
tree = ET.parse(fname)
for element in tree.iter("testcase"):
testsuite.append(element)
result = ET.Element("testsuites", name="results")
result.append(testsuite)
ET.ElementTree(result).write(output, encoding="UTF-8")
if __name__ == "__main__":
main(".", "results.xml")
Change output filename for combined results to avoid recursive accumulation
|
#!/usr/bin/env python
"""
Simple script to combine JUnit test results into a single XML file.
Useful for Jenkins.
TODO: Pretty indentation
"""
import os
from xml.etree import cElementTree as ET
def find_all(name, path):
result = []
for root, dirs, files in os.walk(path):
if name in files:
yield os.path.join(root, name)
def main(path, output):
testsuite = ET.Element("testsuite", name="all", package="all", tests="0")
for fname in find_all("results.xml", path):
tree = ET.parse(fname)
for element in tree.iter("testcase"):
testsuite.append(element)
result = ET.Element("testsuites", name="results")
result.append(testsuite)
ET.ElementTree(result).write(output, encoding="UTF-8")
if __name__ == "__main__":
main(".", "combined_results.xml")
|
<commit_before>#!/usr/bin/env python
"""
Simple script to combine JUnit test results into a single XML file.
Useful for Jenkins.
TODO: Pretty indentation
"""
import os
from xml.etree import cElementTree as ET
def find_all(name, path):
result = []
for root, dirs, files in os.walk(path):
if name in files:
yield os.path.join(root, name)
def main(path, output):
testsuite = ET.Element("testsuite", name="all", package="all", tests="0")
for fname in find_all("results.xml", path):
tree = ET.parse(fname)
for element in tree.iter("testcase"):
testsuite.append(element)
result = ET.Element("testsuites", name="results")
result.append(testsuite)
ET.ElementTree(result).write(output, encoding="UTF-8")
if __name__ == "__main__":
main(".", "results.xml")
<commit_msg>Change output filename for combined results to avoid recursive accumulation<commit_after>
|
#!/usr/bin/env python
"""
Simple script to combine JUnit test results into a single XML file.
Useful for Jenkins.
TODO: Pretty indentation
"""
import os
from xml.etree import cElementTree as ET
def find_all(name, path):
result = []
for root, dirs, files in os.walk(path):
if name in files:
yield os.path.join(root, name)
def main(path, output):
testsuite = ET.Element("testsuite", name="all", package="all", tests="0")
for fname in find_all("results.xml", path):
tree = ET.parse(fname)
for element in tree.iter("testcase"):
testsuite.append(element)
result = ET.Element("testsuites", name="results")
result.append(testsuite)
ET.ElementTree(result).write(output, encoding="UTF-8")
if __name__ == "__main__":
main(".", "combined_results.xml")
|
#!/usr/bin/env python
"""
Simple script to combine JUnit test results into a single XML file.
Useful for Jenkins.
TODO: Pretty indentation
"""
import os
from xml.etree import cElementTree as ET
def find_all(name, path):
result = []
for root, dirs, files in os.walk(path):
if name in files:
yield os.path.join(root, name)
def main(path, output):
testsuite = ET.Element("testsuite", name="all", package="all", tests="0")
for fname in find_all("results.xml", path):
tree = ET.parse(fname)
for element in tree.iter("testcase"):
testsuite.append(element)
result = ET.Element("testsuites", name="results")
result.append(testsuite)
ET.ElementTree(result).write(output, encoding="UTF-8")
if __name__ == "__main__":
main(".", "results.xml")
Change output filename for combined results to avoid recursive accumulation#!/usr/bin/env python
"""
Simple script to combine JUnit test results into a single XML file.
Useful for Jenkins.
TODO: Pretty indentation
"""
import os
from xml.etree import cElementTree as ET
def find_all(name, path):
result = []
for root, dirs, files in os.walk(path):
if name in files:
yield os.path.join(root, name)
def main(path, output):
testsuite = ET.Element("testsuite", name="all", package="all", tests="0")
for fname in find_all("results.xml", path):
tree = ET.parse(fname)
for element in tree.iter("testcase"):
testsuite.append(element)
result = ET.Element("testsuites", name="results")
result.append(testsuite)
ET.ElementTree(result).write(output, encoding="UTF-8")
if __name__ == "__main__":
main(".", "combined_results.xml")
|
<commit_before>#!/usr/bin/env python
"""
Simple script to combine JUnit test results into a single XML file.
Useful for Jenkins.
TODO: Pretty indentation
"""
import os
from xml.etree import cElementTree as ET
def find_all(name, path):
result = []
for root, dirs, files in os.walk(path):
if name in files:
yield os.path.join(root, name)
def main(path, output):
testsuite = ET.Element("testsuite", name="all", package="all", tests="0")
for fname in find_all("results.xml", path):
tree = ET.parse(fname)
for element in tree.iter("testcase"):
testsuite.append(element)
result = ET.Element("testsuites", name="results")
result.append(testsuite)
ET.ElementTree(result).write(output, encoding="UTF-8")
if __name__ == "__main__":
main(".", "results.xml")
<commit_msg>Change output filename for combined results to avoid recursive accumulation<commit_after>#!/usr/bin/env python
"""
Simple script to combine JUnit test results into a single XML file.
Useful for Jenkins.
TODO: Pretty indentation
"""
import os
from xml.etree import cElementTree as ET
def find_all(name, path):
result = []
for root, dirs, files in os.walk(path):
if name in files:
yield os.path.join(root, name)
def main(path, output):
testsuite = ET.Element("testsuite", name="all", package="all", tests="0")
for fname in find_all("results.xml", path):
tree = ET.parse(fname)
for element in tree.iter("testcase"):
testsuite.append(element)
result = ET.Element("testsuites", name="results")
result.append(testsuite)
ET.ElementTree(result).write(output, encoding="UTF-8")
if __name__ == "__main__":
main(".", "combined_results.xml")
|
a0775510c81494777ab1adf7c822c4ca9a0227b2
|
tensorbayes/distributions.py
|
tensorbayes/distributions.py
|
""" Assumes softplus activations for gaussian
"""
import tensorflow as tf
import numpy as np
def log_bernoulli_with_logits(x, logits):
return -tf.reduce_sum(tf.nn.sigmoid_cross_entropy_with_logits(logits, x), 1)
def log_normal(x, mu, var):
return -0.5 * tf.reduce_sum(tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, 1)
|
""" Assumes softplus activations for gaussian
"""
import tensorflow as tf
import numpy as np
def log_bernoulli_with_logits(x, logits, eps=0.0):
if eps > 0.0:
max_val = np.log(1.0 - eps) - np.log(eps)
logits = tf.clip_by_value(logits, -max_val, max_val, name='clipped_logit')
return -tf.reduce_sum(tf.nn.sigmoid_cross_entropy_with_logits(logits, x), 1)
def log_normal(x, mu, var, eps=0.0):
if eps > 0.0:
var = tf.add(var, eps, name='clipped_var')
return -0.5 * tf.reduce_sum(tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, 1)
|
Add eps factor for numerical stability
|
Add eps factor for numerical stability
|
Python
|
mit
|
RuiShu/tensorbayes
|
""" Assumes softplus activations for gaussian
"""
import tensorflow as tf
import numpy as np
def log_bernoulli_with_logits(x, logits):
return -tf.reduce_sum(tf.nn.sigmoid_cross_entropy_with_logits(logits, x), 1)
def log_normal(x, mu, var):
return -0.5 * tf.reduce_sum(tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, 1)
Add eps factor for numerical stability
|
""" Assumes softplus activations for gaussian
"""
import tensorflow as tf
import numpy as np
def log_bernoulli_with_logits(x, logits, eps=0.0):
if eps > 0.0:
max_val = np.log(1.0 - eps) - np.log(eps)
logits = tf.clip_by_value(logits, -max_val, max_val, name='clipped_logit')
return -tf.reduce_sum(tf.nn.sigmoid_cross_entropy_with_logits(logits, x), 1)
def log_normal(x, mu, var, eps=0.0):
if eps > 0.0:
var = tf.add(var, eps, name='clipped_var')
return -0.5 * tf.reduce_sum(tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, 1)
|
<commit_before>""" Assumes softplus activations for gaussian
"""
import tensorflow as tf
import numpy as np
def log_bernoulli_with_logits(x, logits):
return -tf.reduce_sum(tf.nn.sigmoid_cross_entropy_with_logits(logits, x), 1)
def log_normal(x, mu, var):
return -0.5 * tf.reduce_sum(tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, 1)
<commit_msg>Add eps factor for numerical stability<commit_after>
|
""" Assumes softplus activations for gaussian
"""
import tensorflow as tf
import numpy as np
def log_bernoulli_with_logits(x, logits, eps=0.0):
if eps > 0.0:
max_val = np.log(1.0 - eps) - np.log(eps)
logits = tf.clip_by_value(logits, -max_val, max_val, name='clipped_logit')
return -tf.reduce_sum(tf.nn.sigmoid_cross_entropy_with_logits(logits, x), 1)
def log_normal(x, mu, var, eps=0.0):
if eps > 0.0:
var = tf.add(var, eps, name='clipped_var')
return -0.5 * tf.reduce_sum(tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, 1)
|
""" Assumes softplus activations for gaussian
"""
import tensorflow as tf
import numpy as np
def log_bernoulli_with_logits(x, logits):
return -tf.reduce_sum(tf.nn.sigmoid_cross_entropy_with_logits(logits, x), 1)
def log_normal(x, mu, var):
return -0.5 * tf.reduce_sum(tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, 1)
Add eps factor for numerical stability""" Assumes softplus activations for gaussian
"""
import tensorflow as tf
import numpy as np
def log_bernoulli_with_logits(x, logits, eps=0.0):
if eps > 0.0:
max_val = np.log(1.0 - eps) - np.log(eps)
logits = tf.clip_by_value(logits, -max_val, max_val, name='clipped_logit')
return -tf.reduce_sum(tf.nn.sigmoid_cross_entropy_with_logits(logits, x), 1)
def log_normal(x, mu, var, eps=0.0):
if eps > 0.0:
var = tf.add(var, eps, name='clipped_var')
return -0.5 * tf.reduce_sum(tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, 1)
|
<commit_before>""" Assumes softplus activations for gaussian
"""
import tensorflow as tf
import numpy as np
def log_bernoulli_with_logits(x, logits):
return -tf.reduce_sum(tf.nn.sigmoid_cross_entropy_with_logits(logits, x), 1)
def log_normal(x, mu, var):
return -0.5 * tf.reduce_sum(tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, 1)
<commit_msg>Add eps factor for numerical stability<commit_after>""" Assumes softplus activations for gaussian
"""
import tensorflow as tf
import numpy as np
def log_bernoulli_with_logits(x, logits, eps=0.0):
if eps > 0.0:
max_val = np.log(1.0 - eps) - np.log(eps)
logits = tf.clip_by_value(logits, -max_val, max_val, name='clipped_logit')
return -tf.reduce_sum(tf.nn.sigmoid_cross_entropy_with_logits(logits, x), 1)
def log_normal(x, mu, var, eps=0.0):
if eps > 0.0:
var = tf.add(var, eps, name='clipped_var')
return -0.5 * tf.reduce_sum(tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, 1)
|
477822717759df88624644df8b4e64aa81463c42
|
kippt_reader/settings/production.py
|
kippt_reader/settings/production.py
|
from os import environ
import dj_database_url
from .base import *
INSTALLED_APPS += (
'djangosecure',
)
PRODUCTION_MIDDLEWARE_CLASSES = (
'djangosecure.middleware.SecurityMiddleware',
)
MIDDLEWARE_CLASSES = PRODUCTION_MIDDLEWARE_CLASSES + MIDDLEWARE_CLASSES
DATABASES = {'default': dj_database_url.config()}
SECRET_KEY = environ.get('SECRET_KEY')
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ALLOWED_HOSTS = [DOMAIN]
# django-secure
SESSION_COOKIE_SECURE = True
SECURE_SSL_REDIRECT = True
SECURE_HSTS_SECONDS = 15
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
SECURE_FRAME_DENY = True
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_BROWSER_XSS_FILTER = True
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
SECURE_REDIRECT_EXEMPT = [
'^(?!hub/).*'
]
|
from os import environ
import dj_database_url
from .base import *
INSTALLED_APPS += (
'djangosecure',
)
PRODUCTION_MIDDLEWARE_CLASSES = (
'djangosecure.middleware.SecurityMiddleware',
)
MIDDLEWARE_CLASSES = PRODUCTION_MIDDLEWARE_CLASSES + MIDDLEWARE_CLASSES
DATABASES = {'default': dj_database_url.config()}
SECRET_KEY = environ.get('SECRET_KEY')
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ALLOWED_HOSTS = [DOMAIN]
# django-secure
SESSION_COOKIE_SECURE = True
SECURE_SSL_REDIRECT = True
SECURE_HSTS_SECONDS = 15
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
SECURE_FRAME_DENY = True
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_BROWSER_XSS_FILTER = True
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
|
Remove exempt for hub url
|
Remove exempt for hub url
|
Python
|
mit
|
jpadilla/feedleap,jpadilla/feedleap
|
from os import environ
import dj_database_url
from .base import *
INSTALLED_APPS += (
'djangosecure',
)
PRODUCTION_MIDDLEWARE_CLASSES = (
'djangosecure.middleware.SecurityMiddleware',
)
MIDDLEWARE_CLASSES = PRODUCTION_MIDDLEWARE_CLASSES + MIDDLEWARE_CLASSES
DATABASES = {'default': dj_database_url.config()}
SECRET_KEY = environ.get('SECRET_KEY')
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ALLOWED_HOSTS = [DOMAIN]
# django-secure
SESSION_COOKIE_SECURE = True
SECURE_SSL_REDIRECT = True
SECURE_HSTS_SECONDS = 15
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
SECURE_FRAME_DENY = True
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_BROWSER_XSS_FILTER = True
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
SECURE_REDIRECT_EXEMPT = [
'^(?!hub/).*'
]
Remove exempt for hub url
|
from os import environ
import dj_database_url
from .base import *
INSTALLED_APPS += (
'djangosecure',
)
PRODUCTION_MIDDLEWARE_CLASSES = (
'djangosecure.middleware.SecurityMiddleware',
)
MIDDLEWARE_CLASSES = PRODUCTION_MIDDLEWARE_CLASSES + MIDDLEWARE_CLASSES
DATABASES = {'default': dj_database_url.config()}
SECRET_KEY = environ.get('SECRET_KEY')
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ALLOWED_HOSTS = [DOMAIN]
# django-secure
SESSION_COOKIE_SECURE = True
SECURE_SSL_REDIRECT = True
SECURE_HSTS_SECONDS = 15
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
SECURE_FRAME_DENY = True
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_BROWSER_XSS_FILTER = True
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
|
<commit_before>from os import environ
import dj_database_url
from .base import *
INSTALLED_APPS += (
'djangosecure',
)
PRODUCTION_MIDDLEWARE_CLASSES = (
'djangosecure.middleware.SecurityMiddleware',
)
MIDDLEWARE_CLASSES = PRODUCTION_MIDDLEWARE_CLASSES + MIDDLEWARE_CLASSES
DATABASES = {'default': dj_database_url.config()}
SECRET_KEY = environ.get('SECRET_KEY')
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ALLOWED_HOSTS = [DOMAIN]
# django-secure
SESSION_COOKIE_SECURE = True
SECURE_SSL_REDIRECT = True
SECURE_HSTS_SECONDS = 15
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
SECURE_FRAME_DENY = True
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_BROWSER_XSS_FILTER = True
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
SECURE_REDIRECT_EXEMPT = [
'^(?!hub/).*'
]
<commit_msg>Remove exempt for hub url<commit_after>
|
from os import environ
import dj_database_url
from .base import *
INSTALLED_APPS += (
'djangosecure',
)
PRODUCTION_MIDDLEWARE_CLASSES = (
'djangosecure.middleware.SecurityMiddleware',
)
MIDDLEWARE_CLASSES = PRODUCTION_MIDDLEWARE_CLASSES + MIDDLEWARE_CLASSES
DATABASES = {'default': dj_database_url.config()}
SECRET_KEY = environ.get('SECRET_KEY')
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ALLOWED_HOSTS = [DOMAIN]
# django-secure
SESSION_COOKIE_SECURE = True
SECURE_SSL_REDIRECT = True
SECURE_HSTS_SECONDS = 15
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
SECURE_FRAME_DENY = True
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_BROWSER_XSS_FILTER = True
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
|
from os import environ
import dj_database_url
from .base import *
INSTALLED_APPS += (
'djangosecure',
)
PRODUCTION_MIDDLEWARE_CLASSES = (
'djangosecure.middleware.SecurityMiddleware',
)
MIDDLEWARE_CLASSES = PRODUCTION_MIDDLEWARE_CLASSES + MIDDLEWARE_CLASSES
DATABASES = {'default': dj_database_url.config()}
SECRET_KEY = environ.get('SECRET_KEY')
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ALLOWED_HOSTS = [DOMAIN]
# django-secure
SESSION_COOKIE_SECURE = True
SECURE_SSL_REDIRECT = True
SECURE_HSTS_SECONDS = 15
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
SECURE_FRAME_DENY = True
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_BROWSER_XSS_FILTER = True
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
SECURE_REDIRECT_EXEMPT = [
'^(?!hub/).*'
]
Remove exempt for hub urlfrom os import environ
import dj_database_url
from .base import *
INSTALLED_APPS += (
'djangosecure',
)
PRODUCTION_MIDDLEWARE_CLASSES = (
'djangosecure.middleware.SecurityMiddleware',
)
MIDDLEWARE_CLASSES = PRODUCTION_MIDDLEWARE_CLASSES + MIDDLEWARE_CLASSES
DATABASES = {'default': dj_database_url.config()}
SECRET_KEY = environ.get('SECRET_KEY')
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ALLOWED_HOSTS = [DOMAIN]
# django-secure
SESSION_COOKIE_SECURE = True
SECURE_SSL_REDIRECT = True
SECURE_HSTS_SECONDS = 15
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
SECURE_FRAME_DENY = True
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_BROWSER_XSS_FILTER = True
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
|
<commit_before>from os import environ
import dj_database_url
from .base import *
INSTALLED_APPS += (
'djangosecure',
)
PRODUCTION_MIDDLEWARE_CLASSES = (
'djangosecure.middleware.SecurityMiddleware',
)
MIDDLEWARE_CLASSES = PRODUCTION_MIDDLEWARE_CLASSES + MIDDLEWARE_CLASSES
DATABASES = {'default': dj_database_url.config()}
SECRET_KEY = environ.get('SECRET_KEY')
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ALLOWED_HOSTS = [DOMAIN]
# django-secure
SESSION_COOKIE_SECURE = True
SECURE_SSL_REDIRECT = True
SECURE_HSTS_SECONDS = 15
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
SECURE_FRAME_DENY = True
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_BROWSER_XSS_FILTER = True
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
SECURE_REDIRECT_EXEMPT = [
'^(?!hub/).*'
]
<commit_msg>Remove exempt for hub url<commit_after>from os import environ
import dj_database_url
from .base import *
INSTALLED_APPS += (
'djangosecure',
)
PRODUCTION_MIDDLEWARE_CLASSES = (
'djangosecure.middleware.SecurityMiddleware',
)
MIDDLEWARE_CLASSES = PRODUCTION_MIDDLEWARE_CLASSES + MIDDLEWARE_CLASSES
DATABASES = {'default': dj_database_url.config()}
SECRET_KEY = environ.get('SECRET_KEY')
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ALLOWED_HOSTS = [DOMAIN]
# django-secure
SESSION_COOKIE_SECURE = True
SECURE_SSL_REDIRECT = True
SECURE_HSTS_SECONDS = 15
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
SECURE_FRAME_DENY = True
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_BROWSER_XSS_FILTER = True
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
|
f603d382ab8b93677713d6c9c26f9b6a2616ba13
|
src/utils/indices.py
|
src/utils/indices.py
|
import json
import os
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Index
from model import APIDoc
_dirname = os.path.dirname(__file__)
with open(os.path.join(_dirname, 'mapping.json'), 'r') as file:
SMARTAPI_MAPPING = json.load(file)
def setup():
"""
Setup Elasticsearch Index.
Primary index with dynamic template.
Secondary index with static mappings.
"""
if not Index(APIDoc.Index.name).exists():
APIDoc.init()
elastic = Elasticsearch()
elastic.indices.put_mapping(
index=APIDoc.Index.name,
body=SMARTAPI_MAPPING
)
def reset():
index = Index(APIDoc.Index.name)
if index.exists():
index.delete()
setup()
def refresh():
index = Index(APIDoc.Index.name)
index.refresh()
|
import json
import os
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Index
from model import APIDoc
_dirname = os.path.dirname(__file__)
with open(os.path.join(_dirname, 'mapping.json'), 'r') as file:
SMARTAPI_MAPPING = json.load(file)
def exists():
return Index(APIDoc.Index.name).exists()
def setup():
"""
Setup Elasticsearch Index.
Primary index with dynamic template.
Secondary index with static mappings.
"""
if not exists():
APIDoc.init()
elastic = Elasticsearch()
elastic.indices.put_mapping(
index=APIDoc.Index.name,
body=SMARTAPI_MAPPING
)
def delete():
Index(APIDoc.Index.name).delete()
def reset():
if exists():
delete()
setup()
def refresh():
index = Index(APIDoc.Index.name)
index.refresh()
|
Add a few methods used in admin.py
|
Add a few methods used in admin.py
|
Python
|
mit
|
Network-of-BioThings/smartAPI,Network-of-BioThings/smartAPI,Network-of-BioThings/smartAPI,Network-of-BioThings/smartAPI,Network-of-BioThings/smartAPI
|
import json
import os
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Index
from model import APIDoc
_dirname = os.path.dirname(__file__)
with open(os.path.join(_dirname, 'mapping.json'), 'r') as file:
SMARTAPI_MAPPING = json.load(file)
def setup():
"""
Setup Elasticsearch Index.
Primary index with dynamic template.
Secondary index with static mappings.
"""
if not Index(APIDoc.Index.name).exists():
APIDoc.init()
elastic = Elasticsearch()
elastic.indices.put_mapping(
index=APIDoc.Index.name,
body=SMARTAPI_MAPPING
)
def reset():
index = Index(APIDoc.Index.name)
if index.exists():
index.delete()
setup()
def refresh():
index = Index(APIDoc.Index.name)
index.refresh()
Add a few methods used in admin.py
|
import json
import os
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Index
from model import APIDoc
_dirname = os.path.dirname(__file__)
with open(os.path.join(_dirname, 'mapping.json'), 'r') as file:
SMARTAPI_MAPPING = json.load(file)
def exists():
return Index(APIDoc.Index.name).exists()
def setup():
"""
Setup Elasticsearch Index.
Primary index with dynamic template.
Secondary index with static mappings.
"""
if not exists():
APIDoc.init()
elastic = Elasticsearch()
elastic.indices.put_mapping(
index=APIDoc.Index.name,
body=SMARTAPI_MAPPING
)
def delete():
Index(APIDoc.Index.name).delete()
def reset():
if exists():
delete()
setup()
def refresh():
index = Index(APIDoc.Index.name)
index.refresh()
|
<commit_before>import json
import os
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Index
from model import APIDoc
_dirname = os.path.dirname(__file__)
with open(os.path.join(_dirname, 'mapping.json'), 'r') as file:
SMARTAPI_MAPPING = json.load(file)
def setup():
"""
Setup Elasticsearch Index.
Primary index with dynamic template.
Secondary index with static mappings.
"""
if not Index(APIDoc.Index.name).exists():
APIDoc.init()
elastic = Elasticsearch()
elastic.indices.put_mapping(
index=APIDoc.Index.name,
body=SMARTAPI_MAPPING
)
def reset():
index = Index(APIDoc.Index.name)
if index.exists():
index.delete()
setup()
def refresh():
index = Index(APIDoc.Index.name)
index.refresh()
<commit_msg>Add a few methods used in admin.py<commit_after>
|
import json
import os
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Index
from model import APIDoc
_dirname = os.path.dirname(__file__)
with open(os.path.join(_dirname, 'mapping.json'), 'r') as file:
SMARTAPI_MAPPING = json.load(file)
def exists():
return Index(APIDoc.Index.name).exists()
def setup():
"""
Setup Elasticsearch Index.
Primary index with dynamic template.
Secondary index with static mappings.
"""
if not exists():
APIDoc.init()
elastic = Elasticsearch()
elastic.indices.put_mapping(
index=APIDoc.Index.name,
body=SMARTAPI_MAPPING
)
def delete():
Index(APIDoc.Index.name).delete()
def reset():
if exists():
delete()
setup()
def refresh():
index = Index(APIDoc.Index.name)
index.refresh()
|
import json
import os
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Index
from model import APIDoc
_dirname = os.path.dirname(__file__)
with open(os.path.join(_dirname, 'mapping.json'), 'r') as file:
SMARTAPI_MAPPING = json.load(file)
def setup():
"""
Setup Elasticsearch Index.
Primary index with dynamic template.
Secondary index with static mappings.
"""
if not Index(APIDoc.Index.name).exists():
APIDoc.init()
elastic = Elasticsearch()
elastic.indices.put_mapping(
index=APIDoc.Index.name,
body=SMARTAPI_MAPPING
)
def reset():
index = Index(APIDoc.Index.name)
if index.exists():
index.delete()
setup()
def refresh():
index = Index(APIDoc.Index.name)
index.refresh()
Add a few methods used in admin.pyimport json
import os
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Index
from model import APIDoc
_dirname = os.path.dirname(__file__)
with open(os.path.join(_dirname, 'mapping.json'), 'r') as file:
SMARTAPI_MAPPING = json.load(file)
def exists():
return Index(APIDoc.Index.name).exists()
def setup():
"""
Setup Elasticsearch Index.
Primary index with dynamic template.
Secondary index with static mappings.
"""
if not exists():
APIDoc.init()
elastic = Elasticsearch()
elastic.indices.put_mapping(
index=APIDoc.Index.name,
body=SMARTAPI_MAPPING
)
def delete():
Index(APIDoc.Index.name).delete()
def reset():
if exists():
delete()
setup()
def refresh():
index = Index(APIDoc.Index.name)
index.refresh()
|
<commit_before>import json
import os
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Index
from model import APIDoc
_dirname = os.path.dirname(__file__)
with open(os.path.join(_dirname, 'mapping.json'), 'r') as file:
SMARTAPI_MAPPING = json.load(file)
def setup():
"""
Setup Elasticsearch Index.
Primary index with dynamic template.
Secondary index with static mappings.
"""
if not Index(APIDoc.Index.name).exists():
APIDoc.init()
elastic = Elasticsearch()
elastic.indices.put_mapping(
index=APIDoc.Index.name,
body=SMARTAPI_MAPPING
)
def reset():
index = Index(APIDoc.Index.name)
if index.exists():
index.delete()
setup()
def refresh():
index = Index(APIDoc.Index.name)
index.refresh()
<commit_msg>Add a few methods used in admin.py<commit_after>import json
import os
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Index
from model import APIDoc
_dirname = os.path.dirname(__file__)
with open(os.path.join(_dirname, 'mapping.json'), 'r') as file:
SMARTAPI_MAPPING = json.load(file)
def exists():
return Index(APIDoc.Index.name).exists()
def setup():
"""
Setup Elasticsearch Index.
Primary index with dynamic template.
Secondary index with static mappings.
"""
if not exists():
APIDoc.init()
elastic = Elasticsearch()
elastic.indices.put_mapping(
index=APIDoc.Index.name,
body=SMARTAPI_MAPPING
)
def delete():
Index(APIDoc.Index.name).delete()
def reset():
if exists():
delete()
setup()
def refresh():
index = Index(APIDoc.Index.name)
index.refresh()
|
2cee4fbfee7d922dfb83d154c2fa023cb647c4b3
|
sslify/middleware.py
|
sslify/middleware.py
|
from django.conf import settings
from django.http import HttpResponsePermanentRedirect
class SSLifyMiddleware(object):
"""Force all requests to use HTTPs. If we get an HTTP request, we'll just
force a redirect to HTTPs.
.. note::
This will only take effect if ``settings.DEBUG`` is False.
.. note::
You can also disable this middleware when testing by setting
``settings.SSLIFY_DISABLE`` to True.
"""
def process_request(self, request):
# If the user has explicitly disabled SSLify, do nothing.
if getattr(settings, 'SSLIFY_DISABLE', False):
return None
# If we get here, proceed as normal.
if not any((settings.DEBUG, request.is_secure())):
url = request.build_absolute_uri(request.get_full_path())
secure_url = url.replace('http://', 'https://')
return HttpResponsePermanentRedirect(secure_url)
|
from django.conf import settings
from django.http import HttpResponsePermanentRedirect
class SSLifyMiddleware(object):
"""Force all requests to use HTTPs. If we get an HTTP request, we'll just
force a redirect to HTTPs.
.. note::
This will only take effect if ``settings.DEBUG`` is False.
.. note::
You can also disable this middleware when testing by setting
``settings.SSLIFY_DISABLE`` to True.
"""
def process_request(self, request):
# If the user has explicitly disabled SSLify, do nothing.
if getattr(settings, 'SSLIFY_DISABLE', settings.DEBUG):
return None
# If we get here, proceed as normal.
if not request.is_secure():
url = request.build_absolute_uri(request.get_full_path())
secure_url = url.replace('http://', 'https://')
return HttpResponsePermanentRedirect(secure_url)
|
Allow using sslify even in DEBUG mode.
|
Allow using sslify even in DEBUG mode.
|
Python
|
unlicense
|
rdegges/django-sslify
|
from django.conf import settings
from django.http import HttpResponsePermanentRedirect
class SSLifyMiddleware(object):
"""Force all requests to use HTTPs. If we get an HTTP request, we'll just
force a redirect to HTTPs.
.. note::
This will only take effect if ``settings.DEBUG`` is False.
.. note::
You can also disable this middleware when testing by setting
``settings.SSLIFY_DISABLE`` to True.
"""
def process_request(self, request):
# If the user has explicitly disabled SSLify, do nothing.
if getattr(settings, 'SSLIFY_DISABLE', False):
return None
# If we get here, proceed as normal.
if not any((settings.DEBUG, request.is_secure())):
url = request.build_absolute_uri(request.get_full_path())
secure_url = url.replace('http://', 'https://')
return HttpResponsePermanentRedirect(secure_url)
Allow using sslify even in DEBUG mode.
|
from django.conf import settings
from django.http import HttpResponsePermanentRedirect
class SSLifyMiddleware(object):
"""Force all requests to use HTTPs. If we get an HTTP request, we'll just
force a redirect to HTTPs.
.. note::
This will only take effect if ``settings.DEBUG`` is False.
.. note::
You can also disable this middleware when testing by setting
``settings.SSLIFY_DISABLE`` to True.
"""
def process_request(self, request):
# If the user has explicitly disabled SSLify, do nothing.
if getattr(settings, 'SSLIFY_DISABLE', settings.DEBUG):
return None
# If we get here, proceed as normal.
if not request.is_secure():
url = request.build_absolute_uri(request.get_full_path())
secure_url = url.replace('http://', 'https://')
return HttpResponsePermanentRedirect(secure_url)
|
<commit_before>from django.conf import settings
from django.http import HttpResponsePermanentRedirect
class SSLifyMiddleware(object):
"""Force all requests to use HTTPs. If we get an HTTP request, we'll just
force a redirect to HTTPs.
.. note::
This will only take effect if ``settings.DEBUG`` is False.
.. note::
You can also disable this middleware when testing by setting
``settings.SSLIFY_DISABLE`` to True.
"""
def process_request(self, request):
# If the user has explicitly disabled SSLify, do nothing.
if getattr(settings, 'SSLIFY_DISABLE', False):
return None
# If we get here, proceed as normal.
if not any((settings.DEBUG, request.is_secure())):
url = request.build_absolute_uri(request.get_full_path())
secure_url = url.replace('http://', 'https://')
return HttpResponsePermanentRedirect(secure_url)
<commit_msg>Allow using sslify even in DEBUG mode.<commit_after>
|
from django.conf import settings
from django.http import HttpResponsePermanentRedirect
class SSLifyMiddleware(object):
"""Force all requests to use HTTPs. If we get an HTTP request, we'll just
force a redirect to HTTPs.
.. note::
This will only take effect if ``settings.DEBUG`` is False.
.. note::
You can also disable this middleware when testing by setting
``settings.SSLIFY_DISABLE`` to True.
"""
def process_request(self, request):
# If the user has explicitly disabled SSLify, do nothing.
if getattr(settings, 'SSLIFY_DISABLE', settings.DEBUG):
return None
# If we get here, proceed as normal.
if not request.is_secure():
url = request.build_absolute_uri(request.get_full_path())
secure_url = url.replace('http://', 'https://')
return HttpResponsePermanentRedirect(secure_url)
|
from django.conf import settings
from django.http import HttpResponsePermanentRedirect
class SSLifyMiddleware(object):
"""Force all requests to use HTTPs. If we get an HTTP request, we'll just
force a redirect to HTTPs.
.. note::
This will only take effect if ``settings.DEBUG`` is False.
.. note::
You can also disable this middleware when testing by setting
``settings.SSLIFY_DISABLE`` to True.
"""
def process_request(self, request):
# If the user has explicitly disabled SSLify, do nothing.
if getattr(settings, 'SSLIFY_DISABLE', False):
return None
# If we get here, proceed as normal.
if not any((settings.DEBUG, request.is_secure())):
url = request.build_absolute_uri(request.get_full_path())
secure_url = url.replace('http://', 'https://')
return HttpResponsePermanentRedirect(secure_url)
Allow using sslify even in DEBUG mode.from django.conf import settings
from django.http import HttpResponsePermanentRedirect
class SSLifyMiddleware(object):
"""Force all requests to use HTTPs. If we get an HTTP request, we'll just
force a redirect to HTTPs.
.. note::
This will only take effect if ``settings.DEBUG`` is False.
.. note::
You can also disable this middleware when testing by setting
``settings.SSLIFY_DISABLE`` to True.
"""
def process_request(self, request):
# If the user has explicitly disabled SSLify, do nothing.
if getattr(settings, 'SSLIFY_DISABLE', settings.DEBUG):
return None
# If we get here, proceed as normal.
if not request.is_secure():
url = request.build_absolute_uri(request.get_full_path())
secure_url = url.replace('http://', 'https://')
return HttpResponsePermanentRedirect(secure_url)
|
<commit_before>from django.conf import settings
from django.http import HttpResponsePermanentRedirect
class SSLifyMiddleware(object):
"""Force all requests to use HTTPs. If we get an HTTP request, we'll just
force a redirect to HTTPs.
.. note::
This will only take effect if ``settings.DEBUG`` is False.
.. note::
You can also disable this middleware when testing by setting
``settings.SSLIFY_DISABLE`` to True.
"""
def process_request(self, request):
# If the user has explicitly disabled SSLify, do nothing.
if getattr(settings, 'SSLIFY_DISABLE', False):
return None
# If we get here, proceed as normal.
if not any((settings.DEBUG, request.is_secure())):
url = request.build_absolute_uri(request.get_full_path())
secure_url = url.replace('http://', 'https://')
return HttpResponsePermanentRedirect(secure_url)
<commit_msg>Allow using sslify even in DEBUG mode.<commit_after>from django.conf import settings
from django.http import HttpResponsePermanentRedirect
class SSLifyMiddleware(object):
"""Force all requests to use HTTPs. If we get an HTTP request, we'll just
force a redirect to HTTPs.
.. note::
This will only take effect if ``settings.DEBUG`` is False.
.. note::
You can also disable this middleware when testing by setting
``settings.SSLIFY_DISABLE`` to True.
"""
def process_request(self, request):
# If the user has explicitly disabled SSLify, do nothing.
if getattr(settings, 'SSLIFY_DISABLE', settings.DEBUG):
return None
# If we get here, proceed as normal.
if not request.is_secure():
url = request.build_absolute_uri(request.get_full_path())
secure_url = url.replace('http://', 'https://')
return HttpResponsePermanentRedirect(secure_url)
|
e28c9da712574618eb28b6ff82631462fee67c16
|
changes/utils/times.py
|
changes/utils/times.py
|
def duration(value):
ONE_SECOND = 1000
ONE_MINUTE = ONE_SECOND * 60
if not value:
return '0 s'
if value < 3 * ONE_SECOND:
return '%d ms' % (value,)
elif value < 5 * ONE_MINUTE:
return '%d s' % (value / ONE_SECOND,)
else:
return '%d m' % (value / ONE_MINUTE,)
|
def duration(value):
ONE_SECOND = 1000
ONE_MINUTE = ONE_SECOND * 60
if not value:
return '0 s'
abs_value = abs(value)
if abs_value < 3 * ONE_SECOND:
return '%d ms' % (value,)
elif abs_value < 5 * ONE_MINUTE:
return '%d s' % (value / ONE_SECOND,)
else:
return '%d m' % (value / ONE_MINUTE,)
|
Fix for negative values in duration
|
Fix for negative values in duration
|
Python
|
apache-2.0
|
bowlofstew/changes,wfxiang08/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes,dropbox/changes,dropbox/changes,bowlofstew/changes,dropbox/changes,bowlofstew/changes,bowlofstew/changes,wfxiang08/changes
|
def duration(value):
ONE_SECOND = 1000
ONE_MINUTE = ONE_SECOND * 60
if not value:
return '0 s'
if value < 3 * ONE_SECOND:
return '%d ms' % (value,)
elif value < 5 * ONE_MINUTE:
return '%d s' % (value / ONE_SECOND,)
else:
return '%d m' % (value / ONE_MINUTE,)
Fix for negative values in duration
|
def duration(value):
ONE_SECOND = 1000
ONE_MINUTE = ONE_SECOND * 60
if not value:
return '0 s'
abs_value = abs(value)
if abs_value < 3 * ONE_SECOND:
return '%d ms' % (value,)
elif abs_value < 5 * ONE_MINUTE:
return '%d s' % (value / ONE_SECOND,)
else:
return '%d m' % (value / ONE_MINUTE,)
|
<commit_before>def duration(value):
ONE_SECOND = 1000
ONE_MINUTE = ONE_SECOND * 60
if not value:
return '0 s'
if value < 3 * ONE_SECOND:
return '%d ms' % (value,)
elif value < 5 * ONE_MINUTE:
return '%d s' % (value / ONE_SECOND,)
else:
return '%d m' % (value / ONE_MINUTE,)
<commit_msg>Fix for negative values in duration<commit_after>
|
def duration(value):
ONE_SECOND = 1000
ONE_MINUTE = ONE_SECOND * 60
if not value:
return '0 s'
abs_value = abs(value)
if abs_value < 3 * ONE_SECOND:
return '%d ms' % (value,)
elif abs_value < 5 * ONE_MINUTE:
return '%d s' % (value / ONE_SECOND,)
else:
return '%d m' % (value / ONE_MINUTE,)
|
def duration(value):
ONE_SECOND = 1000
ONE_MINUTE = ONE_SECOND * 60
if not value:
return '0 s'
if value < 3 * ONE_SECOND:
return '%d ms' % (value,)
elif value < 5 * ONE_MINUTE:
return '%d s' % (value / ONE_SECOND,)
else:
return '%d m' % (value / ONE_MINUTE,)
Fix for negative values in durationdef duration(value):
ONE_SECOND = 1000
ONE_MINUTE = ONE_SECOND * 60
if not value:
return '0 s'
abs_value = abs(value)
if abs_value < 3 * ONE_SECOND:
return '%d ms' % (value,)
elif abs_value < 5 * ONE_MINUTE:
return '%d s' % (value / ONE_SECOND,)
else:
return '%d m' % (value / ONE_MINUTE,)
|
<commit_before>def duration(value):
ONE_SECOND = 1000
ONE_MINUTE = ONE_SECOND * 60
if not value:
return '0 s'
if value < 3 * ONE_SECOND:
return '%d ms' % (value,)
elif value < 5 * ONE_MINUTE:
return '%d s' % (value / ONE_SECOND,)
else:
return '%d m' % (value / ONE_MINUTE,)
<commit_msg>Fix for negative values in duration<commit_after>def duration(value):
ONE_SECOND = 1000
ONE_MINUTE = ONE_SECOND * 60
if not value:
return '0 s'
abs_value = abs(value)
if abs_value < 3 * ONE_SECOND:
return '%d ms' % (value,)
elif abs_value < 5 * ONE_MINUTE:
return '%d s' % (value / ONE_SECOND,)
else:
return '%d m' % (value / ONE_MINUTE,)
|
d3d88d628c61a87b1a36f9a25bdea807dd2d12a2
|
saleor/dashboard/settings/forms.py
|
saleor/dashboard/settings/forms.py
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from ...setting.models import Setting
class SettingForm(forms.ModelForm):
class Meta:
model = Setting
exclude = []
def clean_name(self):
name = self.cleaned_data['name']
if len(name.split()) > 1:
raise forms.ValidationError(_("Name cannot contains whitespaces"))
return name
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from ...setting.models import Setting
class SettingForm(forms.ModelForm):
class Meta:
model = Setting
exclude = []
def clean_name(self):
name = self.cleaned_data['name']
if len(name.split()) > 1:
raise forms.ValidationError(_("Name cannot contains whitespaces"))
return name
|
Add missing newline between imports
|
Add missing newline between imports
|
Python
|
bsd-3-clause
|
tfroehlich82/saleor,KenMutemi/saleor,itbabu/saleor,maferelo/saleor,KenMutemi/saleor,jreigel/saleor,tfroehlich82/saleor,HyperManTT/ECommerceSaleor,mociepka/saleor,KenMutemi/saleor,itbabu/saleor,mociepka/saleor,car3oon/saleor,HyperManTT/ECommerceSaleor,car3oon/saleor,car3oon/saleor,mociepka/saleor,UITools/saleor,UITools/saleor,itbabu/saleor,maferelo/saleor,UITools/saleor,HyperManTT/ECommerceSaleor,UITools/saleor,jreigel/saleor,tfroehlich82/saleor,maferelo/saleor,jreigel/saleor,UITools/saleor
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from ...setting.models import Setting
class SettingForm(forms.ModelForm):
class Meta:
model = Setting
exclude = []
def clean_name(self):
name = self.cleaned_data['name']
if len(name.split()) > 1:
raise forms.ValidationError(_("Name cannot contains whitespaces"))
return name
Add missing newline between imports
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from ...setting.models import Setting
class SettingForm(forms.ModelForm):
class Meta:
model = Setting
exclude = []
def clean_name(self):
name = self.cleaned_data['name']
if len(name.split()) > 1:
raise forms.ValidationError(_("Name cannot contains whitespaces"))
return name
|
<commit_before>from django import forms
from django.utils.translation import ugettext_lazy as _
from ...setting.models import Setting
class SettingForm(forms.ModelForm):
class Meta:
model = Setting
exclude = []
def clean_name(self):
name = self.cleaned_data['name']
if len(name.split()) > 1:
raise forms.ValidationError(_("Name cannot contains whitespaces"))
return name
<commit_msg>Add missing newline between imports<commit_after>
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from ...setting.models import Setting
class SettingForm(forms.ModelForm):
class Meta:
model = Setting
exclude = []
def clean_name(self):
name = self.cleaned_data['name']
if len(name.split()) > 1:
raise forms.ValidationError(_("Name cannot contains whitespaces"))
return name
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from ...setting.models import Setting
class SettingForm(forms.ModelForm):
class Meta:
model = Setting
exclude = []
def clean_name(self):
name = self.cleaned_data['name']
if len(name.split()) > 1:
raise forms.ValidationError(_("Name cannot contains whitespaces"))
return name
Add missing newline between importsfrom django import forms
from django.utils.translation import ugettext_lazy as _
from ...setting.models import Setting
class SettingForm(forms.ModelForm):
class Meta:
model = Setting
exclude = []
def clean_name(self):
name = self.cleaned_data['name']
if len(name.split()) > 1:
raise forms.ValidationError(_("Name cannot contains whitespaces"))
return name
|
<commit_before>from django import forms
from django.utils.translation import ugettext_lazy as _
from ...setting.models import Setting
class SettingForm(forms.ModelForm):
class Meta:
model = Setting
exclude = []
def clean_name(self):
name = self.cleaned_data['name']
if len(name.split()) > 1:
raise forms.ValidationError(_("Name cannot contains whitespaces"))
return name
<commit_msg>Add missing newline between imports<commit_after>from django import forms
from django.utils.translation import ugettext_lazy as _
from ...setting.models import Setting
class SettingForm(forms.ModelForm):
class Meta:
model = Setting
exclude = []
def clean_name(self):
name = self.cleaned_data['name']
if len(name.split()) > 1:
raise forms.ValidationError(_("Name cannot contains whitespaces"))
return name
|
876d414f85297d45dca4f2c9158f9257dfd6cf5f
|
wagtailgeowidget/edit_handlers.py
|
wagtailgeowidget/edit_handlers.py
|
import warnings
import wagtail
if wagtail.VERSION < (2, 0):
warnings.warn("GeoPanel only works in Wagtail 2+", Warning) # NOQA
warnings.warn("Please import GeoPanel from wagtailgeowidget.legacy_edit_handlers instead", Warning) # NOQA
warnings.warn("All support for Wagtail 1.13 and below will be droppen in April 2018", Warning) # NOQA
from wagtail.admin.edit_handlers import FieldPanel
from wagtailgeowidget.widgets import (
GeoField,
)
from wagtailgeowidget.app_settings import (
GEO_WIDGET_ZOOM
)
class GeoPanel(FieldPanel):
def __init__(self, *args, **kwargs):
self.classname = kwargs.pop('classname', "")
self.address_field = kwargs.pop('address_field', "")
self.hide_latlng = kwargs.pop('hide_latlng', False)
self.zoom = kwargs.pop('zoom', GEO_WIDGET_ZOOM)
super().__init__(*args, **kwargs)
def widget_overrides(self):
field = self.model._meta.get_field(self.field_name)
srid = getattr(field, 'srid', 4326)
return {
self.field_name: GeoField(
address_field=self.address_field,
hide_latlng=self.hide_latlng,
zoom=self.zoom,
srid=srid,
id_prefix='id_',
)
}
def clone(self):
return self.__class__(
field_name=self.field_name,
classname=self.classname,
address_field=self.address_field,
hide_latlng=self.hide_latlng,
zoom=self.zoom,
)
|
from wagtail.admin.edit_handlers import FieldPanel
from wagtailgeowidget.widgets import (
GeoField,
)
from wagtailgeowidget.app_settings import (
GEO_WIDGET_ZOOM
)
class GeoPanel(FieldPanel):
def __init__(self, *args, **kwargs):
self.classname = kwargs.pop('classname', "")
self.address_field = kwargs.pop('address_field', "")
self.hide_latlng = kwargs.pop('hide_latlng', False)
self.zoom = kwargs.pop('zoom', GEO_WIDGET_ZOOM)
super().__init__(*args, **kwargs)
def widget_overrides(self):
field = self.model._meta.get_field(self.field_name)
srid = getattr(field, 'srid', 4326)
return {
self.field_name: GeoField(
address_field=self.address_field,
hide_latlng=self.hide_latlng,
zoom=self.zoom,
srid=srid,
id_prefix='id_',
used_in='GeoPanel',
)
}
def clone(self):
return self.__class__(
field_name=self.field_name,
classname=self.classname,
address_field=self.address_field,
hide_latlng=self.hide_latlng,
zoom=self.zoom,
)
|
Remove no-longer needed wagtail 2.0 warning
|
Remove no-longer needed wagtail 2.0 warning
|
Python
|
mit
|
Frojd/wagtail-geo-widget,Frojd/wagtail-geo-widget,Frojd/wagtail-geo-widget,Frojd/wagtail-geo-widget
|
import warnings
import wagtail
if wagtail.VERSION < (2, 0):
warnings.warn("GeoPanel only works in Wagtail 2+", Warning) # NOQA
warnings.warn("Please import GeoPanel from wagtailgeowidget.legacy_edit_handlers instead", Warning) # NOQA
warnings.warn("All support for Wagtail 1.13 and below will be droppen in April 2018", Warning) # NOQA
from wagtail.admin.edit_handlers import FieldPanel
from wagtailgeowidget.widgets import (
GeoField,
)
from wagtailgeowidget.app_settings import (
GEO_WIDGET_ZOOM
)
class GeoPanel(FieldPanel):
def __init__(self, *args, **kwargs):
self.classname = kwargs.pop('classname', "")
self.address_field = kwargs.pop('address_field', "")
self.hide_latlng = kwargs.pop('hide_latlng', False)
self.zoom = kwargs.pop('zoom', GEO_WIDGET_ZOOM)
super().__init__(*args, **kwargs)
def widget_overrides(self):
field = self.model._meta.get_field(self.field_name)
srid = getattr(field, 'srid', 4326)
return {
self.field_name: GeoField(
address_field=self.address_field,
hide_latlng=self.hide_latlng,
zoom=self.zoom,
srid=srid,
id_prefix='id_',
)
}
def clone(self):
return self.__class__(
field_name=self.field_name,
classname=self.classname,
address_field=self.address_field,
hide_latlng=self.hide_latlng,
zoom=self.zoom,
)
Remove no-longer needed wagtail 2.0 warning
|
from wagtail.admin.edit_handlers import FieldPanel
from wagtailgeowidget.widgets import (
GeoField,
)
from wagtailgeowidget.app_settings import (
GEO_WIDGET_ZOOM
)
class GeoPanel(FieldPanel):
def __init__(self, *args, **kwargs):
self.classname = kwargs.pop('classname', "")
self.address_field = kwargs.pop('address_field', "")
self.hide_latlng = kwargs.pop('hide_latlng', False)
self.zoom = kwargs.pop('zoom', GEO_WIDGET_ZOOM)
super().__init__(*args, **kwargs)
def widget_overrides(self):
field = self.model._meta.get_field(self.field_name)
srid = getattr(field, 'srid', 4326)
return {
self.field_name: GeoField(
address_field=self.address_field,
hide_latlng=self.hide_latlng,
zoom=self.zoom,
srid=srid,
id_prefix='id_',
used_in='GeoPanel',
)
}
def clone(self):
return self.__class__(
field_name=self.field_name,
classname=self.classname,
address_field=self.address_field,
hide_latlng=self.hide_latlng,
zoom=self.zoom,
)
|
<commit_before>import warnings
import wagtail
if wagtail.VERSION < (2, 0):
warnings.warn("GeoPanel only works in Wagtail 2+", Warning) # NOQA
warnings.warn("Please import GeoPanel from wagtailgeowidget.legacy_edit_handlers instead", Warning) # NOQA
warnings.warn("All support for Wagtail 1.13 and below will be droppen in April 2018", Warning) # NOQA
from wagtail.admin.edit_handlers import FieldPanel
from wagtailgeowidget.widgets import (
GeoField,
)
from wagtailgeowidget.app_settings import (
GEO_WIDGET_ZOOM
)
class GeoPanel(FieldPanel):
def __init__(self, *args, **kwargs):
self.classname = kwargs.pop('classname', "")
self.address_field = kwargs.pop('address_field', "")
self.hide_latlng = kwargs.pop('hide_latlng', False)
self.zoom = kwargs.pop('zoom', GEO_WIDGET_ZOOM)
super().__init__(*args, **kwargs)
def widget_overrides(self):
field = self.model._meta.get_field(self.field_name)
srid = getattr(field, 'srid', 4326)
return {
self.field_name: GeoField(
address_field=self.address_field,
hide_latlng=self.hide_latlng,
zoom=self.zoom,
srid=srid,
id_prefix='id_',
)
}
def clone(self):
return self.__class__(
field_name=self.field_name,
classname=self.classname,
address_field=self.address_field,
hide_latlng=self.hide_latlng,
zoom=self.zoom,
)
<commit_msg>Remove no-longer needed wagtail 2.0 warning<commit_after>
|
from wagtail.admin.edit_handlers import FieldPanel
from wagtailgeowidget.widgets import (
GeoField,
)
from wagtailgeowidget.app_settings import (
GEO_WIDGET_ZOOM
)
class GeoPanel(FieldPanel):
def __init__(self, *args, **kwargs):
self.classname = kwargs.pop('classname', "")
self.address_field = kwargs.pop('address_field', "")
self.hide_latlng = kwargs.pop('hide_latlng', False)
self.zoom = kwargs.pop('zoom', GEO_WIDGET_ZOOM)
super().__init__(*args, **kwargs)
def widget_overrides(self):
field = self.model._meta.get_field(self.field_name)
srid = getattr(field, 'srid', 4326)
return {
self.field_name: GeoField(
address_field=self.address_field,
hide_latlng=self.hide_latlng,
zoom=self.zoom,
srid=srid,
id_prefix='id_',
used_in='GeoPanel',
)
}
def clone(self):
return self.__class__(
field_name=self.field_name,
classname=self.classname,
address_field=self.address_field,
hide_latlng=self.hide_latlng,
zoom=self.zoom,
)
|
import warnings
import wagtail
if wagtail.VERSION < (2, 0):
warnings.warn("GeoPanel only works in Wagtail 2+", Warning) # NOQA
warnings.warn("Please import GeoPanel from wagtailgeowidget.legacy_edit_handlers instead", Warning) # NOQA
warnings.warn("All support for Wagtail 1.13 and below will be droppen in April 2018", Warning) # NOQA
from wagtail.admin.edit_handlers import FieldPanel
from wagtailgeowidget.widgets import (
GeoField,
)
from wagtailgeowidget.app_settings import (
GEO_WIDGET_ZOOM
)
class GeoPanel(FieldPanel):
def __init__(self, *args, **kwargs):
self.classname = kwargs.pop('classname', "")
self.address_field = kwargs.pop('address_field', "")
self.hide_latlng = kwargs.pop('hide_latlng', False)
self.zoom = kwargs.pop('zoom', GEO_WIDGET_ZOOM)
super().__init__(*args, **kwargs)
def widget_overrides(self):
field = self.model._meta.get_field(self.field_name)
srid = getattr(field, 'srid', 4326)
return {
self.field_name: GeoField(
address_field=self.address_field,
hide_latlng=self.hide_latlng,
zoom=self.zoom,
srid=srid,
id_prefix='id_',
)
}
def clone(self):
return self.__class__(
field_name=self.field_name,
classname=self.classname,
address_field=self.address_field,
hide_latlng=self.hide_latlng,
zoom=self.zoom,
)
Remove no-longer needed wagtail 2.0 warning
from wagtail.admin.edit_handlers import FieldPanel
from wagtailgeowidget.widgets import (
GeoField,
)
from wagtailgeowidget.app_settings import (
GEO_WIDGET_ZOOM
)
class GeoPanel(FieldPanel):
def __init__(self, *args, **kwargs):
self.classname = kwargs.pop('classname', "")
self.address_field = kwargs.pop('address_field', "")
self.hide_latlng = kwargs.pop('hide_latlng', False)
self.zoom = kwargs.pop('zoom', GEO_WIDGET_ZOOM)
super().__init__(*args, **kwargs)
def widget_overrides(self):
field = self.model._meta.get_field(self.field_name)
srid = getattr(field, 'srid', 4326)
return {
self.field_name: GeoField(
address_field=self.address_field,
hide_latlng=self.hide_latlng,
zoom=self.zoom,
srid=srid,
id_prefix='id_',
used_in='GeoPanel',
)
}
def clone(self):
return self.__class__(
field_name=self.field_name,
classname=self.classname,
address_field=self.address_field,
hide_latlng=self.hide_latlng,
zoom=self.zoom,
)
|
<commit_before>import warnings
import wagtail
if wagtail.VERSION < (2, 0):
warnings.warn("GeoPanel only works in Wagtail 2+", Warning) # NOQA
warnings.warn("Please import GeoPanel from wagtailgeowidget.legacy_edit_handlers instead", Warning) # NOQA
warnings.warn("All support for Wagtail 1.13 and below will be droppen in April 2018", Warning) # NOQA
from wagtail.admin.edit_handlers import FieldPanel
from wagtailgeowidget.widgets import (
GeoField,
)
from wagtailgeowidget.app_settings import (
GEO_WIDGET_ZOOM
)
class GeoPanel(FieldPanel):
def __init__(self, *args, **kwargs):
self.classname = kwargs.pop('classname', "")
self.address_field = kwargs.pop('address_field', "")
self.hide_latlng = kwargs.pop('hide_latlng', False)
self.zoom = kwargs.pop('zoom', GEO_WIDGET_ZOOM)
super().__init__(*args, **kwargs)
def widget_overrides(self):
field = self.model._meta.get_field(self.field_name)
srid = getattr(field, 'srid', 4326)
return {
self.field_name: GeoField(
address_field=self.address_field,
hide_latlng=self.hide_latlng,
zoom=self.zoom,
srid=srid,
id_prefix='id_',
)
}
def clone(self):
return self.__class__(
field_name=self.field_name,
classname=self.classname,
address_field=self.address_field,
hide_latlng=self.hide_latlng,
zoom=self.zoom,
)
<commit_msg>Remove no-longer needed wagtail 2.0 warning<commit_after>from wagtail.admin.edit_handlers import FieldPanel
from wagtailgeowidget.widgets import (
GeoField,
)
from wagtailgeowidget.app_settings import (
GEO_WIDGET_ZOOM
)
class GeoPanel(FieldPanel):
def __init__(self, *args, **kwargs):
self.classname = kwargs.pop('classname', "")
self.address_field = kwargs.pop('address_field', "")
self.hide_latlng = kwargs.pop('hide_latlng', False)
self.zoom = kwargs.pop('zoom', GEO_WIDGET_ZOOM)
super().__init__(*args, **kwargs)
def widget_overrides(self):
field = self.model._meta.get_field(self.field_name)
srid = getattr(field, 'srid', 4326)
return {
self.field_name: GeoField(
address_field=self.address_field,
hide_latlng=self.hide_latlng,
zoom=self.zoom,
srid=srid,
id_prefix='id_',
used_in='GeoPanel',
)
}
def clone(self):
return self.__class__(
field_name=self.field_name,
classname=self.classname,
address_field=self.address_field,
hide_latlng=self.hide_latlng,
zoom=self.zoom,
)
|
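A minimal usage sketch for the panel defined in this record, assuming a Wagtail 2.x page model; the import path and the model/field names below ("LocationPage", "location", "address") are illustrative assumptions, not taken from the commit:

# Hypothetical wiring of the GeoPanel above into a Wagtail page model.
from django.contrib.gis.db import models
from wagtail.core.models import Page
from wagtailgeowidget.edit_handlers import GeoPanel

class LocationPage(Page):
    address = models.CharField(max_length=250, blank=True)
    location = models.PointField(srid=4326, null=True, blank=True)

    content_panels = Page.content_panels + [
        # Keyword arguments mirror the __init__ signature in the record.
        GeoPanel('location', address_field='address', zoom=7, hide_latlng=True),
    ]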
d466785a4faaf1c01519935317ededf336f9dd14
|
contentstore/management/commands/tests/test_sync_schedules.py
|
contentstore/management/commands/tests/test_sync_schedules.py
|
from six import BytesIO
from django.core.management import call_command
from django.test import TestCase
from mock import patch
from contentstore.models import Schedule
from seed_stage_based_messaging import test_utils as utils
class SyncSchedulesTests(TestCase):
@patch('contentstore.management.commands.sync_schedules.sync_schedule')
def test_schedule_sync_called(self, sync_task):
"""
The sync schedules management command should call the sync schedule
task for every schedule.
"""
utils.disable_signals()
schedule = Schedule.objects.create()
utils.enable_signals()
out = BytesIO()
call_command('sync_schedules', stdout=out)
sync_task.assert_called_once_with(str(schedule.id))
self.assertIn(str(schedule.id), out.getvalue())
self.assertIn('Synchronised 1 schedule/s', out.getvalue())
|
from six import StringIO
from django.core.management import call_command
from django.test import TestCase
from mock import patch
from contentstore.models import Schedule
from seed_stage_based_messaging import test_utils as utils
class SyncSchedulesTests(TestCase):
@patch('contentstore.management.commands.sync_schedules.sync_schedule')
def test_schedule_sync_called(self, sync_task):
"""
The sync schedules management command should call the sync schedule
task for every schedule.
"""
utils.disable_signals()
schedule = Schedule.objects.create()
utils.enable_signals()
out = StringIO()
call_command('sync_schedules', stdout=out)
sync_task.assert_called_once_with(str(schedule.id))
self.assertIn(str(schedule.id), out.getvalue())
self.assertIn('Synchronised 1 schedule/s', out.getvalue())
|
Use StringIO instead of BytesIO
|
Use StringIO instead of BytesIO
|
Python
|
bsd-3-clause
|
praekelt/seed-staged-based-messaging,praekelt/seed-stage-based-messaging,praekelt/seed-stage-based-messaging
|
from six import BytesIO
from django.core.management import call_command
from django.test import TestCase
from mock import patch
from contentstore.models import Schedule
from seed_stage_based_messaging import test_utils as utils
class SyncSchedulesTests(TestCase):
@patch('contentstore.management.commands.sync_schedules.sync_schedule')
def test_schedule_sync_called(self, sync_task):
"""
The sync schedules management command should call the sync schedule
task for every schedule.
"""
utils.disable_signals()
schedule = Schedule.objects.create()
utils.enable_signals()
out = BytesIO()
call_command('sync_schedules', stdout=out)
sync_task.assert_called_once_with(str(schedule.id))
self.assertIn(str(schedule.id), out.getvalue())
self.assertIn('Synchronised 1 schedule/s', out.getvalue())
Use StringIO instead of BytesIO
|
from six import StringIO
from django.core.management import call_command
from django.test import TestCase
from mock import patch
from contentstore.models import Schedule
from seed_stage_based_messaging import test_utils as utils
class SyncSchedulesTests(TestCase):
@patch('contentstore.management.commands.sync_schedules.sync_schedule')
def test_schedule_sync_called(self, sync_task):
"""
The sync schedules management command should call the sync schedule
task for every schedule.
"""
utils.disable_signals()
schedule = Schedule.objects.create()
utils.enable_signals()
out = StringIO()
call_command('sync_schedules', stdout=out)
sync_task.assert_called_once_with(str(schedule.id))
self.assertIn(str(schedule.id), out.getvalue())
self.assertIn('Synchronised 1 schedule/s', out.getvalue())
|
<commit_before>from six import BytesIO
from django.core.management import call_command
from django.test import TestCase
from mock import patch
from contentstore.models import Schedule
from seed_stage_based_messaging import test_utils as utils
class SyncSchedulesTests(TestCase):
@patch('contentstore.management.commands.sync_schedules.sync_schedule')
def test_schedule_sync_called(self, sync_task):
"""
The sync schedules management command should call the sync schedule
task for every schedule.
"""
utils.disable_signals()
schedule = Schedule.objects.create()
utils.enable_signals()
out = BytesIO()
call_command('sync_schedules', stdout=out)
sync_task.assert_called_once_with(str(schedule.id))
self.assertIn(str(schedule.id), out.getvalue())
self.assertIn('Synchronised 1 schedule/s', out.getvalue())
<commit_msg>Use StringIO instead of BytesIO<commit_after>
|
from six import StringIO
from django.core.management import call_command
from django.test import TestCase
from mock import patch
from contentstore.models import Schedule
from seed_stage_based_messaging import test_utils as utils
class SyncSchedulesTests(TestCase):
@patch('contentstore.management.commands.sync_schedules.sync_schedule')
def test_schedule_sync_called(self, sync_task):
"""
The sync schedules management command should call the sync schedule
task for every schedule.
"""
utils.disable_signals()
schedule = Schedule.objects.create()
utils.enable_signals()
out = StringIO()
call_command('sync_schedules', stdout=out)
sync_task.assert_called_once_with(str(schedule.id))
self.assertIn(str(schedule.id), out.getvalue())
self.assertIn('Synchronised 1 schedule/s', out.getvalue())
|
from six import BytesIO
from django.core.management import call_command
from django.test import TestCase
from mock import patch
from contentstore.models import Schedule
from seed_stage_based_messaging import test_utils as utils
class SyncSchedulesTests(TestCase):
@patch('contentstore.management.commands.sync_schedules.sync_schedule')
def test_schedule_sync_called(self, sync_task):
"""
The sync schedules management command should call the sync schedule
task for every schedule.
"""
utils.disable_signals()
schedule = Schedule.objects.create()
utils.enable_signals()
out = BytesIO()
call_command('sync_schedules', stdout=out)
sync_task.assert_called_once_with(str(schedule.id))
self.assertIn(str(schedule.id), out.getvalue())
self.assertIn('Synchronised 1 schedule/s', out.getvalue())
Use StringIO instead of BytesIO
from six import StringIO
from django.core.management import call_command
from django.test import TestCase
from mock import patch
from contentstore.models import Schedule
from seed_stage_based_messaging import test_utils as utils
class SyncSchedulesTests(TestCase):
@patch('contentstore.management.commands.sync_schedules.sync_schedule')
def test_schedule_sync_called(self, sync_task):
"""
The sync schedules management command should call the sync schedule
task for every schedule.
"""
utils.disable_signals()
schedule = Schedule.objects.create()
utils.enable_signals()
out = StringIO()
call_command('sync_schedules', stdout=out)
sync_task.assert_called_once_with(str(schedule.id))
self.assertIn(str(schedule.id), out.getvalue())
self.assertIn('Synchronised 1 schedule/s', out.getvalue())
|
<commit_before>from six import BytesIO
from django.core.management import call_command
from django.test import TestCase
from mock import patch
from contentstore.models import Schedule
from seed_stage_based_messaging import test_utils as utils
class SyncSchedulesTests(TestCase):
@patch('contentstore.management.commands.sync_schedules.sync_schedule')
def test_schedule_sync_called(self, sync_task):
"""
The sync schedules management command should call the sync schedule
task for every schedule.
"""
utils.disable_signals()
schedule = Schedule.objects.create()
utils.enable_signals()
out = BytesIO()
call_command('sync_schedules', stdout=out)
sync_task.assert_called_once_with(str(schedule.id))
self.assertIn(str(schedule.id), out.getvalue())
self.assertIn('Synchronised 1 schedule/s', out.getvalue())
<commit_msg>Use StringIO instead of BytesIO<commit_after>from six import StringIO
from django.core.management import call_command
from django.test import TestCase
from mock import patch
from contentstore.models import Schedule
from seed_stage_based_messaging import test_utils as utils
class SyncSchedulesTests(TestCase):
@patch('contentstore.management.commands.sync_schedules.sync_schedule')
def test_schedule_sync_called(self, sync_task):
"""
The sync schedules management command should call the sync schedule
task for every schedule.
"""
utils.disable_signals()
schedule = Schedule.objects.create()
utils.enable_signals()
out = StringIO()
call_command('sync_schedules', stdout=out)
sync_task.assert_called_once_with(str(schedule.id))
self.assertIn(str(schedule.id), out.getvalue())
self.assertIn('Synchronised 1 schedule/s', out.getvalue())
|
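A note on why the swap works (my gloss on the record, not part of it): under Python 3, Django's management-command output wrapper writes str, so a bytes buffer raises TypeError, while six's StringIO accepts text on both Python 2 and 3:

from six import StringIO
from django.core.management import call_command

out = StringIO()  # text buffer; a BytesIO here would reject str writes on Python 3
call_command('sync_schedules', stdout=out)
captured = out.getvalue()  # plain string, safe to use with assertIn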
fcd523105e9f158f423018d45b05527435a41fb0
|
geotrek/altimetry/tests/test_models.py
|
geotrek/altimetry/tests/test_models.py
|
import os
from django.test import TestCase
from django.conf import settings
from geotrek.trekking.factories import TrekFactory
from geotrek.trekking.models import Trek
class AltimetryMixinTest(TestCase):
def test_get_elevation_chart_none(self):
trek = TrekFactory.create(no_path=True)
trek.get_elevation_chart_path()
basefolder = os.path.join(settings.MEDIA_ROOT, 'profiles')
self.assertTrue(os.listdir(basefolder))
directory = os.listdir(basefolder)
self.assertIn('%s-%s-%s.png' % (Trek._meta.model_name, '1', 'en'), directory)
|
import os
from django.test import TestCase
from django.conf import settings
from django.utils.translation import get_language
from geotrek.trekking.factories import TrekFactory
from geotrek.trekking.models import Trek
class AltimetryMixinTest(TestCase):
def test_get_elevation_chart_none(self):
trek = TrekFactory.create(no_path=True, published=True)
response = self.client.get('/media/profiles/trek-%s.png' % trek.pk)
self.assertEqual(response.status_code, 200)
# In PDF
trek.get_elevation_chart_path()
basefolder = os.path.join(settings.MEDIA_ROOT, 'profiles')
self.assertTrue(os.listdir(basefolder))
directory = os.listdir(basefolder)
self.assertIn('%s-%s-%s.png' % (Trek._meta.model_name, str(trek.pk), get_language()), directory)
|
Change test model elevation chart
|
Change test model elevation chart
|
Python
|
bsd-2-clause
|
GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek,makinacorpus/Geotrek
|
import os
from django.test import TestCase
from django.conf import settings
from geotrek.trekking.factories import TrekFactory
from geotrek.trekking.models import Trek
class AltimetryMixinTest(TestCase):
def test_get_elevation_chart_none(self):
trek = TrekFactory.create(no_path=True)
trek.get_elevation_chart_path()
basefolder = os.path.join(settings.MEDIA_ROOT, 'profiles')
self.assertTrue(os.listdir(basefolder))
directory = os.listdir(basefolder)
self.assertIn('%s-%s-%s.png' % (Trek._meta.model_name, '1', 'en'), directory)
Change test model elevation chart
|
import os
from django.test import TestCase
from django.conf import settings
from django.utils.translation import get_language
from geotrek.trekking.factories import TrekFactory
from geotrek.trekking.models import Trek
class AltimetryMixinTest(TestCase):
def test_get_elevation_chart_none(self):
trek = TrekFactory.create(no_path=True, published=True)
response = self.client.get('/media/profiles/trek-%s.png' % trek.pk)
self.assertEqual(response.status_code, 200)
# In PDF
trek.get_elevation_chart_path()
basefolder = os.path.join(settings.MEDIA_ROOT, 'profiles')
self.assertTrue(os.listdir(basefolder))
directory = os.listdir(basefolder)
self.assertIn('%s-%s-%s.png' % (Trek._meta.model_name, str(trek.pk), get_language()), directory)
|
<commit_before>import os
from django.test import TestCase
from django.conf import settings
from geotrek.trekking.factories import TrekFactory
from geotrek.trekking.models import Trek
class AltimetryMixinTest(TestCase):
def test_get_elevation_chart_none(self):
trek = TrekFactory.create(no_path=True)
trek.get_elevation_chart_path()
basefolder = os.path.join(settings.MEDIA_ROOT, 'profiles')
self.assertTrue(os.listdir(basefolder))
directory = os.listdir(basefolder)
self.assertIn('%s-%s-%s.png' % (Trek._meta.model_name, '1', 'en'), directory)
<commit_msg>Change test model elevation chart<commit_after>
|
import os
from django.test import TestCase
from django.conf import settings
from django.utils.translation import get_language
from geotrek.trekking.factories import TrekFactory
from geotrek.trekking.models import Trek
class AltimetryMixinTest(TestCase):
def test_get_elevation_chart_none(self):
trek = TrekFactory.create(no_path=True, published=True)
response = self.client.get('/media/profiles/trek-%s.png' % trek.pk)
self.assertEqual(response.status_code, 200)
# In PDF
trek.get_elevation_chart_path()
basefolder = os.path.join(settings.MEDIA_ROOT, 'profiles')
self.assertTrue(os.listdir(basefolder))
directory = os.listdir(basefolder)
self.assertIn('%s-%s-%s.png' % (Trek._meta.model_name, str(trek.pk), get_language()), directory)
|
import os
from django.test import TestCase
from django.conf import settings
from geotrek.trekking.factories import TrekFactory
from geotrek.trekking.models import Trek
class AltimetryMixinTest(TestCase):
def test_get_elevation_chart_none(self):
trek = TrekFactory.create(no_path=True)
trek.get_elevation_chart_path()
basefolder = os.path.join(settings.MEDIA_ROOT, 'profiles')
self.assertTrue(os.listdir(basefolder))
directory = os.listdir(basefolder)
self.assertIn('%s-%s-%s.png' % (Trek._meta.model_name, '1', 'en'), directory)
Change test model elevation chart
import os
from django.test import TestCase
from django.conf import settings
from django.utils.translation import get_language
from geotrek.trekking.factories import TrekFactory
from geotrek.trekking.models import Trek
class AltimetryMixinTest(TestCase):
def test_get_elevation_chart_none(self):
trek = TrekFactory.create(no_path=True, published=True)
response = self.client.get('/media/profiles/trek-%s.png' % trek.pk)
self.assertEqual(response.status_code, 200)
# In PDF
trek.get_elevation_chart_path()
basefolder = os.path.join(settings.MEDIA_ROOT, 'profiles')
self.assertTrue(os.listdir(basefolder))
directory = os.listdir(basefolder)
self.assertIn('%s-%s-%s.png' % (Trek._meta.model_name, str(trek.pk), get_language()), directory)
|
<commit_before>import os
from django.test import TestCase
from django.conf import settings
from geotrek.trekking.factories import TrekFactory
from geotrek.trekking.models import Trek
class AltimetryMixinTest(TestCase):
def test_get_elevation_chart_none(self):
trek = TrekFactory.create(no_path=True)
trek.get_elevation_chart_path()
basefolder = os.path.join(settings.MEDIA_ROOT, 'profiles')
self.assertTrue(os.listdir(basefolder))
directory = os.listdir(basefolder)
self.assertIn('%s-%s-%s.png' % (Trek._meta.model_name, '1', 'en'), directory)
<commit_msg>Change test model elevation chart<commit_after>import os
from django.test import TestCase
from django.conf import settings
from django.utils.translation import get_language
from geotrek.trekking.factories import TrekFactory
from geotrek.trekking.models import Trek
class AltimetryMixinTest(TestCase):
def test_get_elevation_chart_none(self):
trek = TrekFactory.create(no_path=True, published=True)
response = self.client.get('/media/profiles/trek-%s.png' % trek.pk)
self.assertEqual(response.status_code, 200)
# In PDF
trek.get_elevation_chart_path()
basefolder = os.path.join(settings.MEDIA_ROOT, 'profiles')
self.assertTrue(os.listdir(basefolder))
directory = os.listdir(basefolder)
self.assertIn('%s-%s-%s.png' % (Trek._meta.model_name, str(trek.pk), get_language()), directory)
|
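For reference, the updated assertion expects a profile filename built from the model name, the object's primary key, and the active language; a short sketch restating the test's format string (Trek and trek refer to the model and factory instance used in the test above, the example values are illustrative):

from django.utils.translation import get_language

# With Trek._meta.model_name == 'trek', trek.pk == 12 and English active,
# this yields 'trek-12-en.png'.
filename = '%s-%s-%s.png' % (Trek._meta.model_name, str(trek.pk), get_language())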
68452ffc8490d976b043f660a0e3e1f19c4ed98e
|
great_expectations/actions/__init__.py
|
great_expectations/actions/__init__.py
|
from .actions import (
BasicValidationAction,
NamespacedValidationAction,
NoOpAction,
SummarizeAndStoreAction,
)
from .validation_operators import (
DefaultActionAwareValidationOperator
)
|
from .actions import (
BasicValidationAction,
NamespacedValidationAction,
NoOpAction,
SummarizeAndStoreAction,
SlackNotificationAction
)
from .validation_operators import (
DefaultActionAwareValidationOperator
)
|
Add Slack action to init
|
Add Slack action to init
|
Python
|
apache-2.0
|
great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations
|
from .actions import (
BasicValidationAction,
NamespacedValidationAction,
NoOpAction,
SummarizeAndStoreAction,
)
from .validation_operators import (
DefaultActionAwareValidationOperator
)
Add Slack action to init
|
from .actions import (
BasicValidationAction,
NamespacedValidationAction,
NoOpAction,
SummarizeAndStoreAction,
SlackNotificationAction
)
from .validation_operators import (
DefaultActionAwareValidationOperator
)
|
<commit_before>from .actions import (
BasicValidationAction,
NamespacedValidationAction,
NoOpAction,
SummarizeAndStoreAction,
)
from .validation_operators import (
DefaultActionAwareValidationOperator
)<commit_msg>Add Slack action to init<commit_after>
|
from .actions import (
BasicValidationAction,
NamespacedValidationAction,
NoOpAction,
SummarizeAndStoreAction,
SlackNotificationAction
)
from .validation_operators import (
DefaultActionAwareValidationOperator
)
|
from .actions import (
BasicValidationAction,
NamespacedValidationAction,
NoOpAction,
SummarizeAndStoreAction,
)
from .validation_operators import (
DefaultActionAwareValidationOperator
)
Add Slack action to init
from .actions import (
BasicValidationAction,
NamespacedValidationAction,
NoOpAction,
SummarizeAndStoreAction,
SlackNotificationAction
)
from .validation_operators import (
DefaultActionAwareValidationOperator
)
|
<commit_before>from .actions import (
BasicValidationAction,
NamespacedValidationAction,
NoOpAction,
SummarizeAndStoreAction,
)
from .validation_operators import (
DefaultActionAwareValidationOperator
)<commit_msg>Add Slack action to init<commit_after>from .actions import (
BasicValidationAction,
NamespacedValidationAction,
NoOpAction,
SummarizeAndStoreAction,
SlackNotificationAction
)
from .validation_operators import (
DefaultActionAwareValidationOperator
)
|
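Re-exporting the class in __init__ keeps the public import path stable even if the internal module layout changes; after this commit a caller can write the following one-liner (a sketch, the action's configuration is assumed to live elsewhere):

from great_expectations.actions import SlackNotificationAction  # now available at package level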
dabd787a647e345bdd9f3fd2fee1474b04347512
|
website/addons/base/utils.py
|
website/addons/base/utils.py
|
from os.path import basename
from website import settings
def serialize_addon_config(config):
lookup = config.template_lookup
return {
'addon_short_name': config.short_name,
'addon_full_name': config.full_name,
'node_settings_template': lookup.get_template(basename(config.node_settings_template)),
'user_settings_template': lookup.get_template(basename(config.user_settings_template)),
}
def get_addons_by_config_type(config_type, user):
addons = [addon for addon in settings.ADDONS_AVAILABLE if config_type in addon.configs]
addon_settings = []
for addon_config in sorted(addons, key=lambda cfg: cfg.full_name.lower()):
short_name = addon_config.short_name
config = serialize_addon_config(addon_config)
user_settings = user.get_addon(short_name)
if user_settings:
user_settings = user_settings.to_json(user)
config.update({
'user_settings': user_settings,
})
addon_settings.append(config)
return addon_settings
|
from os.path import basename
from website import settings
def serialize_addon_config(config):
lookup = config.template_lookup
return {
'addon_short_name': config.short_name,
'addon_full_name': config.full_name,
'node_settings_template': lookup.get_template(basename(config.node_settings_template)),
'user_settings_template': lookup.get_template(basename(config.user_settings_template)),
}
def get_addons_by_config_type(config_type, user):
addons = [addon for addon in settings.ADDONS_AVAILABLE if config_type in addon.configs]
addon_settings = []
for addon_config in sorted(addons, key=lambda cfg: cfg.full_name.lower()):
short_name = addon_config.short_name
config = serialize_addon_config(addon_config)
user_settings = user.get_addon(short_name)
if user_settings:
user_settings = user_settings.to_json(user)
config.update({
            'user_settings': user_settings or addon_config.DEFAULT_SETTINGS,
})
addon_settings.append(config)
return addon_settings
|
Use default settings if no user settings
|
Use default settings if no user settings
|
Python
|
apache-2.0
|
aaxelb/osf.io,DanielSBrown/osf.io,bdyetton/prettychart,HalcyonChimera/osf.io,rdhyee/osf.io,caseyrygt/osf.io,dplorimer/osf,icereval/osf.io,alexschiller/osf.io,pattisdr/osf.io,zachjanicki/osf.io,petermalcolm/osf.io,jmcarp/osf.io,KAsante95/osf.io,acshi/osf.io,cwisecarver/osf.io,cldershem/osf.io,kch8qx/osf.io,cslzchen/osf.io,zachjanicki/osf.io,cslzchen/osf.io,kch8qx/osf.io,DanielSBrown/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,CenterForOpenScience/osf.io,kwierman/osf.io,wearpants/osf.io,acshi/osf.io,jnayak1/osf.io,lyndsysimon/osf.io,asanfilippo7/osf.io,cosenal/osf.io,baylee-d/osf.io,ZobairAlijan/osf.io,mluo613/osf.io,chrisseto/osf.io,monikagrabowska/osf.io,sbt9uc/osf.io,caseyrollins/osf.io,caneruguz/osf.io,rdhyee/osf.io,caseyrollins/osf.io,lyndsysimon/osf.io,adlius/osf.io,petermalcolm/osf.io,zamattiac/osf.io,chrisseto/osf.io,ckc6cz/osf.io,petermalcolm/osf.io,kch8qx/osf.io,samchrisinger/osf.io,icereval/osf.io,cosenal/osf.io,monikagrabowska/osf.io,RomanZWang/osf.io,zamattiac/osf.io,baylee-d/osf.io,wearpants/osf.io,monikagrabowska/osf.io,abought/osf.io,RomanZWang/osf.io,zamattiac/osf.io,zamattiac/osf.io,hmoco/osf.io,brandonPurvis/osf.io,jmcarp/osf.io,jolene-esposito/osf.io,TomBaxter/osf.io,mluke93/osf.io,monikagrabowska/osf.io,alexschiller/osf.io,abought/osf.io,rdhyee/osf.io,ticklemepierce/osf.io,samanehsan/osf.io,Nesiehr/osf.io,erinspace/osf.io,sloria/osf.io,mattclark/osf.io,adlius/osf.io,TomBaxter/osf.io,KAsante95/osf.io,TomHeatwole/osf.io,cwisecarver/osf.io,samchrisinger/osf.io,jolene-esposito/osf.io,haoyuchen1992/osf.io,bdyetton/prettychart,alexschiller/osf.io,doublebits/osf.io,jmcarp/osf.io,zachjanicki/osf.io,cldershem/osf.io,kwierman/osf.io,njantrania/osf.io,DanielSBrown/osf.io,reinaH/osf.io,samanehsan/osf.io,brandonPurvis/osf.io,TomBaxter/osf.io,mluke93/osf.io,monikagrabowska/osf.io,alexschiller/osf.io,abought/osf.io,rdhyee/osf.io,ticklemepierce/osf.io,GageGaskins/osf.io,brianjgeiger/osf.io,alexschiller/osf.io,cwisecarver/osf.io,cslzchen/osf.io,aaxelb/osf.io,mattclark/osf.io,amyshi188/osf.io,caneruguz/osf.io,felliott/osf.io,TomHeatwole/osf.io,icereval/osf.io,Ghalko/osf.io,brandonPurvis/osf.io,sbt9uc/osf.io,jnayak1/osf.io,doublebits/osf.io,rdhyee/osf.io,HarryRybacki/osf.io,njantrania/osf.io,TomHeatwole/osf.io,cwisecarver/osf.io,cslzchen/osf.io,aaxelb/osf.io,mattclark/osf.io,amyshi188/osf.io,caneruguz/osf.io,felliott/osf.io,TomHeatwole/osf.io,icereval/osf.io,Ghalko/osf.io,brandonPurvis/osf.io,sbt9uc/osf.io,jnayak1/osf.io,doublebits/osf.io,rdhyee/osf.io,HarryRybacki/osf.io,njantrania/osf.io,HarryRybacki/osf.io,ticklemepierce/osf.io,GageGaskins/osf.io,brianjgeiger/osf.io,felliott/osf.io,TomBaxter/osf.io,HalcyonChimera/osf.io,HarryRybacki/osf.io,acshi/osf.io,brianjgeiger/osf.io,felliott/osf.io,leb2dg/osf.io,billyhunt/osf.io,kwierman/osf.io,kch8qx/osf.io,dplorimer/osf,mluo613/osf.io,ticklemepierce/osf.io,GageGaskins/osf.io,leb2dg/osf.io,abought/osf.io,wearpants/osf.io,arpitar/osf.io,Ghalko/osf.io,leb2dg/osf.io,cosenal/osf.io,arpitar/osf.io,HalcyonChimera/osf.io,petermalcolm/osf.io,lyndsysimon/osf.io,mluke93/osf.io,ckc6cz/osf.io,emetsger/osf.io,haoyuchen1992/osf.io,mluke93/osf.io,Johnetordoff/osf.io,bdyetton/prettychart,laurenrevere/osf.io,haoyuchen1992/osf.io,caseyrollins/osf.io,adlius/osf.io,sloria/osf.io,ticklemepierce/osf.io,MerlinZhang/osf.io,KAsante95/osf.io,laurenrevere/osf.io,chrisseto/osf.io,sbt9uc/osf.io,GageGaskins/osf.io,baylee-d/osf.io,samchrisinger/osf.io,mattclark/osf.io,chrisseto/osf.io,saradbowman/osf.io,ckc6cz/osf.io,SSJohns/osf.io,brianjgeiger/osf.io,arpitar/osf.io,billyhunt/osf.io,acshi/osf.io,dplorimer/osf,samanehsan/osf.io,bdyetton/prettychart,ckc6cz/osf.io,crcresearch/osf.io,samchrisinger/osf.io,acshi/osf.io,erinspace/osf.io,RomanZWang/osf.io,mluke93/osf.io,mfraezz/osf.io,caseyrygt/osf.io,DanielSBrown/osf.io,mluo613/osf.io,aaxelb/osf.io,saradbowman/osf.io,dplorimer/osf,caneruguz/osf.io,njantrania/osf.io,jnayak1/osf.io,haoyuchen1992/osf.io,pattisdr/osf.io,kwierman/osf.io,caneruguz/osf.io,KAsante95/osf.io,hmoco/osf.io,binoculars/osf.io,Johnetordoff/osf.io,Nesiehr/osf.io,mfraezz/osf.io,SSJohns/osf.io,danielneis/osf.io,mfraezz/osf.io,jolene-esposito/osf.io,MerlinZhang/osf.io,chennan47/osf.io,MerlinZhang/osf.io,jnayak1/osf.io,ZobairAlijan/osf.io,caseyrygt/osf.io,amyshi188/osf.io,GageGaskins/osf.io,Ghalko/osf.io,emetsger/osf.io,felliott/osf.io,chennan47/osf.io,emetsger/osf.io,leb2dg/osf.io,reinaH/osf.io,caseyrygt/osf.io,emetsger/osf.io,doublebits/osf.io,sbt9uc/osf.io,sloria/osf.io
|
from os.path import basename
from website import settings
def serialize_addon_config(config):
lookup = config.template_lookup
return {
'addon_short_name': config.short_name,
'addon_full_name': config.full_name,
'node_settings_template': lookup.get_template(basename(config.node_settings_template)),
'user_settings_template': lookup.get_template(basename(config.user_settings_template)),
}
def get_addons_by_config_type(config_type, user):
addons = [addon for addon in settings.ADDONS_AVAILABLE if config_type in addon.configs]
addon_settings = []
for addon_config in sorted(addons, key=lambda cfg: cfg.full_name.lower()):
short_name = addon_config.short_name
config = serialize_addon_config(addon_config)
user_settings = user.get_addon(short_name)
if user_settings:
user_settings = user_settings.to_json(user)
config.update({
'user_settings': user_settings,
})
addon_settings.append(config)
return addon_settings
Use default settings if no user settings
|
from os.path import basename
from website import settings
def serialize_addon_config(config):
lookup = config.template_lookup
return {
'addon_short_name': config.short_name,
'addon_full_name': config.full_name,
'node_settings_template': lookup.get_template(basename(config.node_settings_template)),
'user_settings_template': lookup.get_template(basename(config.user_settings_template)),
}
def get_addons_by_config_type(config_type, user):
addons = [addon for addon in settings.ADDONS_AVAILABLE if config_type in addon.configs]
addon_settings = []
for addon_config in sorted(addons, key=lambda cfg: cfg.full_name.lower()):
short_name = addon_config.short_name
config = serialize_addon_config(addon_config)
user_settings = user.get_addon(short_name)
if user_settings:
user_settings = user_settings.to_json(user)
config.update({
            'user_settings': user_settings or addon_config.DEFAULT_SETTINGS,
})
addon_settings.append(config)
return addon_settings
|
<commit_before>from os.path import basename
from website import settings
def serialize_addon_config(config):
lookup = config.template_lookup
return {
'addon_short_name': config.short_name,
'addon_full_name': config.full_name,
'node_settings_template': lookup.get_template(basename(config.node_settings_template)),
'user_settings_template': lookup.get_template(basename(config.user_settings_template)),
}
def get_addons_by_config_type(config_type, user):
addons = [addon for addon in settings.ADDONS_AVAILABLE if config_type in addon.configs]
addon_settings = []
for addon_config in sorted(addons, key=lambda cfg: cfg.full_name.lower()):
short_name = addon_config.short_name
config = serialize_addon_config(addon_config)
user_settings = user.get_addon(short_name)
if user_settings:
user_settings = user_settings.to_json(user)
config.update({
'user_settings': user_settings,
})
addon_settings.append(config)
return addon_settings
<commit_msg>Use default settings if no user settings<commit_after>
|
from os.path import basename
from website import settings
def serialize_addon_config(config):
lookup = config.template_lookup
return {
'addon_short_name': config.short_name,
'addon_full_name': config.full_name,
'node_settings_template': lookup.get_template(basename(config.node_settings_template)),
'user_settings_template': lookup.get_template(basename(config.user_settings_template)),
}
def get_addons_by_config_type(config_type, user):
addons = [addon for addon in settings.ADDONS_AVAILABLE if config_type in addon.configs]
addon_settings = []
for addon_config in sorted(addons, key=lambda cfg: cfg.full_name.lower()):
short_name = addon_config.short_name
config = serialize_addon_config(addon_config)
user_settings = user.get_addon(short_name)
if user_settings:
user_settings = user_settings.to_json(user)
config.update({
            'user_settings': user_settings or addon_config.DEFAULT_SETTINGS,
})
addon_settings.append(config)
return addon_settings
|
from os.path import basename
from website import settings
def serialize_addon_config(config):
lookup = config.template_lookup
return {
'addon_short_name': config.short_name,
'addon_full_name': config.full_name,
'node_settings_template': lookup.get_template(basename(config.node_settings_template)),
'user_settings_template': lookup.get_template(basename(config.user_settings_template)),
}
def get_addons_by_config_type(config_type, user):
addons = [addon for addon in settings.ADDONS_AVAILABLE if config_type in addon.configs]
addon_settings = []
for addon_config in sorted(addons, key=lambda cfg: cfg.full_name.lower()):
short_name = addon_config.short_name
config = serialize_addon_config(addon_config)
user_settings = user.get_addon(short_name)
if user_settings:
user_settings = user_settings.to_json(user)
config.update({
'user_settings': user_settings,
})
addon_settings.append(config)
return addon_settings
Use default settings if no user settings
from os.path import basename
from website import settings
def serialize_addon_config(config):
lookup = config.template_lookup
return {
'addon_short_name': config.short_name,
'addon_full_name': config.full_name,
'node_settings_template': lookup.get_template(basename(config.node_settings_template)),
'user_settings_template': lookup.get_template(basename(config.user_settings_template)),
}
def get_addons_by_config_type(config_type, user):
addons = [addon for addon in settings.ADDONS_AVAILABLE if config_type in addon.configs]
addon_settings = []
for addon_config in sorted(addons, key=lambda cfg: cfg.full_name.lower()):
short_name = addon_config.short_name
config = serialize_addon_config(addon_config)
user_settings = user.get_addon(short_name)
if user_settings:
user_settings = user_settings.to_json(user)
config.update({
            'user_settings': user_settings or addon_config.DEFAULT_SETTINGS,
})
addon_settings.append(config)
return addon_settings
|
<commit_before>from os.path import basename
from website import settings
def serialize_addon_config(config):
lookup = config.template_lookup
return {
'addon_short_name': config.short_name,
'addon_full_name': config.full_name,
'node_settings_template': lookup.get_template(basename(config.node_settings_template)),
'user_settings_template': lookup.get_template(basename(config.user_settings_template)),
}
def get_addons_by_config_type(config_type, user):
addons = [addon for addon in settings.ADDONS_AVAILABLE if config_type in addon.configs]
addon_settings = []
for addon_config in sorted(addons, key=lambda cfg: cfg.full_name.lower()):
short_name = addon_config.short_name
config = serialize_addon_config(addon_config)
user_settings = user.get_addon(short_name)
if user_settings:
user_settings = user_settings.to_json(user)
config.update({
'user_settings': user_settings,
})
addon_settings.append(config)
return addon_settings
<commit_msg>Use default settings if no user settings<commit_after>from os.path import basename
from website import settings
def serialize_addon_config(config):
lookup = config.template_lookup
return {
'addon_short_name': config.short_name,
'addon_full_name': config.full_name,
'node_settings_template': lookup.get_template(basename(config.node_settings_template)),
'user_settings_template': lookup.get_template(basename(config.user_settings_template)),
}
def get_addons_by_config_type(config_type, user):
addons = [addon for addon in settings.ADDONS_AVAILABLE if config_type in addon.configs]
addon_settings = []
for addon_config in sorted(addons, key=lambda cfg: cfg.full_name.lower()):
short_name = addon_config.short_name
config = serialize_addon_config(addon_config)
user_settings = user.get_addon(short_name)
if user_settings:
user_settings = user_settings.to_json(user)
config.update({
            'user_settings': user_settings or addon_config.DEFAULT_SETTINGS,
})
addon_settings.append(config)
return addon_settings
|
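The fallback in this commit leans on `or` short-circuiting over falsy values; a self-contained sketch of the pattern (the dict values below are stand-ins, not taken from the record):

DEFAULT_SETTINGS = {'enabled': False}  # stand-in for addon_config.DEFAULT_SETTINGS
user_settings = None                   # e.g. user.get_addon(short_name) returned nothing
config = {}
config.update({'user_settings': user_settings or DEFAULT_SETTINGS})
assert config['user_settings'] == DEFAULT_SETTINGS  # default wins whenever the user value is falsy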
45046857f688d4f640dc0b920a42d5b92faa4d9c
|
jax/_src/lib/mlir/dialects/__init__.py
|
jax/_src/lib/mlir/dialects/__init__.py
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# flake8: noqa: F401
import jaxlib.mlir.dialects.builtin as builtin
import jaxlib.mlir.dialects.chlo as chlo
import jaxlib.mlir.dialects.mhlo as mhlo
import jaxlib.mlir.dialects.func as func
try:
import jaxlib.mlir.dialects.ml_program as ml_program
except (ModuleNotFoundError, ImportError):
# TODO(ajcbik,phawkins): make this unconditional when jaxlib > 0.3.7
# is the minimum version.
pass
try:
import jaxlib.mlir.dialects.sparse_tensor as sparse_tensor
except (ModuleNotFoundError, ImportError):
# TODO(ajcbik,phawkins): make this unconditional when jaxlib > 0.3.7
# is the minimum version.
pass
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# flake8: noqa: F401
import jaxlib.mlir.dialects.builtin as builtin
import jaxlib.mlir.dialects.chlo as chlo
import jaxlib.mlir.dialects.mhlo as mhlo
import jaxlib.mlir.dialects.func as func
try:
import jaxlib.mlir.dialects.ml_program as ml_program
except (ModuleNotFoundError, ImportError):
# TODO(phawkins): make this unconditional when jaxlib > 0.3.14
# is the minimum version.
pass
try:
import jaxlib.mlir.dialects.sparse_tensor as sparse_tensor
except (ModuleNotFoundError, ImportError):
# TODO(ajcbik,phawkins): make this unconditional when jaxlib > 0.3.7
# is the minimum version.
pass
|
Fix ModuleNotFoundError for phawkins only with version
|
Fix ModuleNotFoundError for phawkins only with version
|
Python
|
apache-2.0
|
google/jax,google/jax,google/jax,google/jax
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# flake8: noqa: F401
import jaxlib.mlir.dialects.builtin as builtin
import jaxlib.mlir.dialects.chlo as chlo
import jaxlib.mlir.dialects.mhlo as mhlo
import jaxlib.mlir.dialects.func as func
try:
import jaxlib.mlir.dialects.ml_program as ml_program
except (ModuleNotFoundError, ImportError):
# TODO(ajcbik,phawkins): make this unconditional when jaxlib > 0.3.7
# is the minimum version.
pass
try:
import jaxlib.mlir.dialects.sparse_tensor as sparse_tensor
except (ModuleNotFoundError, ImportError):
# TODO(ajcbik,phawkins): make this unconditional when jaxlib > 0.3.7
# is the minimum version.
pass
Fix ModuleNotFoundError for phawkins only with version
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# flake8: noqa: F401
import jaxlib.mlir.dialects.builtin as builtin
import jaxlib.mlir.dialects.chlo as chlo
import jaxlib.mlir.dialects.mhlo as mhlo
import jaxlib.mlir.dialects.func as func
try:
import jaxlib.mlir.dialects.ml_program as ml_program
except (ModuleNotFoundError, ImportError):
# TODO(phawkins): make this unconditional when jaxlib > 0.3.14
# is the minimum version.
pass
try:
import jaxlib.mlir.dialects.sparse_tensor as sparse_tensor
except (ModuleNotFoundError, ImportError):
# TODO(ajcbik,phawkins): make this unconditional when jaxlib > 0.3.7
# is the minimum version.
pass
|
<commit_before># Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# flake8: noqa: F401
import jaxlib.mlir.dialects.builtin as builtin
import jaxlib.mlir.dialects.chlo as chlo
import jaxlib.mlir.dialects.mhlo as mhlo
import jaxlib.mlir.dialects.func as func
try:
import jaxlib.mlir.dialects.ml_program as ml_program
except (ModuleNotFoundError, ImportError):
# TODO(ajcbik,phawkins): make this unconditional when jaxlib > 0.3.7
# is the minimum version.
pass
try:
import jaxlib.mlir.dialects.sparse_tensor as sparse_tensor
except (ModuleNotFoundError, ImportError):
# TODO(ajcbik,phawkins): make this unconditional when jaxlib > 0.3.7
# is the minimum version.
pass
<commit_msg>Fix ModuleNotFoundError for phawkins only with version<commit_after>
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# flake8: noqa: F401
import jaxlib.mlir.dialects.builtin as builtin
import jaxlib.mlir.dialects.chlo as chlo
import jaxlib.mlir.dialects.mhlo as mhlo
import jaxlib.mlir.dialects.func as func
try:
import jaxlib.mlir.dialects.ml_program as ml_program
except (ModuleNotFoundError, ImportError):
# TODO(phawkins): make this unconditional when jaxlib > 0.3.14
# is the minimum version.
pass
try:
import jaxlib.mlir.dialects.sparse_tensor as sparse_tensor
except (ModuleNotFoundError, ImportError):
# TODO(ajcbik,phawkins): make this unconditional when jaxlib > 0.3.7
# is the minimum version.
pass
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# flake8: noqa: F401
import jaxlib.mlir.dialects.builtin as builtin
import jaxlib.mlir.dialects.chlo as chlo
import jaxlib.mlir.dialects.mhlo as mhlo
import jaxlib.mlir.dialects.func as func
try:
import jaxlib.mlir.dialects.ml_program as ml_program
except (ModuleNotFoundError, ImportError):
# TODO(ajcbik,phawkins): make this unconditional when jaxlib > 0.3.7
# is the minimum version.
pass
try:
import jaxlib.mlir.dialects.sparse_tensor as sparse_tensor
except (ModuleNotFoundError, ImportError):
# TODO(ajcbik,phawkins): make this unconditional when jaxlib > 0.3.7
# is the minimum version.
pass
Fix ModuleNotFoundError for phawkins only with version
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# flake8: noqa: F401
import jaxlib.mlir.dialects.builtin as builtin
import jaxlib.mlir.dialects.chlo as chlo
import jaxlib.mlir.dialects.mhlo as mhlo
import jaxlib.mlir.dialects.func as func
try:
import jaxlib.mlir.dialects.ml_program as ml_program
except (ModuleNotFoundError, ImportError):
# TODO(phawkins): make this unconditional when jaxlib > 0.3.14
# is the minimum version.
pass
try:
import jaxlib.mlir.dialects.sparse_tensor as sparse_tensor
except (ModuleNotFoundError, ImportError):
# TODO(ajcbik,phawkins): make this unconditional when jaxlib > 0.3.7
# is the minimum version.
pass
|
<commit_before># Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# flake8: noqa: F401
import jaxlib.mlir.dialects.builtin as builtin
import jaxlib.mlir.dialects.chlo as chlo
import jaxlib.mlir.dialects.mhlo as mhlo
import jaxlib.mlir.dialects.func as func
try:
import jaxlib.mlir.dialects.ml_program as ml_program
except (ModuleNotFoundError, ImportError):
# TODO(ajcbik,phawkins): make this unconditional when jaxlib > 0.3.7
# is the minimum version.
pass
try:
import jaxlib.mlir.dialects.sparse_tensor as sparse_tensor
except (ModuleNotFoundError, ImportError):
# TODO(ajcbik,phawkins): make this unconditional when jaxlib > 0.3.7
# is the minimum version.
pass
<commit_msg>Fix ModuleNotFoundError for phawkins only with version<commit_after># Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# flake8: noqa: F401
import jaxlib.mlir.dialects.builtin as builtin
import jaxlib.mlir.dialects.chlo as chlo
import jaxlib.mlir.dialects.mhlo as mhlo
import jaxlib.mlir.dialects.func as func
try:
import jaxlib.mlir.dialects.ml_program as ml_program
except (ModuleNotFoundError, ImportError):
# TODO(phawkins): make this unconditional when jaxlib > 0.3.14
# is the minimum version.
pass
try:
import jaxlib.mlir.dialects.sparse_tensor as sparse_tensor
except (ModuleNotFoundError, ImportError):
# TODO(ajcbik,phawkins): make this unconditional when jaxlib > 0.3.7
# is the minimum version.
pass
|
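Worth noting about the guarded imports in this record: ModuleNotFoundError is a subclass of ImportError, so catching ImportError alone would cover both cases. A minimal sketch of the same feature-detection idiom (the None sentinel is an illustrative assumption, not what the jax module does):

try:
    import jaxlib.mlir.dialects.ml_program as ml_program
except ImportError:      # also catches ModuleNotFoundError, its subclass
    ml_program = None    # callers can feature-test with "ml_program is not None"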
b45c0cc0e9f2964ad442115f7a83292fb83611ec
|
test/vim_autopep8.py
|
test/vim_autopep8.py
|
"""Run autopep8 on the selected buffer in Vim.
map <C-I> :pyfile <path_to>/vim_autopep8.py<CR>
Replace ":pyfile" with ":py3file" if Vim is built with Python 3 support.
"""
from __future__ import unicode_literals
import sys
import vim
ENCODING = vim.eval('&fileencoding')
def encode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.encode(ENCODING)
def decode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.decode(ENCODING)
if vim.eval('&syntax') == 'python':
source = '\n'.join(decode(line)
for line in vim.current.buffer) + '\n'
import autopep8
options = autopep8.parse_args(['--range',
str(1 + vim.current.range.start),
str(1 + vim.current.range.end),
''])
formatted = autopep8.fix_code(source, options=options)
if source != formatted:
if formatted.endswith('\n'):
formatted = formatted[:-1]
vim.current.buffer[:] = [encode(line)
for line in formatted.splitlines()]
|
"""Run autopep8 on the selected buffer in Vim.
map <C-I> :pyfile <path_to>/vim_autopep8.py<CR>
Replace ":pyfile" with ":py3file" if Vim is built with Python 3 support.
"""
from __future__ import unicode_literals
import sys
import vim
ENCODING = vim.eval('&fileencoding')
def encode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.encode(ENCODING)
def decode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.decode(ENCODING)
def main():
if vim.eval('&syntax') != 'python':
return
source = '\n'.join(decode(line)
for line in vim.current.buffer) + '\n'
import autopep8
options = autopep8.parse_args(['--range',
str(1 + vim.current.range.start),
str(1 + vim.current.range.end),
''])
formatted = autopep8.fix_code(source, options=options)
if source != formatted:
if formatted.endswith('\n'):
formatted = formatted[:-1]
vim.current.buffer[:] = [encode(line)
for line in formatted.splitlines()]
if __name__ == '__main__':
main()
|
Put code in main function
|
Put code in main function
|
Python
|
mit
|
SG345/autopep8,MeteorAdminz/autopep8,Vauxoo/autopep8,hhatto/autopep8,Vauxoo/autopep8,MeteorAdminz/autopep8,SG345/autopep8,vauxoo-dev/autopep8,vauxoo-dev/autopep8,hhatto/autopep8
|
"""Run autopep8 on the selected buffer in Vim.
map <C-I> :pyfile <path_to>/vim_autopep8.py<CR>
Replace ":pyfile" with ":py3file" if Vim is built with Python 3 support.
"""
from __future__ import unicode_literals
import sys
import vim
ENCODING = vim.eval('&fileencoding')
def encode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.encode(ENCODING)
def decode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.decode(ENCODING)
if vim.eval('&syntax') == 'python':
source = '\n'.join(decode(line)
for line in vim.current.buffer) + '\n'
import autopep8
options = autopep8.parse_args(['--range',
str(1 + vim.current.range.start),
str(1 + vim.current.range.end),
''])
formatted = autopep8.fix_code(source, options=options)
if source != formatted:
if formatted.endswith('\n'):
formatted = formatted[:-1]
vim.current.buffer[:] = [encode(line)
for line in formatted.splitlines()]
Put code in main function
|
"""Run autopep8 on the selected buffer in Vim.
map <C-I> :pyfile <path_to>/vim_autopep8.py<CR>
Replace ":pyfile" with ":py3file" if Vim is built with Python 3 support.
"""
from __future__ import unicode_literals
import sys
import vim
ENCODING = vim.eval('&fileencoding')
def encode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.encode(ENCODING)
def decode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.decode(ENCODING)
def main():
if vim.eval('&syntax') != 'python':
return
source = '\n'.join(decode(line)
for line in vim.current.buffer) + '\n'
import autopep8
options = autopep8.parse_args(['--range',
str(1 + vim.current.range.start),
str(1 + vim.current.range.end),
''])
formatted = autopep8.fix_code(source, options=options)
if source != formatted:
if formatted.endswith('\n'):
formatted = formatted[:-1]
vim.current.buffer[:] = [encode(line)
for line in formatted.splitlines()]
if __name__ == '__main__':
main()
|
<commit_before>"""Run autopep8 on the selected buffer in Vim.
map <C-I> :pyfile <path_to>/vim_autopep8.py<CR>
Replace ":pyfile" with ":py3file" if Vim is built with Python 3 support.
"""
from __future__ import unicode_literals
import sys
import vim
ENCODING = vim.eval('&fileencoding')
def encode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.encode(ENCODING)
def decode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.decode(ENCODING)
if vim.eval('&syntax') == 'python':
source = '\n'.join(decode(line)
for line in vim.current.buffer) + '\n'
import autopep8
options = autopep8.parse_args(['--range',
str(1 + vim.current.range.start),
str(1 + vim.current.range.end),
''])
formatted = autopep8.fix_code(source, options=options)
if source != formatted:
if formatted.endswith('\n'):
formatted = formatted[:-1]
vim.current.buffer[:] = [encode(line)
for line in formatted.splitlines()]
<commit_msg>Put code in main function<commit_after>
|
"""Run autopep8 on the selected buffer in Vim.
map <C-I> :pyfile <path_to>/vim_autopep8.py<CR>
Replace ":pyfile" with ":py3file" if Vim is built with Python 3 support.
"""
from __future__ import unicode_literals
import sys
import vim
ENCODING = vim.eval('&fileencoding')
def encode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.encode(ENCODING)
def decode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.decode(ENCODING)
def main():
if vim.eval('&syntax') != 'python':
return
source = '\n'.join(decode(line)
for line in vim.current.buffer) + '\n'
import autopep8
options = autopep8.parse_args(['--range',
str(1 + vim.current.range.start),
str(1 + vim.current.range.end),
''])
formatted = autopep8.fix_code(source, options=options)
if source != formatted:
if formatted.endswith('\n'):
formatted = formatted[:-1]
vim.current.buffer[:] = [encode(line)
for line in formatted.splitlines()]
if __name__ == '__main__':
main()
|
"""Run autopep8 on the selected buffer in Vim.
map <C-I> :pyfile <path_to>/vim_autopep8.py<CR>
Replace ":pyfile" with ":py3file" if Vim is built with Python 3 support.
"""
from __future__ import unicode_literals
import sys
import vim
ENCODING = vim.eval('&fileencoding')
def encode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.encode(ENCODING)
def decode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.decode(ENCODING)
if vim.eval('&syntax') == 'python':
source = '\n'.join(decode(line)
for line in vim.current.buffer) + '\n'
import autopep8
options = autopep8.parse_args(['--range',
str(1 + vim.current.range.start),
str(1 + vim.current.range.end),
''])
formatted = autopep8.fix_code(source, options=options)
if source != formatted:
if formatted.endswith('\n'):
formatted = formatted[:-1]
vim.current.buffer[:] = [encode(line)
for line in formatted.splitlines()]
Put code in main function"""Run autopep8 on the selected buffer in Vim.
map <C-I> :pyfile <path_to>/vim_autopep8.py<CR>
Replace ":pyfile" with ":py3file" if Vim is built with Python 3 support.
"""
from __future__ import unicode_literals
import sys
import vim
ENCODING = vim.eval('&fileencoding')
def encode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.encode(ENCODING)
def decode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.decode(ENCODING)
def main():
if vim.eval('&syntax') != 'python':
return
source = '\n'.join(decode(line)
for line in vim.current.buffer) + '\n'
import autopep8
options = autopep8.parse_args(['--range',
str(1 + vim.current.range.start),
str(1 + vim.current.range.end),
''])
formatted = autopep8.fix_code(source, options=options)
if source != formatted:
if formatted.endswith('\n'):
formatted = formatted[:-1]
vim.current.buffer[:] = [encode(line)
for line in formatted.splitlines()]
if __name__ == '__main__':
main()
|
<commit_before>"""Run autopep8 on the selected buffer in Vim.
map <C-I> :pyfile <path_to>/vim_autopep8.py<CR>
Replace ":pyfile" with ":py3file" if Vim is built with Python 3 support.
"""
from __future__ import unicode_literals
import sys
import vim
ENCODING = vim.eval('&fileencoding')
def encode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.encode(ENCODING)
def decode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.decode(ENCODING)
if vim.eval('&syntax') == 'python':
source = '\n'.join(decode(line)
for line in vim.current.buffer) + '\n'
import autopep8
options = autopep8.parse_args(['--range',
str(1 + vim.current.range.start),
str(1 + vim.current.range.end),
''])
formatted = autopep8.fix_code(source, options=options)
if source != formatted:
if formatted.endswith('\n'):
formatted = formatted[:-1]
vim.current.buffer[:] = [encode(line)
for line in formatted.splitlines()]
<commit_msg>Put code in main function<commit_after>"""Run autopep8 on the selected buffer in Vim.
map <C-I> :pyfile <path_to>/vim_autopep8.py<CR>
Replace ":pyfile" with ":py3file" if Vim is built with Python 3 support.
"""
from __future__ import unicode_literals
import sys
import vim
ENCODING = vim.eval('&fileencoding')
def encode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.encode(ENCODING)
def decode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.decode(ENCODING)
def main():
if vim.eval('&syntax') != 'python':
return
source = '\n'.join(decode(line)
for line in vim.current.buffer) + '\n'
import autopep8
options = autopep8.parse_args(['--range',
str(1 + vim.current.range.start),
str(1 + vim.current.range.end),
''])
formatted = autopep8.fix_code(source, options=options)
if source != formatted:
if formatted.endswith('\n'):
formatted = formatted[:-1]
vim.current.buffer[:] = [encode(line)
for line in formatted.splitlines()]
if __name__ == '__main__':
main()
|
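Aside (not part of the record above): the autopep8 Python API used in this script can be exercised on its own. A minimal sketch, assuming only that the autopep8 package is installed; note that --range takes 1-based, inclusive line numbers, which is why the script adds 1 to Vim's 0-based range values:

import autopep8

source = "import os\nimport sys\nx=1\n"
# Restrict fixes to line 3 only; lines outside the range are left untouched.
options = autopep8.parse_args(['--range', '3', '3', ''])  # trailing '' is the filename placeholder
print(autopep8.fix_code(source, options=options))  # 'x=1' becomes 'x = 1'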
222935ffc347f9787f08b50cccb1981151db5cec
|
test_jeni_python3.py
|
test_jeni_python3.py
|
import unittest
import jeni
from test_jeni import BasicInjector
class Python3AnnotationTestCase(unittest.TestCase):
def test_annotate_without_annotations(self):
def fn(hello):
"unused"
jeni.annotate(fn)
self.assertTrue(jeni.annotate.has_annotations(fn))
@jeni.annotate
def annotated_function(hello: 'hello:thing', eggs: 'eggs'):
return hello, eggs
class FunctionAnnotationTestCase(unittest.TestCase):
def setUp(self):
self.injector = BasicInjector()
def test_function_annotation(self):
self.assertEqual(
('Hello, thing!', 'eggs!'),
self.injector.apply(annotated_function))
if __name__ == '__main__': unittest.main()
|
import unittest
import jeni
from test_jeni import BasicInjector
class Python3AnnotationTestCase(unittest.TestCase):
def test_annotate_without_annotations(self):
def fn(hello):
"unused"
jeni.annotate(fn)
self.assertTrue(jeni.annotate.has_annotations(fn))
def test_annotate_without_dunder_annotations(self):
# Unclear when this would come up; testing it given Python 2 support.
class NoDunderAnnotations(object):
def __getattr__(self, name):
if name == '__annotations__':
raise AttributeError()
return super().__getattr__(name)
def __call__(self):
"unused"
fn = NoDunderAnnotations()
self.assertTrue(hasattr(fn, '__call__'))
self.assertFalse(hasattr(fn, '__annotations__'))
self.assertFalse(hasattr(fn, 'fake')) # coverage
with self.assertRaises(AttributeError):
jeni.annotate(fn)
@jeni.annotate
def annotated_function(hello: 'hello:thing', eggs: 'eggs'):
return hello, eggs
class FunctionAnnotationTestCase(unittest.TestCase):
def setUp(self):
self.injector = BasicInjector()
def test_function_annotation(self):
self.assertEqual(
('Hello, thing!', 'eggs!'),
self.injector.apply(annotated_function))
if __name__ == '__main__': unittest.main()
|
Test for missing __annotations__ in Python 3.
|
Test for missing __annotations__ in Python 3.
|
Python
|
bsd-2-clause
|
groner/jeni-python,rduplain/jeni-python
|
import unittest
import jeni
from test_jeni import BasicInjector
class Python3AnnotationTestCase(unittest.TestCase):
def test_annotate_without_annotations(self):
def fn(hello):
"unused"
jeni.annotate(fn)
self.assertTrue(jeni.annotate.has_annotations(fn))
@jeni.annotate
def annotated_function(hello: 'hello:thing', eggs: 'eggs'):
return hello, eggs
class FunctionAnnotationTestCase(unittest.TestCase):
def setUp(self):
self.injector = BasicInjector()
def test_function_annotation(self):
self.assertEqual(
('Hello, thing!', 'eggs!'),
self.injector.apply(annotated_function))
if __name__ == '__main__': unittest.main()
Test for missing __annotations__ in Python 3.
|
import unittest
import jeni
from test_jeni import BasicInjector
class Python3AnnotationTestCase(unittest.TestCase):
def test_annotate_without_annotations(self):
def fn(hello):
"unused"
jeni.annotate(fn)
self.assertTrue(jeni.annotate.has_annotations(fn))
def test_annotate_without_dunder_annotations(self):
# Unclear when this would come up; testing it given Python 2 support.
class NoDunderAnnotations(object):
def __getattr__(self, name):
if name == '__annotations__':
raise AttributeError()
return super().__getattr__(name)
def __call__(self):
"unused"
fn = NoDunderAnnotations()
self.assertTrue(hasattr(fn, '__call__'))
self.assertFalse(hasattr(fn, '__annotations__'))
self.assertFalse(hasattr(fn, 'fake')) # coverage
with self.assertRaises(AttributeError):
jeni.annotate(fn)
@jeni.annotate
def annotated_function(hello: 'hello:thing', eggs: 'eggs'):
return hello, eggs
class FunctionAnnotationTestCase(unittest.TestCase):
def setUp(self):
self.injector = BasicInjector()
def test_function_annotation(self):
self.assertEqual(
('Hello, thing!', 'eggs!'),
self.injector.apply(annotated_function))
if __name__ == '__main__': unittest.main()
|
<commit_before>import unittest
import jeni
from test_jeni import BasicInjector
class Python3AnnotationTestCase(unittest.TestCase):
def test_annotate_without_annotations(self):
def fn(hello):
"unused"
jeni.annotate(fn)
self.assertTrue(jeni.annotate.has_annotations(fn))
@jeni.annotate
def annotated_function(hello: 'hello:thing', eggs: 'eggs'):
return hello, eggs
class FunctionAnnotationTestCase(unittest.TestCase):
def setUp(self):
self.injector = BasicInjector()
def test_function_annotation(self):
self.assertEqual(
('Hello, thing!', 'eggs!'),
self.injector.apply(annotated_function))
if __name__ == '__main__': unittest.main()
<commit_msg>Test for missing __annotations__ in Python 3.<commit_after>
|
import unittest
import jeni
from test_jeni import BasicInjector
class Python3AnnotationTestCase(unittest.TestCase):
def test_annotate_without_annotations(self):
def fn(hello):
"unused"
jeni.annotate(fn)
self.assertTrue(jeni.annotate.has_annotations(fn))
def test_annotate_without_dunder_annotations(self):
# Unclear when this would come up; testing it given Python 2 support.
class NoDunderAnnotations(object):
def __getattr__(self, name):
if name == '__annotations__':
raise AttributeError()
return super().__getattr__(name)
def __call__(self):
"unused"
fn = NoDunderAnnotations()
self.assertTrue(hasattr(fn, '__call__'))
self.assertFalse(hasattr(fn, '__annotations__'))
self.assertFalse(hasattr(fn, 'fake')) # coverage
with self.assertRaises(AttributeError):
jeni.annotate(fn)
@jeni.annotate
def annotated_function(hello: 'hello:thing', eggs: 'eggs'):
return hello, eggs
class FunctionAnnotationTestCase(unittest.TestCase):
def setUp(self):
self.injector = BasicInjector()
def test_function_annotation(self):
self.assertEqual(
('Hello, thing!', 'eggs!'),
self.injector.apply(annotated_function))
if __name__ == '__main__': unittest.main()
|
import unittest
import jeni
from test_jeni import BasicInjector
class Python3AnnotationTestCase(unittest.TestCase):
def test_annotate_without_annotations(self):
def fn(hello):
"unused"
jeni.annotate(fn)
self.assertTrue(jeni.annotate.has_annotations(fn))
@jeni.annotate
def annotated_function(hello: 'hello:thing', eggs: 'eggs'):
return hello, eggs
class FunctionAnnotationTestCase(unittest.TestCase):
def setUp(self):
self.injector = BasicInjector()
def test_function_annotation(self):
self.assertEqual(
('Hello, thing!', 'eggs!'),
self.injector.apply(annotated_function))
if __name__ == '__main__': unittest.main()
Test for missing __annotations__ in Python 3.import unittest
import jeni
from test_jeni import BasicInjector
class Python3AnnotationTestCase(unittest.TestCase):
def test_annotate_without_annotations(self):
def fn(hello):
"unused"
jeni.annotate(fn)
self.assertTrue(jeni.annotate.has_annotations(fn))
def test_annotate_without_dunder_annotations(self):
# Unclear when this would come up; testing it given Python 2 support.
class NoDunderAnnotations(object):
def __getattr__(self, name):
if name == '__annotations__':
raise AttributeError()
return super().__getattr__(name)
def __call__(self):
"unused"
fn = NoDunderAnnotations()
self.assertTrue(hasattr(fn, '__call__'))
self.assertFalse(hasattr(fn, '__annotations__'))
self.assertFalse(hasattr(fn, 'fake')) # coverage
with self.assertRaises(AttributeError):
jeni.annotate(fn)
@jeni.annotate
def annotated_function(hello: 'hello:thing', eggs: 'eggs'):
return hello, eggs
class FunctionAnnotationTestCase(unittest.TestCase):
def setUp(self):
self.injector = BasicInjector()
def test_function_annotation(self):
self.assertEqual(
('Hello, thing!', 'eggs!'),
self.injector.apply(annotated_function))
if __name__ == '__main__': unittest.main()
|
<commit_before>import unittest
import jeni
from test_jeni import BasicInjector
class Python3AnnotationTestCase(unittest.TestCase):
def test_annotate_without_annotations(self):
def fn(hello):
"unused"
jeni.annotate(fn)
self.assertTrue(jeni.annotate.has_annotations(fn))
@jeni.annotate
def annotated_function(hello: 'hello:thing', eggs: 'eggs'):
return hello, eggs
class FunctionAnnotationTestCase(unittest.TestCase):
def setUp(self):
self.injector = BasicInjector()
def test_function_annotation(self):
self.assertEqual(
('Hello, thing!', 'eggs!'),
self.injector.apply(annotated_function))
if __name__ == '__main__': unittest.main()
<commit_msg>Test for missing __annotations__ in Python 3.<commit_after>import unittest
import jeni
from test_jeni import BasicInjector
class Python3AnnotationTestCase(unittest.TestCase):
def test_annotate_without_annotations(self):
def fn(hello):
"unused"
jeni.annotate(fn)
self.assertTrue(jeni.annotate.has_annotations(fn))
def test_annotate_without_dunder_annotations(self):
# Unclear when this would come up; testing it given Python 2 support.
class NoDunderAnnotations(object):
def __getattr__(self, name):
if name == '__annotations__':
raise AttributeError()
return super().__getattr__(name)
def __call__(self):
"unused"
fn = NoDunderAnnotations()
self.assertTrue(hasattr(fn, '__call__'))
self.assertFalse(hasattr(fn, '__annotations__'))
self.assertFalse(hasattr(fn, 'fake')) # coverage
with self.assertRaises(AttributeError):
jeni.annotate(fn)
@jeni.annotate
def annotated_function(hello: 'hello:thing', eggs: 'eggs'):
return hello, eggs
class FunctionAnnotationTestCase(unittest.TestCase):
def setUp(self):
self.injector = BasicInjector()
def test_function_annotation(self):
self.assertEqual(
('Hello, thing!', 'eggs!'),
self.injector.apply(annotated_function))
if __name__ == '__main__': unittest.main()
|
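For context (independent of jeni, standard library only): the test above works because hasattr() reports False whenever attribute lookup raises AttributeError, so a callable whose __getattr__ raises for __annotations__ genuinely lacks annotations. A minimal sketch:

class NoAnnotations(object):
    def __getattr__(self, name):
        # Called only for attributes not found normally, including __annotations__.
        raise AttributeError(name)

    def __call__(self):
        pass

fn = NoAnnotations()
print(hasattr(fn, '__call__'))          # True: defined on the class
print(hasattr(fn, '__annotations__'))   # False: __getattr__ raised AttributeError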
1fc2e747f1c02d5b8559f03187464eecda008190
|
fernet_fields/test/testmigrate/migrations/0004_copy_values.py
|
fernet_fields/test/testmigrate/migrations/0004_copy_values.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def forwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value_dual = obj.value
def backwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value = obj.value_dual
class Migration(migrations.Migration):
dependencies = [
('testmigrate', '0003_add_value_dual'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def forwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value_dual = obj.value
obj.save(force_update=True)
def backwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value = obj.value_dual
obj.save(force_update=True)
class Migration(migrations.Migration):
dependencies = [
('testmigrate', '0003_add_value_dual'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
|
Fix test migration to actually save updates.
|
Fix test migration to actually save updates.
|
Python
|
bsd-3-clause
|
orcasgit/django-fernet-fields
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def forwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value_dual = obj.value
def backwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value = obj.value_dual
class Migration(migrations.Migration):
dependencies = [
('testmigrate', '0003_add_value_dual'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
Fix test migration to actually save updates.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def forwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value_dual = obj.value
obj.save(force_update=True)
def backwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value = obj.value_dual
obj.save(force_update=True)
class Migration(migrations.Migration):
dependencies = [
('testmigrate', '0003_add_value_dual'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def forwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value_dual = obj.value
def backwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value = obj.value_dual
class Migration(migrations.Migration):
dependencies = [
('testmigrate', '0003_add_value_dual'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
<commit_msg>Fix test migration to actually save updates.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def forwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value_dual = obj.value
obj.save(force_update=True)
def backwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value = obj.value_dual
obj.save(force_update=True)
class Migration(migrations.Migration):
dependencies = [
('testmigrate', '0003_add_value_dual'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def forwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value_dual = obj.value
def backwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value = obj.value_dual
class Migration(migrations.Migration):
dependencies = [
('testmigrate', '0003_add_value_dual'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
Fix test migration to actually save updates.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def forwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value_dual = obj.value
obj.save(force_update=True)
def backwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value = obj.value_dual
obj.save(force_update=True)
class Migration(migrations.Migration):
dependencies = [
('testmigrate', '0003_add_value_dual'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def forwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value_dual = obj.value
def backwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value = obj.value_dual
class Migration(migrations.Migration):
dependencies = [
('testmigrate', '0003_add_value_dual'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
<commit_msg>Fix test migration to actually save updates.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def forwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value_dual = obj.value
obj.save(force_update=True)
def backwards(apps, schema_editor):
DualText = apps.get_model('testmigrate', 'DualText')
for obj in DualText.objects.all():
obj.value = obj.value_dual
obj.save(force_update=True)
class Migration(migrations.Migration):
dependencies = [
('testmigrate', '0003_add_value_dual'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
|
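As a general note (placeholder app and model names, not this repo's): the bug fixed above is a common one in Django data migrations, since assigning to instances of the historical model persists nothing until save() is called. A minimal sketch of the corrected pattern:

from django.db import migrations

def forwards(apps, schema_editor):
    # Use the historical model from the app registry, never a direct import.
    Thing = apps.get_model('myapp', 'Thing')
    for obj in Thing.objects.all():
        obj.copy_field = obj.source_field
        obj.save(force_update=True)  # without this, the loop is a silent no-op

class Migration(migrations.Migration):
    dependencies = [('myapp', '0001_initial')]
    operations = [migrations.RunPython(forwards, migrations.RunPython.noop)]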
e9941e34253768e33cbfa54ff2bb9cf2e8267e1d
|
workflow-diagnosetargets.py
|
workflow-diagnosetargets.py
|
#!/usr/bin/env python
# Standard packages
import sys
import argparse
# Third-party packages
from toil.job import Job
# Package methods
from ddb import configuration
from ddb_ngsflow import gatk
from ddb_ngsflow import pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--samples_file', help="Input configuration file for samples")
parser.add_argument('-c', '--configuration', help="Configuration file for various settings")
Job.Runner.addToilOptions(parser)
args = parser.parse_args()
# args.logLevel = "INFO"
sys.stdout.write("Parsing configuration data\n")
config = configuration.configure_runtime(args.configuration)
sys.stdout.write("Parsing sample data\n")
samples = configuration.configure_samples(args.samples_file, config)
root_job = Job.wrapJobFn(pipeline.spawn_batch_jobs)
for sample in samples:
diagnose_targets_job = Job.wrapJobFn(gatk.diagnosetargets, config, sample, samples, samples[sample]['bam'],
cores=int(config['gatk']['num_cores']),
memory="{}G".format(config['gatk']['max_mem']))
root_job.addChild(diagnose_targets_job)
# Start workflow execution
Job.Runner.startToil(root_job, args)
|
#!/usr/bin/env python
# Standard packages
import sys
import argparse
# Third-party packages
from toil.job import Job
# Package methods
from ddb import configuration
from ddb_ngsflow import gatk
from ddb_ngsflow import pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--samples_file', help="Input configuration file for samples")
parser.add_argument('-c', '--configuration', help="Configuration file for various settings")
Job.Runner.addToilOptions(parser)
args = parser.parse_args()
# args.logLevel = "INFO"
sys.stdout.write("Parsing configuration data\n")
config = configuration.configure_runtime(args.configuration)
sys.stdout.write("Parsing sample data\n")
samples = configuration.configure_samples(args.samples_file, config)
root_job = Job.wrapJobFn(pipeline.spawn_batch_jobs)
for sample in samples:
diagnose_targets_job = Job.wrapJobFn(gatk.diagnosetargets, config, sample, samples,
"{}.recalibrated.sorted.bam".format(sample),
cores=int(config['gatk']['num_cores']),
memory="{}G".format(config['gatk']['max_mem']))
root_job.addChild(diagnose_targets_job)
# Start workflow execution
Job.Runner.startToil(root_job, args)
|
Tweak to bam file name calling
|
Tweak to bam file name calling
|
Python
|
mit
|
dgaston/ddb-scripts,GastonLab/ddb-scripts,dgaston/ddb-ngsflow-scripts
|
#!/usr/bin/env python
# Standard packages
import sys
import argparse
# Third-party packages
from toil.job import Job
# Package methods
from ddb import configuration
from ddb_ngsflow import gatk
from ddb_ngsflow import pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--samples_file', help="Input configuration file for samples")
parser.add_argument('-c', '--configuration', help="Configuration file for various settings")
Job.Runner.addToilOptions(parser)
args = parser.parse_args()
# args.logLevel = "INFO"
sys.stdout.write("Parsing configuration data\n")
config = configuration.configure_runtime(args.configuration)
sys.stdout.write("Parsing sample data\n")
samples = configuration.configure_samples(args.samples_file, config)
root_job = Job.wrapJobFn(pipeline.spawn_batch_jobs)
for sample in samples:
diagnose_targets_job = Job.wrapJobFn(gatk.diagnosetargets, config, sample, samples, samples[sample]['bam'],
cores=int(config['gatk']['num_cores']),
memory="{}G".format(config['gatk']['max_mem']))
root_job.addChild(diagnose_targets_job)
# Start workflow execution
Job.Runner.startToil(root_job, args)
Tweak to bam file name calling
|
#!/usr/bin/env python
# Standard packages
import sys
import argparse
# Third-party packages
from toil.job import Job
# Package methods
from ddb import configuration
from ddb_ngsflow import gatk
from ddb_ngsflow import pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--samples_file', help="Input configuration file for samples")
parser.add_argument('-c', '--configuration', help="Configuration file for various settings")
Job.Runner.addToilOptions(parser)
args = parser.parse_args()
# args.logLevel = "INFO"
sys.stdout.write("Parsing configuration data\n")
config = configuration.configure_runtime(args.configuration)
sys.stdout.write("Parsing sample data\n")
samples = configuration.configure_samples(args.samples_file, config)
root_job = Job.wrapJobFn(pipeline.spawn_batch_jobs)
for sample in samples:
diagnose_targets_job = Job.wrapJobFn(gatk.diagnosetargets, config, sample, samples,
"{}.recalibrated.sorted.bam".format(sample),
cores=int(config['gatk']['num_cores']),
memory="{}G".format(config['gatk']['max_mem']))
root_job.addChild(diagnose_targets_job)
# Start workflow execution
Job.Runner.startToil(root_job, args)
|
<commit_before>#!/usr/bin/env python
# Standard packages
import sys
import argparse
# Third-party packages
from toil.job import Job
# Package methods
from ddb import configuration
from ddb_ngsflow import gatk
from ddb_ngsflow import pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--samples_file', help="Input configuration file for samples")
parser.add_argument('-c', '--configuration', help="Configuration file for various settings")
Job.Runner.addToilOptions(parser)
args = parser.parse_args()
# args.logLevel = "INFO"
sys.stdout.write("Parsing configuration data\n")
config = configuration.configure_runtime(args.configuration)
sys.stdout.write("Parsing sample data\n")
samples = configuration.configure_samples(args.samples_file, config)
root_job = Job.wrapJobFn(pipeline.spawn_batch_jobs)
for sample in samples:
diagnose_targets_job = Job.wrapJobFn(gatk.diagnosetargets, config, sample, samples, samples[sample]['bam'],
cores=int(config['gatk']['num_cores']),
memory="{}G".format(config['gatk']['max_mem']))
root_job.addChild(diagnose_targets_job)
# Start workflow execution
Job.Runner.startToil(root_job, args)
<commit_msg>Tweak to bam file name calling<commit_after>
|
#!/usr/bin/env python
# Standard packages
import sys
import argparse
# Third-party packages
from toil.job import Job
# Package methods
from ddb import configuration
from ddb_ngsflow import gatk
from ddb_ngsflow import pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--samples_file', help="Input configuration file for samples")
parser.add_argument('-c', '--configuration', help="Configuration file for various settings")
Job.Runner.addToilOptions(parser)
args = parser.parse_args()
# args.logLevel = "INFO"
sys.stdout.write("Parsing configuration data\n")
config = configuration.configure_runtime(args.configuration)
sys.stdout.write("Parsing sample data\n")
samples = configuration.configure_samples(args.samples_file, config)
root_job = Job.wrapJobFn(pipeline.spawn_batch_jobs)
for sample in samples:
diagnose_targets_job = Job.wrapJobFn(gatk.diagnosetargets, config, sample, samples,
"{}.recalibrated.sorted.bam".format(sample),
cores=int(config['gatk']['num_cores']),
memory="{}G".format(config['gatk']['max_mem']))
root_job.addChild(diagnose_targets_job)
# Start workflow execution
Job.Runner.startToil(root_job, args)
|
#!/usr/bin/env python
# Standard packages
import sys
import argparse
# Third-party packages
from toil.job import Job
# Package methods
from ddb import configuration
from ddb_ngsflow import gatk
from ddb_ngsflow import pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--samples_file', help="Input configuration file for samples")
parser.add_argument('-c', '--configuration', help="Configuration file for various settings")
Job.Runner.addToilOptions(parser)
args = parser.parse_args()
# args.logLevel = "INFO"
sys.stdout.write("Parsing configuration data\n")
config = configuration.configure_runtime(args.configuration)
sys.stdout.write("Parsing sample data\n")
samples = configuration.configure_samples(args.samples_file, config)
root_job = Job.wrapJobFn(pipeline.spawn_batch_jobs)
for sample in samples:
diagnose_targets_job = Job.wrapJobFn(gatk.diagnosetargets, config, sample, samples, samples[sample]['bam'],
cores=int(config['gatk']['num_cores']),
memory="{}G".format(config['gatk']['max_mem']))
root_job.addChild(diagnose_targets_job)
# Start workflow execution
Job.Runner.startToil(root_job, args)
Tweak to bam file name calling#!/usr/bin/env python
# Standard packages
import sys
import argparse
# Third-party packages
from toil.job import Job
# Package methods
from ddb import configuration
from ddb_ngsflow import gatk
from ddb_ngsflow import pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--samples_file', help="Input configuration file for samples")
parser.add_argument('-c', '--configuration', help="Configuration file for various settings")
Job.Runner.addToilOptions(parser)
args = parser.parse_args()
# args.logLevel = "INFO"
sys.stdout.write("Parsing configuration data\n")
config = configuration.configure_runtime(args.configuration)
sys.stdout.write("Parsing sample data\n")
samples = configuration.configure_samples(args.samples_file, config)
root_job = Job.wrapJobFn(pipeline.spawn_batch_jobs)
for sample in samples:
diagnose_targets_job = Job.wrapJobFn(gatk.diagnosetargets, config, sample, samples,
"{}.recalibrated.sorted.bam".format(sample),
cores=int(config['gatk']['num_cores']),
memory="{}G".format(config['gatk']['max_mem']))
root_job.addChild(diagnose_targets_job)
# Start workflow execution
Job.Runner.startToil(root_job, args)
|
<commit_before>#!/usr/bin/env python
# Standard packages
import sys
import argparse
# Third-party packages
from toil.job import Job
# Package methods
from ddb import configuration
from ddb_ngsflow import gatk
from ddb_ngsflow import pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--samples_file', help="Input configuration file for samples")
parser.add_argument('-c', '--configuration', help="Configuration file for various settings")
Job.Runner.addToilOptions(parser)
args = parser.parse_args()
# args.logLevel = "INFO"
sys.stdout.write("Parsing configuration data\n")
config = configuration.configure_runtime(args.configuration)
sys.stdout.write("Parsing sample data\n")
samples = configuration.configure_samples(args.samples_file, config)
root_job = Job.wrapJobFn(pipeline.spawn_batch_jobs)
for sample in samples:
diagnose_targets_job = Job.wrapJobFn(gatk.diagnosetargets, config, sample, samples, samples[sample]['bam'],
cores=int(config['gatk']['num_cores']),
memory="{}G".format(config['gatk']['max_mem']))
root_job.addChild(diagnose_targets_job)
# Start workflow execution
Job.Runner.startToil(root_job, args)
<commit_msg>Tweak to bam file name calling<commit_after>#!/usr/bin/env python
# Standard packages
import sys
import argparse
# Third-party packages
from toil.job import Job
# Package methods
from ddb import configuration
from ddb_ngsflow import gatk
from ddb_ngsflow import pipeline
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--samples_file', help="Input configuration file for samples")
parser.add_argument('-c', '--configuration', help="Configuration file for various settings")
Job.Runner.addToilOptions(parser)
args = parser.parse_args()
# args.logLevel = "INFO"
sys.stdout.write("Parsing configuration data\n")
config = configuration.configure_runtime(args.configuration)
sys.stdout.write("Parsing sample data\n")
samples = configuration.configure_samples(args.samples_file, config)
root_job = Job.wrapJobFn(pipeline.spawn_batch_jobs)
for sample in samples:
diagnose_targets_job = Job.wrapJobFn(gatk.diagnosetargets, config, sample, samples,
"{}.recalibrated.sorted.bam".format(sample),
cores=int(config['gatk']['num_cores']),
memory="{}G".format(config['gatk']['max_mem']))
root_job.addChild(diagnose_targets_job)
# Start workflow execution
Job.Runner.startToil(root_job, args)
|
dc6d9ec75ffb2ac776d10a924395d05284bc031e
|
tests/test_compat.py
|
tests/test_compat.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_compat
------------
Tests for `cookiecutter.compat` module.
"""
from cookiecutter.compat import unittest, which
def test_existing_command():
assert which('cookiecutter')
def test_non_existing_command():
assert not which('stringthatisntashellcommand')
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_compat
------------
Tests for `cookiecutter.compat` module.
"""
from cookiecutter.compat import which
def test_existing_command():
assert which('cookiecutter')
def test_non_existing_command():
assert not which('stringthatisntashellcommand')
|
Remove unused import of compat unittest
|
Remove unused import of compat unittest
|
Python
|
bsd-3-clause
|
luzfcb/cookiecutter,jhermann/cookiecutter,christabor/cookiecutter,terryjbates/cookiecutter,audreyr/cookiecutter,agconti/cookiecutter,vincentbernat/cookiecutter,kkujawinski/cookiecutter,atlassian/cookiecutter,foodszhang/cookiecutter,lucius-feng/cookiecutter,nhomar/cookiecutter,benthomasson/cookiecutter,ionelmc/cookiecutter,cguardia/cookiecutter,sp1rs/cookiecutter,kkujawinski/cookiecutter,hackebrot/cookiecutter,audreyr/cookiecutter,tylerdave/cookiecutter,Vauxoo/cookiecutter,sp1rs/cookiecutter,ramiroluz/cookiecutter,michaeljoseph/cookiecutter,stevepiercy/cookiecutter,lgp171188/cookiecutter,dajose/cookiecutter,janusnic/cookiecutter,lgp171188/cookiecutter,takeflight/cookiecutter,terryjbates/cookiecutter,benthomasson/cookiecutter,hackebrot/cookiecutter,vincentbernat/cookiecutter,cguardia/cookiecutter,stevepiercy/cookiecutter,janusnic/cookiecutter,nhomar/cookiecutter,moi65/cookiecutter,cichm/cookiecutter,ramiroluz/cookiecutter,venumech/cookiecutter,christabor/cookiecutter,pjbull/cookiecutter,agconti/cookiecutter,luzfcb/cookiecutter,pjbull/cookiecutter,dajose/cookiecutter,venumech/cookiecutter,cichm/cookiecutter,Springerle/cookiecutter,drgarcia1986/cookiecutter,willingc/cookiecutter,vintasoftware/cookiecutter,takeflight/cookiecutter,tylerdave/cookiecutter,lucius-feng/cookiecutter,foodszhang/cookiecutter,jhermann/cookiecutter,moi65/cookiecutter,michaeljoseph/cookiecutter,Vauxoo/cookiecutter,vintasoftware/cookiecutter,ionelmc/cookiecutter,0k/cookiecutter,drgarcia1986/cookiecutter,willingc/cookiecutter,atlassian/cookiecutter,0k/cookiecutter,Springerle/cookiecutter
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_compat
------------
Tests for `cookiecutter.compat` module.
"""
from cookiecutter.compat import unittest, which
def test_existing_command():
assert which('cookiecutter')
def test_non_existing_command():
assert not which('stringthatisntashellcommand')
Remove unused import of compat unittest
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_compat
------------
Tests for `cookiecutter.compat` module.
"""
from cookiecutter.compat import which
def test_existing_command():
assert which('cookiecutter')
def test_non_existing_command():
assert not which('stringthatisntashellcommand')
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_compat
------------
Tests for `cookiecutter.compat` module.
"""
from cookiecutter.compat import unittest, which
def test_existing_command():
assert which('cookiecutter')
def test_non_existing_command():
assert not which('stringthatisntashellcommand')
<commit_msg>Remove unused import of compat unittest<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_compat
------------
Tests for `cookiecutter.compat` module.
"""
from cookiecutter.compat import which
def test_existing_command():
assert which('cookiecutter')
def test_non_existing_command():
assert not which('stringthatisntashellcommand')
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_compat
------------
Tests for `cookiecutter.compat` module.
"""
from cookiecutter.compat import unittest, which
def test_existing_command():
assert which('cookiecutter')
def test_non_existing_command():
assert not which('stringthatisntashellcommand')
Remove unused import of compat unittest#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_compat
------------
Tests for `cookiecutter.compat` module.
"""
from cookiecutter.compat import which
def test_existing_command():
assert which('cookiecutter')
def test_non_existing_command():
assert not which('stringthatisntashellcommand')
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_compat
------------
Tests for `cookiecutter.compat` module.
"""
from cookiecutter.compat import unittest, which
def test_existing_command():
assert which('cookiecutter')
def test_non_existing_command():
assert not which('stringthatisntashellcommand')
<commit_msg>Remove unused import of compat unittest<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_compat
------------
Tests for `cookiecutter.compat` module.
"""
from cookiecutter.compat import which
def test_existing_command():
assert which('cookiecutter')
def test_non_existing_command():
assert not which('stringthatisntashellcommand')
|
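Side note: on Python 3.3+ the standard library offers the same check directly via shutil.which, which returns the executable's full path or None. A minimal sketch:

import shutil

print(shutil.which('python') is not None)           # True if 'python' is on PATH
print(shutil.which('stringthatisntashellcommand'))  # None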
55af2016102ec16a4ec3878f45306e3ac4d520e6
|
qingcloud/cli/iaas_client/actions/instance/reset_instances.py
|
qingcloud/cli/iaas_client/actions/instance/reset_instances.py
|
# coding: utf-8
from qingcloud.cli.misc.utils import explode_array
from qingcloud.cli.iaas_client.actions.base import BaseAction
class ResetInstancesAction(BaseAction):
action = 'ResetInstances'
command = 'reset-instances'
usage = '%(prog)s -i "instance_id, ..." [-f <conf_file>]'
@classmethod
def add_ext_arguments(cls, parser):
parser.add_argument('-i', '--instances', dest='instances',
action='store', type=str, default='',
help='the comma separated IDs of instances you want to reset.')
return parser
@classmethod
def build_directive(cls, options):
instances = explode_array(options.instances)
if len(instances) == 0:
print 'error: [instances] should be specified'
return None
return {'instances': instances}
|
# coding: utf-8
from qingcloud.cli.misc.utils import explode_array
from qingcloud.cli.iaas_client.actions.base import BaseAction
class ResetInstancesAction(BaseAction):
action = 'ResetInstances'
command = 'reset-instances'
    usage = '%(prog)s -i "instance_id, ..." [-f <conf_file> -l <login_mode> -p <login_passwd> -k <login_keypair>]'
@classmethod
def add_ext_arguments(cls, parser):
parser.add_argument('-i', '--instances', dest='instances',
action='store', type=str, default='',
help='the comma separated IDs of instances you want to reset.')
parser.add_argument('-l', '--login_mode', dest='login_mode',
action='store', type=str, default=None,
help='SSH login mode: keypair or passwd')
parser.add_argument('-p', '--login_passwd', dest='login_passwd',
action='store', type=str, default=None,
                            help='login_passwd, should be specified when SSH login mode is "passwd".')
parser.add_argument('-k', '--login_keypair', dest='login_keypair',
action='store', type=str, default=None,
                            help='login_keypair, should be specified when SSH login mode is "keypair".')
return parser
@classmethod
def build_directive(cls, options):
instances = explode_array(options.instances)
if len(instances) == 0:
print 'error: [instances] should be specified'
return None
return {
'instances': instances,
'login_mode': options.login_mode,
'login_passwd': options.login_passwd,
'login_keypair': options.login_keypair,
}
|
Add login mode to reset-instances
|
Add login mode to reset-instances
|
Python
|
apache-2.0
|
yunify/qingcloud-cli
|
# coding: utf-8
from qingcloud.cli.misc.utils import explode_array
from qingcloud.cli.iaas_client.actions.base import BaseAction
class ResetInstancesAction(BaseAction):
action = 'ResetInstances'
command = 'reset-instances'
usage = '%(prog)s -i "instance_id, ..." [-f <conf_file>]'
@classmethod
def add_ext_arguments(cls, parser):
parser.add_argument('-i', '--instances', dest='instances',
action='store', type=str, default='',
help='the comma separated IDs of instances you want to reset.')
return parser
@classmethod
def build_directive(cls, options):
instances = explode_array(options.instances)
if len(instances) == 0:
print 'error: [instances] should be specified'
return None
return {'instances': instances}
Add login mode to reset-instances
|
# coding: utf-8
from qingcloud.cli.misc.utils import explode_array
from qingcloud.cli.iaas_client.actions.base import BaseAction
class ResetInstancesAction(BaseAction):
action = 'ResetInstances'
command = 'reset-instances'
    usage = '%(prog)s -i "instance_id, ..." [-f <conf_file> -l <login_mode> -p <login_passwd> -k <login_keypair>]'
@classmethod
def add_ext_arguments(cls, parser):
parser.add_argument('-i', '--instances', dest='instances',
action='store', type=str, default='',
help='the comma separated IDs of instances you want to reset.')
parser.add_argument('-l', '--login_mode', dest='login_mode',
action='store', type=str, default=None,
help='SSH login mode: keypair or passwd')
parser.add_argument('-p', '--login_passwd', dest='login_passwd',
action='store', type=str, default=None,
                            help='login_passwd, should be specified when SSH login mode is "passwd".')
parser.add_argument('-k', '--login_keypair', dest='login_keypair',
action='store', type=str, default=None,
                            help='login_keypair, should be specified when SSH login mode is "keypair".')
return parser
@classmethod
def build_directive(cls, options):
instances = explode_array(options.instances)
if len(instances) == 0:
print 'error: [instances] should be specified'
return None
return {
'instances': instances,
'login_mode': options.login_mode,
'login_passwd': options.login_passwd,
'login_keypair': options.login_keypair,
}
|
<commit_before># coding: utf-8
from qingcloud.cli.misc.utils import explode_array
from qingcloud.cli.iaas_client.actions.base import BaseAction
class ResetInstancesAction(BaseAction):
action = 'ResetInstances'
command = 'reset-instances'
usage = '%(prog)s -i "instance_id, ..." [-f <conf_file>]'
@classmethod
def add_ext_arguments(cls, parser):
parser.add_argument('-i', '--instances', dest='instances',
action='store', type=str, default='',
help='the comma separated IDs of instances you want to reset.')
return parser
@classmethod
def build_directive(cls, options):
instances = explode_array(options.instances)
if len(instances) == 0:
print 'error: [instances] should be specified'
return None
return {'instances': instances}
<commit_msg>Add login mode to reset-instances<commit_after>
|
# coding: utf-8
from qingcloud.cli.misc.utils import explode_array
from qingcloud.cli.iaas_client.actions.base import BaseAction
class ResetInstancesAction(BaseAction):
action = 'ResetInstances'
command = 'reset-instances'
    usage = '%(prog)s -i "instance_id, ..." [-f <conf_file> -l <login_mode> -p <login_passwd> -k <login_keypair>]'
@classmethod
def add_ext_arguments(cls, parser):
parser.add_argument('-i', '--instances', dest='instances',
action='store', type=str, default='',
help='the comma separated IDs of instances you want to reset.')
parser.add_argument('-l', '--login_mode', dest='login_mode',
action='store', type=str, default=None,
help='SSH login mode: keypair or passwd')
parser.add_argument('-p', '--login_passwd', dest='login_passwd',
action='store', type=str, default=None,
                            help='login_passwd, should be specified when SSH login mode is "passwd".')
parser.add_argument('-k', '--login_keypair', dest='login_keypair',
action='store', type=str, default=None,
                            help='login_keypair, should be specified when SSH login mode is "keypair".')
return parser
@classmethod
def build_directive(cls, options):
instances = explode_array(options.instances)
if len(instances) == 0:
print 'error: [instances] should be specified'
return None
return {
'instances': instances,
'login_mode': options.login_mode,
'login_passwd': options.login_passwd,
'login_keypair': options.login_keypair,
}
|
# coding: utf-8
from qingcloud.cli.misc.utils import explode_array
from qingcloud.cli.iaas_client.actions.base import BaseAction
class ResetInstancesAction(BaseAction):
action = 'ResetInstances'
command = 'reset-instances'
usage = '%(prog)s -i "instance_id, ..." [-f <conf_file>]'
@classmethod
def add_ext_arguments(cls, parser):
parser.add_argument('-i', '--instances', dest='instances',
action='store', type=str, default='',
help='the comma separated IDs of instances you want to reset.')
return parser
@classmethod
def build_directive(cls, options):
instances = explode_array(options.instances)
if len(instances) == 0:
print 'error: [instances] should be specified'
return None
return {'instances': instances}
Add login mode to reset-instances# coding: utf-8
from qingcloud.cli.misc.utils import explode_array
from qingcloud.cli.iaas_client.actions.base import BaseAction
class ResetInstancesAction(BaseAction):
action = 'ResetInstances'
command = 'reset-instances'
    usage = '%(prog)s -i "instance_id, ..." [-f <conf_file> -l <login_mode> -p <login_passwd> -k <login_keypair>]'
@classmethod
def add_ext_arguments(cls, parser):
parser.add_argument('-i', '--instances', dest='instances',
action='store', type=str, default='',
help='the comma separated IDs of instances you want to reset.')
parser.add_argument('-l', '--login_mode', dest='login_mode',
action='store', type=str, default=None,
help='SSH login mode: keypair or passwd')
parser.add_argument('-p', '--login_passwd', dest='login_passwd',
action='store', type=str, default=None,
                            help='login_passwd, should be specified when SSH login mode is "passwd".')
parser.add_argument('-k', '--login_keypair', dest='login_keypair',
action='store', type=str, default=None,
                            help='login_keypair, should be specified when SSH login mode is "keypair".')
return parser
@classmethod
def build_directive(cls, options):
instances = explode_array(options.instances)
if len(instances) == 0:
print 'error: [instances] should be specified'
return None
return {
'instances': instances,
'login_mode': options.login_mode,
'login_passwd': options.login_passwd,
'login_keypair': options.login_keypair,
}
|
<commit_before># coding: utf-8
from qingcloud.cli.misc.utils import explode_array
from qingcloud.cli.iaas_client.actions.base import BaseAction
class ResetInstancesAction(BaseAction):
action = 'ResetInstances'
command = 'reset-instances'
usage = '%(prog)s -i "instance_id, ..." [-f <conf_file>]'
@classmethod
def add_ext_arguments(cls, parser):
parser.add_argument('-i', '--instances', dest='instances',
action='store', type=str, default='',
help='the comma separated IDs of instances you want to reset.')
return parser
@classmethod
def build_directive(cls, options):
instances = explode_array(options.instances)
if len(instances) == 0:
print 'error: [instances] should be specified'
return None
return {'instances': instances}
<commit_msg>Add login mode to reset-instances<commit_after># coding: utf-8
from qingcloud.cli.misc.utils import explode_array
from qingcloud.cli.iaas_client.actions.base import BaseAction
class ResetInstancesAction(BaseAction):
action = 'ResetInstances'
command = 'reset-instances'
    usage = '%(prog)s -i "instance_id, ..." [-f <conf_file> -l <login_mode> -p <login_passwd> -k <login_keypair>]'
@classmethod
def add_ext_arguments(cls, parser):
parser.add_argument('-i', '--instances', dest='instances',
action='store', type=str, default='',
help='the comma separated IDs of instances you want to reset.')
parser.add_argument('-l', '--login_mode', dest='login_mode',
action='store', type=str, default=None,
help='SSH login mode: keypair or passwd')
parser.add_argument('-p', '--login_passwd', dest='login_passwd',
action='store', type=str, default=None,
                            help='login_passwd, should be specified when SSH login mode is "passwd".')
parser.add_argument('-k', '--login_keypair', dest='login_keypair',
action='store', type=str, default=None,
                            help='login_keypair, should be specified when SSH login mode is "keypair".')
return parser
@classmethod
def build_directive(cls, options):
instances = explode_array(options.instances)
if len(instances) == 0:
print 'error: [instances] should be specified'
return None
return {
'instances': instances,
'login_mode': options.login_mode,
'login_passwd': options.login_passwd,
'login_keypair': options.login_keypair,
}
|
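A generic aside (hypothetical option names, plain argparse): argparse has no built-in way to make one option required depending on another, so constraints like "passwd mode needs --login_passwd" are typically validated after parsing. A minimal sketch:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-l', '--login_mode', choices=['keypair', 'passwd'])
parser.add_argument('-p', '--login_passwd')
parser.add_argument('-k', '--login_keypair')
args = parser.parse_args()

# Enforce the cross-option constraints manually.
if args.login_mode == 'passwd' and not args.login_passwd:
    parser.error('--login_passwd is required when --login_mode is "passwd"')
if args.login_mode == 'keypair' and not args.login_keypair:
    parser.error('--login_keypair is required when --login_mode is "keypair"')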
270d06c880fe72987b82fe00f234852e8d49eca0
|
icekit/plugins/image_gallery/content_plugins.py
|
icekit/plugins/image_gallery/content_plugins.py
|
"""
Definition of the plugin.
"""
from django.utils.translation import ugettext_lazy as _
from fluent_contents.extensions import ContentPlugin, plugin_pool
from . import models
@plugin_pool.register
class ImageGalleryPlugin(ContentPlugin):
model = models.ImageGalleryShowItem
category = _('Assets')
render_template = 'icekit/plugins/image_gallery/default.html'
raw_id_fields = ['slide_show', ]
class FrontendMedia:
js = (
'plugins/image_gallery/init.js',
)
|
"""
Definition of the plugin.
"""
from django.utils.translation import ugettext_lazy as _
from fluent_contents.extensions import ContentPlugin, plugin_pool
from . import models
@plugin_pool.register
class ImageGalleryPlugin(ContentPlugin):
model = models.ImageGalleryShowItem
category = _('Assets')
render_template = 'icekit/plugins/image_gallery/default.html'
raw_id_fields = ['slide_show', ]
|
Remove invalid reference to unnecessary JS for image gallery plugin
|
Remove invalid reference to unnecessary JS for image gallery plugin
|
Python
|
mit
|
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
|
"""
Definition of the plugin.
"""
from django.utils.translation import ugettext_lazy as _
from fluent_contents.extensions import ContentPlugin, plugin_pool
from . import models
@plugin_pool.register
class ImageGalleryPlugin(ContentPlugin):
model = models.ImageGalleryShowItem
category = _('Assets')
render_template = 'icekit/plugins/image_gallery/default.html'
raw_id_fields = ['slide_show', ]
class FrontendMedia:
js = (
'plugins/image_gallery/init.js',
)
Remove invalid reference to unnecessary JS for image gallery plugin
|
"""
Definition of the plugin.
"""
from django.utils.translation import ugettext_lazy as _
from fluent_contents.extensions import ContentPlugin, plugin_pool
from . import models
@plugin_pool.register
class ImageGalleryPlugin(ContentPlugin):
model = models.ImageGalleryShowItem
category = _('Assets')
render_template = 'icekit/plugins/image_gallery/default.html'
raw_id_fields = ['slide_show', ]
|
<commit_before>"""
Definition of the plugin.
"""
from django.utils.translation import ugettext_lazy as _
from fluent_contents.extensions import ContentPlugin, plugin_pool
from . import models
@plugin_pool.register
class ImageGalleryPlugin(ContentPlugin):
model = models.ImageGalleryShowItem
category = _('Assets')
render_template = 'icekit/plugins/image_gallery/default.html'
raw_id_fields = ['slide_show', ]
class FrontendMedia:
js = (
'plugins/image_gallery/init.js',
)
<commit_msg>Remove invalid reference to unnecessary JS for image gallery plugin<commit_after>
|
"""
Definition of the plugin.
"""
from django.utils.translation import ugettext_lazy as _
from fluent_contents.extensions import ContentPlugin, plugin_pool
from . import models
@plugin_pool.register
class ImageGalleryPlugin(ContentPlugin):
model = models.ImageGalleryShowItem
category = _('Assets')
render_template = 'icekit/plugins/image_gallery/default.html'
raw_id_fields = ['slide_show', ]
|
"""
Definition of the plugin.
"""
from django.utils.translation import ugettext_lazy as _
from fluent_contents.extensions import ContentPlugin, plugin_pool
from . import models
@plugin_pool.register
class ImageGalleryPlugin(ContentPlugin):
model = models.ImageGalleryShowItem
category = _('Assets')
render_template = 'icekit/plugins/image_gallery/default.html'
raw_id_fields = ['slide_show', ]
class FrontendMedia:
js = (
'plugins/image_gallery/init.js',
)
Remove invalid reference to unnecessary JS for image gallery plugin"""
Definition of the plugin.
"""
from django.utils.translation import ugettext_lazy as _
from fluent_contents.extensions import ContentPlugin, plugin_pool
from . import models
@plugin_pool.register
class ImageGalleryPlugin(ContentPlugin):
model = models.ImageGalleryShowItem
category = _('Assets')
render_template = 'icekit/plugins/image_gallery/default.html'
raw_id_fields = ['slide_show', ]
|
<commit_before>"""
Definition of the plugin.
"""
from django.utils.translation import ugettext_lazy as _
from fluent_contents.extensions import ContentPlugin, plugin_pool
from . import models
@plugin_pool.register
class ImageGalleryPlugin(ContentPlugin):
model = models.ImageGalleryShowItem
category = _('Assets')
render_template = 'icekit/plugins/image_gallery/default.html'
raw_id_fields = ['slide_show', ]
class FrontendMedia:
js = (
'plugins/image_gallery/init.js',
)
<commit_msg>Remove invalid reference to unnecessary JS for image gallery plugin<commit_after>"""
Definition of the plugin.
"""
from django.utils.translation import ugettext_lazy as _
from fluent_contents.extensions import ContentPlugin, plugin_pool
from . import models
@plugin_pool.register
class ImageGalleryPlugin(ContentPlugin):
model = models.ImageGalleryShowItem
category = _('Assets')
render_template = 'icekit/plugins/image_gallery/default.html'
raw_id_fields = ['slide_show', ]
|
6e02ea0b94b237dbd8da63b77806530a904deef9
|
alignak_backend_import/__init__.py
|
alignak_backend_import/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Alignak backend import
This module contains utility tools to import Nagios-like flat files configuration into
an Alignak REST backend.
"""
# Application version and manifest
VERSION = (0, 8,0 )
__application__ = u"Alignak backend import"
__short_version__ = '.'.join((str(each) for each in VERSION[:2]))
__version__ = '.'.join((str(each) for each in VERSION[:4]))
__author__ = u"Frédéric Mohier"
__author_email__ = u"frederic.mohier@gmail.com"
__copyright__ = u"(c) 2015-2016, %s" % __author__
__license__ = u"GNU Affero General Public License, version 3"
__description__ = u"Alignak backend import tools"
__releasenotes__ = u"""Alignak Backend import tools"""
__doc_url__ = "https://github.com/Alignak-monitoring-contrib/alignak-backend-import"
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Alignak backend import
This module contains utility tools to import Nagios-like flat files configuration into
an Alignak REST backend.
"""
# Application version and manifest
VERSION = (0, 8, 0, 1)
__application__ = u"Alignak backend import"
__short_version__ = '.'.join((str(each) for each in VERSION[:2]))
__version__ = '.'.join((str(each) for each in VERSION[:4]))
__author__ = u"Frédéric Mohier"
__author_email__ = u"frederic.mohier@gmail.com"
__copyright__ = u"(c) 2015-2016, %s" % __author__
__license__ = u"GNU Affero General Public License, version 3"
__description__ = u"Alignak backend import tools"
__releasenotes__ = u"""Alignak Backend import tools"""
__doc_url__ = "https://github.com/Alignak-monitoring-contrib/alignak-backend-import"
|
Fix pep8 Set version as 0.8.0.1
|
Fix pep8
Set version as 0.8.0.1
|
Python
|
agpl-3.0
|
Alignak-monitoring-contrib/alignak-backend-import,Alignak-monitoring-contrib/alignak-backend-import
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Alignak backend import
This module contains utility tools to import Nagios-like flat files configuration into
an Alignak REST backend.
"""
# Application version and manifest
VERSION = (0, 8,0 )
__application__ = u"Alignak backend import"
__short_version__ = '.'.join((str(each) for each in VERSION[:2]))
__version__ = '.'.join((str(each) for each in VERSION[:4]))
__author__ = u"Frédéric Mohier"
__author_email__ = u"frederic.mohier@gmail.com"
__copyright__ = u"(c) 2015-2016, %s" % __author__
__license__ = u"GNU Affero General Public License, version 3"
__description__ = u"Alignak backend import tools"
__releasenotes__ = u"""Alignak Backend import tools"""
__doc_url__ = "https://github.com/Alignak-monitoring-contrib/alignak-backend-import"
Fix pep8
Set version as 0.8.0.1
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Alignak backend import
This module contains utility tools to import Nagios-like flat files configuration into
an Alignak REST backend.
"""
# Application version and manifest
VERSION = (0, 8, 0, 1)
__application__ = u"Alignak backend import"
__short_version__ = '.'.join((str(each) for each in VERSION[:2]))
__version__ = '.'.join((str(each) for each in VERSION[:4]))
__author__ = u"Frédéric Mohier"
__author_email__ = u"frederic.mohier@gmail.com"
__copyright__ = u"(c) 2015-2016, %s" % __author__
__license__ = u"GNU Affero General Public License, version 3"
__description__ = u"Alignak backend import tools"
__releasenotes__ = u"""Alignak Backend import tools"""
__doc_url__ = "https://github.com/Alignak-monitoring-contrib/alignak-backend-import"
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Alignak backend import
This module contains utility tools to import Nagios-like flat files configuration into
an Alignak REST backend.
"""
# Application version and manifest
VERSION = (0, 8,0 )
__application__ = u"Alignak backend import"
__short_version__ = '.'.join((str(each) for each in VERSION[:2]))
__version__ = '.'.join((str(each) for each in VERSION[:4]))
__author__ = u"Frédéric Mohier"
__author_email__ = u"frederic.mohier@gmail.com"
__copyright__ = u"(c) 2015-2016, %s" % __author__
__license__ = u"GNU Affero General Public License, version 3"
__description__ = u"Alignak backend import tools"
__releasenotes__ = u"""Alignak Backend import tools"""
__doc_url__ = "https://github.com/Alignak-monitoring-contrib/alignak-backend-import"
<commit_msg>Fix pep8
Set version as 0.8.0.1<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Alignak backend import
This module contains utility tools to import Nagios-like flat files configuration into
an Alignak REST backend.
"""
# Application version and manifest
VERSION = (0, 8, 0, 1)
__application__ = u"Alignak backend import"
__short_version__ = '.'.join((str(each) for each in VERSION[:2]))
__version__ = '.'.join((str(each) for each in VERSION[:4]))
__author__ = u"Frédéric Mohier"
__author_email__ = u"frederic.mohier@gmail.com"
__copyright__ = u"(c) 2015-2016, %s" % __author__
__license__ = u"GNU Affero General Public License, version 3"
__description__ = u"Alignak backend import tools"
__releasenotes__ = u"""Alignak Backend import tools"""
__doc_url__ = "https://github.com/Alignak-monitoring-contrib/alignak-backend-import"
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Alignak backend import
This module contains utility tools to import Nagios-like flat files configuration into
an Alignak REST backend.
"""
# Application version and manifest
VERSION = (0, 8,0 )
__application__ = u"Alignak backend import"
__short_version__ = '.'.join((str(each) for each in VERSION[:2]))
__version__ = '.'.join((str(each) for each in VERSION[:4]))
__author__ = u"Frédéric Mohier"
__author_email__ = u"frederic.mohier@gmail.com"
__copyright__ = u"(c) 2015-2016, %s" % __author__
__license__ = u"GNU Affero General Public License, version 3"
__description__ = u"Alignak backend import tools"
__releasenotes__ = u"""Alignak Backend import tools"""
__doc_url__ = "https://github.com/Alignak-monitoring-contrib/alignak-backend-import"
Fix pep8
Set version as 0.8.0.1#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Alignak backend import
This module contains utility tools to import Nagios-like flat files configuration into
an Alignak REST backend.
"""
# Application version and manifest
VERSION = (0, 8, 0, 1)
__application__ = u"Alignak backend import"
__short_version__ = '.'.join((str(each) for each in VERSION[:2]))
__version__ = '.'.join((str(each) for each in VERSION[:4]))
__author__ = u"Frédéric Mohier"
__author_email__ = u"frederic.mohier@gmail.com"
__copyright__ = u"(c) 2015-2016, %s" % __author__
__license__ = u"GNU Affero General Public License, version 3"
__description__ = u"Alignak backend import tools"
__releasenotes__ = u"""Alignak Backend import tools"""
__doc_url__ = "https://github.com/Alignak-monitoring-contrib/alignak-backend-import"
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Alignak backend import
This module contains utility tools to import Nagios-like flat files configuration into
an Alignak REST backend.
"""
# Application version and manifest
VERSION = (0, 8,0 )
__application__ = u"Alignak backend import"
__short_version__ = '.'.join((str(each) for each in VERSION[:2]))
__version__ = '.'.join((str(each) for each in VERSION[:4]))
__author__ = u"Frédéric Mohier"
__author_email__ = u"frederic.mohier@gmail.com"
__copyright__ = u"(c) 2015-2016, %s" % __author__
__license__ = u"GNU Affero General Public License, version 3"
__description__ = u"Alignak backend import tools"
__releasenotes__ = u"""Alignak Backend import tools"""
__doc_url__ = "https://github.com/Alignak-monitoring-contrib/alignak-backend-import"
<commit_msg>Fix pep8
Set version as 0.8.0.1<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Alignak backend import
This module contains utility tools to import Nagios-like flat files configuration into
an Alignak REST backend.
"""
# Application version and manifest
VERSION = (0, 8, 0, 1)
__application__ = u"Alignak backend import"
__short_version__ = '.'.join((str(each) for each in VERSION[:2]))
__version__ = '.'.join((str(each) for each in VERSION[:4]))
__author__ = u"Frédéric Mohier"
__author_email__ = u"frederic.mohier@gmail.com"
__copyright__ = u"(c) 2015-2016, %s" % __author__
__license__ = u"GNU Affero General Public License, version 3"
__description__ = u"Alignak backend import tools"
__releasenotes__ = u"""Alignak Backend import tools"""
__doc_url__ = "https://github.com/Alignak-monitoring-contrib/alignak-backend-import"
|
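Beyond the whitespace cleanup, the version bump above changes VERSION from a 3-tuple to a 4-tuple, which matters because __version__ slices the first four elements. A standalone check of the construction:

VERSION = (0, 8, 0, 1)

__short_version__ = '.'.join(str(each) for each in VERSION[:2])
__version__ = '.'.join(str(each) for each in VERSION[:4])

# With the old 3-tuple (0, 8, 0), the [:4] slice silently yielded '0.8.0';
# the fourth element is required to produce the intended '0.8.0.1'.
assert __short_version__ == '0.8'
assert __version__ == '0.8.0.1'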
8f4d0247b56ae157e8486c37e38992015e55ac3e
|
skimage/io/_plugins/matplotlib_plugin.py
|
skimage/io/_plugins/matplotlib_plugin.py
|
import matplotlib.pyplot as plt
def imshow(*args, **kwargs):
if plt.gca().has_data():
plt.figure()
kwargs.setdefault('interpolation', 'nearest')
kwargs.setdefault('cmap', 'gray')
plt.imshow(*args, **kwargs)
imread = plt.imread
show = plt.show
def _app_show():
show()
|
import matplotlib.pyplot as plt
def imshow(im, *args, **kwargs):
"""Show the input image and return the current axes.
Parameters
----------
im : array, shape (M, N[, 3])
The image to display.
*args, **kwargs : positional and keyword arguments
These are passed directly to `matplotlib.pyplot.imshow`.
Returns
-------
ax : `matplotlib.pyplot.Axes`
The axes showing the image.
"""
if plt.gca().has_data():
plt.figure()
kwargs.setdefault('interpolation', 'nearest')
kwargs.setdefault('cmap', 'gray')
return plt.imshow(im, *args, **kwargs)
imread = plt.imread
show = plt.show
def _app_show():
show()
|
Add docstring to matplotlib imshow plugin
|
Add docstring to matplotlib imshow plugin
The image is now named as an argument, and the axes are returned, in
keeping with matplotlib convention.
|
Python
|
bsd-3-clause
|
emon10005/scikit-image,vighneshbirodkar/scikit-image,ofgulban/scikit-image,Midafi/scikit-image,bennlich/scikit-image,chriscrosscutler/scikit-image,dpshelio/scikit-image,emon10005/scikit-image,ofgulban/scikit-image,newville/scikit-image,WarrenWeckesser/scikits-image,Britefury/scikit-image,juliusbierk/scikit-image,GaZ3ll3/scikit-image,rjeli/scikit-image,blink1073/scikit-image,vighneshbirodkar/scikit-image,vighneshbirodkar/scikit-image,michaelaye/scikit-image,bsipocz/scikit-image,youprofit/scikit-image,rjeli/scikit-image,ajaybhat/scikit-image,pratapvardhan/scikit-image,Hiyorimi/scikit-image,michaelpacer/scikit-image,Midafi/scikit-image,pratapvardhan/scikit-image,jwiggins/scikit-image,michaelaye/scikit-image,bennlich/scikit-image,ofgulban/scikit-image,warmspringwinds/scikit-image,blink1073/scikit-image,oew1v07/scikit-image,keflavich/scikit-image,robintw/scikit-image,ClinicalGraphics/scikit-image,chriscrosscutler/scikit-image,paalge/scikit-image,warmspringwinds/scikit-image,paalge/scikit-image,Hiyorimi/scikit-image,rjeli/scikit-image,oew1v07/scikit-image,newville/scikit-image,robintw/scikit-image,youprofit/scikit-image,dpshelio/scikit-image,keflavich/scikit-image,jwiggins/scikit-image,bsipocz/scikit-image,michaelpacer/scikit-image,paalge/scikit-image,GaZ3ll3/scikit-image,ajaybhat/scikit-image,WarrenWeckesser/scikits-image,juliusbierk/scikit-image,ClinicalGraphics/scikit-image,Britefury/scikit-image
|
import matplotlib.pyplot as plt
def imshow(*args, **kwargs):
if plt.gca().has_data():
plt.figure()
kwargs.setdefault('interpolation', 'nearest')
kwargs.setdefault('cmap', 'gray')
plt.imshow(*args, **kwargs)
imread = plt.imread
show = plt.show
def _app_show():
show()
Add docstring to matplotlib imshow plugin
The image is now named as an argument, and the axes are returned, in
keeping with matplotlib convention.
|
import matplotlib.pyplot as plt
def imshow(im, *args, **kwargs):
"""Show the input image and return the current axes.
Parameters
----------
im : array, shape (M, N[, 3])
The image to display.
*args, **kwargs : positional and keyword arguments
These are passed directly to `matplotlib.pyplot.imshow`.
Returns
-------
ax : `matplotlib.pyplot.Axes`
The axes showing the image.
"""
if plt.gca().has_data():
plt.figure()
kwargs.setdefault('interpolation', 'nearest')
kwargs.setdefault('cmap', 'gray')
return plt.imshow(im, *args, **kwargs)
imread = plt.imread
show = plt.show
def _app_show():
show()
|
<commit_before>import matplotlib.pyplot as plt
def imshow(*args, **kwargs):
if plt.gca().has_data():
plt.figure()
kwargs.setdefault('interpolation', 'nearest')
kwargs.setdefault('cmap', 'gray')
plt.imshow(*args, **kwargs)
imread = plt.imread
show = plt.show
def _app_show():
show()
<commit_msg>Add docstring to matplotlib imshow plugin
The image is now named as an argument, and the axes are returned, in
keeping with matplotlib convention.<commit_after>
|
import matplotlib.pyplot as plt
def imshow(im, *args, **kwargs):
"""Show the input image and return the current axes.
Parameters
----------
im : array, shape (M, N[, 3])
The image to display.
*args, **kwargs : positional and keyword arguments
These are passed directly to `matplotlib.pyplot.imshow`.
Returns
-------
ax : `matplotlib.pyplot.Axes`
The axes showing the image.
"""
if plt.gca().has_data():
plt.figure()
kwargs.setdefault('interpolation', 'nearest')
kwargs.setdefault('cmap', 'gray')
return plt.imshow(im, *args, **kwargs)
imread = plt.imread
show = plt.show
def _app_show():
show()
|
import matplotlib.pyplot as plt
def imshow(*args, **kwargs):
if plt.gca().has_data():
plt.figure()
kwargs.setdefault('interpolation', 'nearest')
kwargs.setdefault('cmap', 'gray')
plt.imshow(*args, **kwargs)
imread = plt.imread
show = plt.show
def _app_show():
show()
Add docstring to matplotlib imshow plugin
The image is now named as an argument, and the axes are returned, in
keeping with matplotlib convention.import matplotlib.pyplot as plt
def imshow(im, *args, **kwargs):
"""Show the input image and return the current axes.
Parameters
----------
im : array, shape (M, N[, 3])
The image to display.
*args, **kwargs : positional and keyword arguments
These are passed directly to `matplotlib.pyplot.imshow`.
Returns
-------
ax : `matplotlib.pyplot.Axes`
The axes showing the image.
"""
if plt.gca().has_data():
plt.figure()
kwargs.setdefault('interpolation', 'nearest')
kwargs.setdefault('cmap', 'gray')
return plt.imshow(im, *args, **kwargs)
imread = plt.imread
show = plt.show
def _app_show():
show()
|
<commit_before>import matplotlib.pyplot as plt
def imshow(*args, **kwargs):
if plt.gca().has_data():
plt.figure()
kwargs.setdefault('interpolation', 'nearest')
kwargs.setdefault('cmap', 'gray')
plt.imshow(*args, **kwargs)
imread = plt.imread
show = plt.show
def _app_show():
show()
<commit_msg>Add docstring to matplotlib imshow plugin
The image is now named as an argument, and the axes are returned, in
keeping with matplotlib convention.<commit_after>import matplotlib.pyplot as plt
def imshow(im, *args, **kwargs):
"""Show the input image and return the current axes.
Parameters
----------
im : array, shape (M, N[, 3])
The image to display.
*args, **kwargs : positional and keyword arguments
These are passed directly to `matplotlib.pyplot.imshow`.
Returns
-------
ax : `matplotlib.pyplot.Axes`
The axes showing the image.
"""
if plt.gca().has_data():
plt.figure()
kwargs.setdefault('interpolation', 'nearest')
kwargs.setdefault('cmap', 'gray')
return plt.imshow(im, *args, **kwargs)
imread = plt.imread
show = plt.show
def _app_show():
show()
|
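A short usage sketch for the reworked plugin above. In practice plt.imshow returns a matplotlib AxesImage handle, and its .axes attribute reaches the Axes, so callers can keep customizing the figure after the call. The wrapper below is copied from the record; the driver code under it is illustrative only and assumes numpy and matplotlib are installed:

import numpy as np
import matplotlib.pyplot as plt

def imshow(im, *args, **kwargs):
    # Open a fresh figure if the current axes already hold data, default to
    # nearest-neighbor interpolation and a gray colormap, return the handle.
    if plt.gca().has_data():
        plt.figure()
    kwargs.setdefault('interpolation', 'nearest')
    kwargs.setdefault('cmap', 'gray')
    return plt.imshow(im, *args, **kwargs)

image = np.random.rand(32, 32)          # hypothetical test image
handle = imshow(image)
handle.axes.set_title('random noise')   # possible because the handle is returned
plt.show()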
1c3d4488566576e3181f7acbf902f0adab3876dd
|
api/spawner/templates/constants.py
|
api/spawner/templates/constants.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from django.conf import settings
from polyaxon_schemas.polyaxonfile import constants
JOB_NAME = 'plxjob-{task_type}{task_idx}-{experiment_uuid}'
DEFAULT_PORT = 2222
ENV_VAR_TEMPLATE = '{name: "{var_name}", value: "{var_value}"}'
VOLUME_NAME = 'pv-{vol_name}'
VOLUME_CLAIM_NAME = 'pvc-{vol_name}'
CLUSTER_CONFIG_MAP_NAME = 'plxcluster-{experiment_uuid}'
CLUSTER_CONFIG_MAP_KEY_NAME = 'plxcluster_{experiment_uuid}_{task_type}'
POD_CONTAINER_PROJECT_NAME = 'plxproject-{project_uuid}-{name}'
DEPLOYMENT_NAME = 'plxproject-{project_uuid}-{name}'
def SIDECAR_ARGS_FN(container_job_name, pod_id):
return ["python3", "api/manage.py", "start_sidecar", pod_id,
"--log_sleep_interval={}".format(settings.JOB_SIDECAR_LOG_SLEEP_INTERVAL),
"--persist={}".format(settings.JOB_SIDECAR_PERSIST)]
DATA_VOLUME = 'data'
OUTPUTS_VOLUME = 'outputs'
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from django.conf import settings
JOB_NAME = 'plxjob-{task_type}{task_idx}-{experiment_uuid}'
DEFAULT_PORT = 2222
ENV_VAR_TEMPLATE = '{name: "{var_name}", value: "{var_value}"}'
VOLUME_NAME = 'pv-{vol_name}'
VOLUME_CLAIM_NAME = 'pvc-{vol_name}'
CLUSTER_CONFIG_MAP_NAME = 'plxcluster-{experiment_uuid}'
CLUSTER_CONFIG_MAP_KEY_NAME = 'plxcluster_{experiment_uuid}_{task_type}'
POD_CONTAINER_PROJECT_NAME = 'plxproject-{project_uuid}-{name}'
DEPLOYMENT_NAME = 'plxproject-{project_uuid}-{name}'
def SIDECAR_ARGS_FN(pod_id):
return ["python3", "api/manage.py", "start_sidecar", pod_id,
"--log_sleep_interval={}".format(settings.JOB_SIDECAR_LOG_SLEEP_INTERVAL),
"--persist={}".format(settings.JOB_SIDECAR_PERSIST)]
DATA_VOLUME = 'data'
OUTPUTS_VOLUME = 'outputs'
|
Update naming for spawner jobs
|
Update naming for spawner jobs
|
Python
|
apache-2.0
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from django.conf import settings
from polyaxon_schemas.polyaxonfile import constants
JOB_NAME = 'plxjob-{task_type}{task_idx}-{experiment_uuid}'
DEFAULT_PORT = 2222
ENV_VAR_TEMPLATE = '{name: "{var_name}", value: "{var_value}"}'
VOLUME_NAME = 'pv-{vol_name}'
VOLUME_CLAIM_NAME = 'pvc-{vol_name}'
CLUSTER_CONFIG_MAP_NAME = 'plxcluster-{experiment_uuid}'
CLUSTER_CONFIG_MAP_KEY_NAME = 'plxcluster_{experiment_uuid}_{task_type}'
POD_CONTAINER_PROJECT_NAME = 'plxproject-{project_uuid}-{name}'
DEPLOYMENT_NAME = 'plxproject-{project_uuid}-{name}'
def SIDECAR_ARGS_FN(container_job_name, pod_id):
return ["python3", "api/manage.py", "start_sidecar", pod_id,
"--log_sleep_interval={}".format(settings.JOB_SIDECAR_LOG_SLEEP_INTERVAL),
"--persist={}".format(settings.JOB_SIDECAR_PERSIST)]
DATA_VOLUME = 'data'
OUTPUTS_VOLUME = 'outputs'
Update naming for spawner jobs
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from django.conf import settings
JOB_NAME = 'plxjob-{task_type}{task_idx}-{experiment_uuid}'
DEFAULT_PORT = 2222
ENV_VAR_TEMPLATE = '{name: "{var_name}", value: "{var_value}"}'
VOLUME_NAME = 'pv-{vol_name}'
VOLUME_CLAIM_NAME = 'pvc-{vol_name}'
CLUSTER_CONFIG_MAP_NAME = 'plxcluster-{experiment_uuid}'
CLUSTER_CONFIG_MAP_KEY_NAME = 'plxcluster_{experiment_uuid}_{task_type}'
POD_CONTAINER_PROJECT_NAME = 'plxproject-{project_uuid}-{name}'
DEPLOYMENT_NAME = 'plxproject-{project_uuid}-{name}'
def SIDECAR_ARGS_FN(pod_id):
return ["python3", "api/manage.py", "start_sidecar", pod_id,
"--log_sleep_interval={}".format(settings.JOB_SIDECAR_LOG_SLEEP_INTERVAL),
"--persist={}".format(settings.JOB_SIDECAR_PERSIST)]
DATA_VOLUME = 'data'
OUTPUTS_VOLUME = 'outputs'
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from django.conf import settings
from polyaxon_schemas.polyaxonfile import constants
JOB_NAME = 'plxjob-{task_type}{task_idx}-{experiment_uuid}'
DEFAULT_PORT = 2222
ENV_VAR_TEMPLATE = '{name: "{var_name}", value: "{var_value}"}'
VOLUME_NAME = 'pv-{vol_name}'
VOLUME_CLAIM_NAME = 'pvc-{vol_name}'
CLUSTER_CONFIG_MAP_NAME = 'plxcluster-{experiment_uuid}'
CLUSTER_CONFIG_MAP_KEY_NAME = 'plxcluster_{experiment_uuid}_{task_type}'
POD_CONTAINER_PROJECT_NAME = 'plxproject-{project_uuid}-{name}'
DEPLOYMENT_NAME = 'plxproject-{project_uuid}-{name}'
def SIDECAR_ARGS_FN(container_job_name, pod_id):
return ["python3", "api/manage.py", "start_sidecar", pod_id,
"--log_sleep_interval={}".format(settings.JOB_SIDECAR_LOG_SLEEP_INTERVAL),
"--persist={}".format(settings.JOB_SIDECAR_PERSIST)]
DATA_VOLUME = 'data'
OUTPUTS_VOLUME = 'outputs'
<commit_msg>Update naming for spawner jobs<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from django.conf import settings
JOB_NAME = 'plxjob-{task_type}{task_idx}-{experiment_uuid}'
DEFAULT_PORT = 2222
ENV_VAR_TEMPLATE = '{name: "{var_name}", value: "{var_value}"}'
VOLUME_NAME = 'pv-{vol_name}'
VOLUME_CLAIM_NAME = 'pvc-{vol_name}'
CLUSTER_CONFIG_MAP_NAME = 'plxcluster-{experiment_uuid}'
CLUSTER_CONFIG_MAP_KEY_NAME = 'plxcluster_{experiment_uuid}_{task_type}'
POD_CONTAINER_PROJECT_NAME = 'plxproject-{project_uuid}-{name}'
DEPLOYMENT_NAME = 'plxproject-{project_uuid}-{name}'
def SIDECAR_ARGS_FN(pod_id):
return ["python3", "api/manage.py", "start_sidecar", pod_id,
"--log_sleep_interval={}".format(settings.JOB_SIDECAR_LOG_SLEEP_INTERVAL),
"--persist={}".format(settings.JOB_SIDECAR_PERSIST)]
DATA_VOLUME = 'data'
OUTPUTS_VOLUME = 'outputs'
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from django.conf import settings
from polyaxon_schemas.polyaxonfile import constants
JOB_NAME = 'plxjob-{task_type}{task_idx}-{experiment_uuid}'
DEFAULT_PORT = 2222
ENV_VAR_TEMPLATE = '{name: "{var_name}", value: "{var_value}"}'
VOLUME_NAME = 'pv-{vol_name}'
VOLUME_CLAIM_NAME = 'pvc-{vol_name}'
CLUSTER_CONFIG_MAP_NAME = 'plxcluster-{experiment_uuid}'
CLUSTER_CONFIG_MAP_KEY_NAME = 'plxcluster_{experiment_uuid}_{task_type}'
POD_CONTAINER_PROJECT_NAME = 'plxproject-{project_uuid}-{name}'
DEPLOYMENT_NAME = 'plxproject-{project_uuid}-{name}'
def SIDECAR_ARGS_FN(container_job_name, pod_id):
return ["python3", "api/manage.py", "start_sidecar", pod_id,
"--log_sleep_interval={}".format(settings.JOB_SIDECAR_LOG_SLEEP_INTERVAL),
"--persist={}".format(settings.JOB_SIDECAR_PERSIST)]
DATA_VOLUME = 'data'
OUTPUTS_VOLUME = 'outputs'
Update naming for spawner jobs# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from django.conf import settings
JOB_NAME = 'plxjob-{task_type}{task_idx}-{experiment_uuid}'
DEFAULT_PORT = 2222
ENV_VAR_TEMPLATE = '{name: "{var_name}", value: "{var_value}"}'
VOLUME_NAME = 'pv-{vol_name}'
VOLUME_CLAIM_NAME = 'pvc-{vol_name}'
CLUSTER_CONFIG_MAP_NAME = 'plxcluster-{experiment_uuid}'
CLUSTER_CONFIG_MAP_KEY_NAME = 'plxcluster_{experiment_uuid}_{task_type}'
POD_CONTAINER_PROJECT_NAME = 'plxproject-{project_uuid}-{name}'
DEPLOYMENT_NAME = 'plxproject-{project_uuid}-{name}'
def SIDECAR_ARGS_FN(pod_id):
return ["python3", "api/manage.py", "start_sidecar", pod_id,
"--log_sleep_interval={}".format(settings.JOB_SIDECAR_LOG_SLEEP_INTERVAL),
"--persist={}".format(settings.JOB_SIDECAR_PERSIST)]
DATA_VOLUME = 'data'
OUTPUTS_VOLUME = 'outputs'
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from django.conf import settings
from polyaxon_schemas.polyaxonfile import constants
JOB_NAME = 'plxjob-{task_type}{task_idx}-{experiment_uuid}'
DEFAULT_PORT = 2222
ENV_VAR_TEMPLATE = '{name: "{var_name}", value: "{var_value}"}'
VOLUME_NAME = 'pv-{vol_name}'
VOLUME_CLAIM_NAME = 'pvc-{vol_name}'
CLUSTER_CONFIG_MAP_NAME = 'plxcluster-{experiment_uuid}'
CLUSTER_CONFIG_MAP_KEY_NAME = 'plxcluster_{experiment_uuid}_{task_type}'
POD_CONTAINER_PROJECT_NAME = 'plxproject-{project_uuid}-{name}'
DEPLOYMENT_NAME = 'plxproject-{project_uuid}-{name}'
def SIDECAR_ARGS_FN(container_job_name, pod_id):
return ["python3", "api/manage.py", "start_sidecar", pod_id,
"--log_sleep_interval={}".format(settings.JOB_SIDECAR_LOG_SLEEP_INTERVAL),
"--persist={}".format(settings.JOB_SIDECAR_PERSIST)]
DATA_VOLUME = 'data'
OUTPUTS_VOLUME = 'outputs'
<commit_msg>Update naming for spawner jobs<commit_after># -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from django.conf import settings
JOB_NAME = 'plxjob-{task_type}{task_idx}-{experiment_uuid}'
DEFAULT_PORT = 2222
ENV_VAR_TEMPLATE = '{name: "{var_name}", value: "{var_value}"}'
VOLUME_NAME = 'pv-{vol_name}'
VOLUME_CLAIM_NAME = 'pvc-{vol_name}'
CLUSTER_CONFIG_MAP_NAME = 'plxcluster-{experiment_uuid}'
CLUSTER_CONFIG_MAP_KEY_NAME = 'plxcluster_{experiment_uuid}_{task_type}'
POD_CONTAINER_PROJECT_NAME = 'plxproject-{project_uuid}-{name}'
DEPLOYMENT_NAME = 'plxproject-{project_uuid}-{name}'
def SIDECAR_ARGS_FN(pod_id):
return ["python3", "api/manage.py", "start_sidecar", pod_id,
"--log_sleep_interval={}".format(settings.JOB_SIDECAR_LOG_SLEEP_INTERVAL),
"--persist={}".format(settings.JOB_SIDECAR_PERSIST)]
DATA_VOLUME = 'data'
OUTPUTS_VOLUME = 'outputs'
|
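A standalone sketch of how the templated constants above are consumed: str.format fills the named placeholders, and the sidecar command line is assembled from plain settings values. The settings class below is a hypothetical stand-in for django.conf.settings:

JOB_NAME = 'plxjob-{task_type}{task_idx}-{experiment_uuid}'

class settings:  # hypothetical stand-in for django.conf.settings
    JOB_SIDECAR_LOG_SLEEP_INTERVAL = 1
    JOB_SIDECAR_PERSIST = False

def sidecar_args(pod_id):
    # Mirrors SIDECAR_ARGS_FN after the commit: the container job name is
    # no longer needed, only the pod id.
    return ["python3", "api/manage.py", "start_sidecar", pod_id,
            "--log_sleep_interval={}".format(settings.JOB_SIDECAR_LOG_SLEEP_INTERVAL),
            "--persist={}".format(settings.JOB_SIDECAR_PERSIST)]

print(JOB_NAME.format(task_type='worker', task_idx=0, experiment_uuid='a1b2c3'))
# plxjob-worker0-a1b2c3
print(sidecar_args('pod-7'))
# ['python3', 'api/manage.py', 'start_sidecar', 'pod-7',
#  '--log_sleep_interval=1', '--persist=False']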
3607309193c5d8b2b5ce0fd98d976b6e6aa49644
|
test/test_client.py
|
test/test_client.py
|
import pytest
from numpy import random, ceil
from lightning import Lightning, Visualization
class TestLightningAPIClient(object):
@pytest.fixture(scope="module")
def lgn(self, host):
lgn = Lightning(host)
lgn.create_session("test-session")
return lgn
def test_create_generic(self, lgn):
series = random.randn(5, 100)
viz = lgn.plot(data={"series": series}, type='line')
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
def test_ipython_support(self, lgn):
lgn.ipython = True
x = random.randn(100)
viz = lgn.line(x)
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
|
import pytest
from numpy import random, ceil
from lightning import Lightning, Visualization, VisualizationLocal
class TestLightningAPIClient(object):
@pytest.fixture(scope="module")
def lgn(self, host):
lgn = Lightning(host)
lgn.create_session("test-session")
return lgn
def test_create_generic(self, lgn):
series = random.randn(5, 100)
viz = lgn.plot(data={"series": series}, type='line')
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
def test_ipython_support(self, lgn):
lgn.ipython = True
x = random.randn(100)
viz = lgn.line(x)
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
def test_local_mode(self, lgn):
lgn.local = True
x = random.randn(100)
viz = lgn.line(x)
assert isinstance(viz, VisualizationLocal)
assert hasattr(viz, 'id')
|
Add test for local visualization
|
Add test for local visualization
|
Python
|
mit
|
garretstuber/lightning-python,garretstuber/lightning-python,peterkshultz/lightning-python,lightning-viz/lightning-python,garretstuber/lightning-python,lightning-viz/lightning-python,peterkshultz/lightning-python,peterkshultz/lightning-python
|
import pytest
from numpy import random, ceil
from lightning import Lightning, Visualization
class TestLightningAPIClient(object):
@pytest.fixture(scope="module")
def lgn(self, host):
lgn = Lightning(host)
lgn.create_session("test-session")
return lgn
def test_create_generic(self, lgn):
series = random.randn(5, 100)
viz = lgn.plot(data={"series": series}, type='line')
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
def test_ipython_support(self, lgn):
lgn.ipython = True
x = random.randn(100)
viz = lgn.line(x)
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
Add test for local visualization
|
import pytest
from numpy import random, ceil
from lightning import Lightning, Visualization, VisualizationLocal
class TestLightningAPIClient(object):
@pytest.fixture(scope="module")
def lgn(self, host):
lgn = Lightning(host)
lgn.create_session("test-session")
return lgn
def test_create_generic(self, lgn):
series = random.randn(5, 100)
viz = lgn.plot(data={"series": series}, type='line')
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
def test_ipython_support(self, lgn):
lgn.ipython = True
x = random.randn(100)
viz = lgn.line(x)
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
def test_local_mode(self, lgn):
lgn.local = True
x = random.randn(100)
viz = lgn.line(x)
assert isinstance(viz, VisualizationLocal)
assert hasattr(viz, 'id')
|
<commit_before>import pytest
from numpy import random, ceil
from lightning import Lightning, Visualization
class TestLightningAPIClient(object):
@pytest.fixture(scope="module")
def lgn(self, host):
lgn = Lightning(host)
lgn.create_session("test-session")
return lgn
def test_create_generic(self, lgn):
series = random.randn(5, 100)
viz = lgn.plot(data={"series": series}, type='line')
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
def test_ipython_support(self, lgn):
lgn.ipython = True
x = random.randn(100)
viz = lgn.line(x)
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
<commit_msg>Add test for local visualization<commit_after>
|
import pytest
from numpy import random, ceil
from lightning import Lightning, Visualization, VisualizationLocal
class TestLightningAPIClient(object):
@pytest.fixture(scope="module")
def lgn(self, host):
lgn = Lightning(host)
lgn.create_session("test-session")
return lgn
def test_create_generic(self, lgn):
series = random.randn(5, 100)
viz = lgn.plot(data={"series": series}, type='line')
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
def test_ipython_support(self, lgn):
lgn.ipython = True
x = random.randn(100)
viz = lgn.line(x)
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
def test_local_mode(self, lgn):
lgn.local = True
x = random.randn(100)
viz = lgn.line(x)
assert isinstance(viz, VisualizationLocal)
assert hasattr(viz, 'id')
|
import pytest
from numpy import random, ceil
from lightning import Lightning, Visualization
class TestLightningAPIClient(object):
@pytest.fixture(scope="module")
def lgn(self, host):
lgn = Lightning(host)
lgn.create_session("test-session")
return lgn
def test_create_generic(self, lgn):
series = random.randn(5, 100)
viz = lgn.plot(data={"series": series}, type='line')
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
def test_ipython_support(self, lgn):
lgn.ipython = True
x = random.randn(100)
viz = lgn.line(x)
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
Add test for local visualizationimport pytest
from numpy import random, ceil
from lightning import Lightning, Visualization, VisualizationLocal
class TestLightningAPIClient(object):
@pytest.fixture(scope="module")
def lgn(self, host):
lgn = Lightning(host)
lgn.create_session("test-session")
return lgn
def test_create_generic(self, lgn):
series = random.randn(5, 100)
viz = lgn.plot(data={"series": series}, type='line')
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
def test_ipython_support(self, lgn):
lgn.ipython = True
x = random.randn(100)
viz = lgn.line(x)
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
def test_local_mode(self, lgn):
lgn.local = True
x = random.randn(100)
viz = lgn.line(x)
assert isinstance(viz, VisualizationLocal)
assert hasattr(viz, 'id')
|
<commit_before>import pytest
from numpy import random, ceil
from lightning import Lightning, Visualization
class TestLightningAPIClient(object):
@pytest.fixture(scope="module")
def lgn(self, host):
lgn = Lightning(host)
lgn.create_session("test-session")
return lgn
def test_create_generic(self, lgn):
series = random.randn(5, 100)
viz = lgn.plot(data={"series": series}, type='line')
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
def test_ipython_support(self, lgn):
lgn.ipython = True
x = random.randn(100)
viz = lgn.line(x)
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
<commit_msg>Add test for local visualization<commit_after>import pytest
from numpy import random, ceil
from lightning import Lightning, Visualization, VisualizationLocal
class TestLightningAPIClient(object):
@pytest.fixture(scope="module")
def lgn(self, host):
lgn = Lightning(host)
lgn.create_session("test-session")
return lgn
def test_create_generic(self, lgn):
series = random.randn(5, 100)
viz = lgn.plot(data={"series": series}, type='line')
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
def test_ipython_support(self, lgn):
lgn.ipython = True
x = random.randn(100)
viz = lgn.line(x)
assert isinstance(viz, Visualization)
assert hasattr(viz, 'id')
def test_local_mode(self, lgn):
lgn.local = True
x = random.randn(100)
viz = lgn.line(x)
assert isinstance(viz, VisualizationLocal)
assert hasattr(viz, 'id')
|
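The suite above leans on a module-scoped pytest fixture so the client (and its session) is created once and reused by every test in the module. A self-contained illustration of that scoping; FakeClient is a hypothetical stand-in for the Lightning client:

import pytest

class FakeClient:  # hypothetical stand-in for the Lightning client
    instances = 0
    def __init__(self):
        FakeClient.instances += 1

@pytest.fixture(scope="module")
def client():
    # Built once per test module, not once per test function.
    return FakeClient()

def test_first(client):
    assert FakeClient.instances == 1

def test_second(client):
    # Same object as in test_first: the fixture body ran only once.
    assert FakeClient.instances == 1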
427931c5c8847d01e4ce563a9c605a78eceb39f3
|
amplpy/amplpython/__init__.py
|
amplpy/amplpython/__init__.py
|
# -*- coding: utf-8 -*-
import os
import sys
import ctypes
import platform
if platform.system() == 'Windows':
libbase = os.path.join(os.path.dirname(__file__), 'cppinterface', 'lib')
lib32 = os.path.join(libbase, 'intel32')
lib64 = os.path.join(libbase, 'amd64')
from glob import glob
try:
if ctypes.sizeof(ctypes.c_voidp) == 4:
dllfile = glob(lib32 + '/*.dll')[0]
else:
dllfile = glob(lib64 + '/*.dll')[0]
ctypes.CDLL(dllfile)
except:
pass
sys.path.append(os.path.join(os.path.dirname(__file__), 'cppinterface'))
from amplpython import *
from amplpython import _READTABLE, _WRITETABLE
|
# -*- coding: utf-8 -*-
import os
import sys
import ctypes
import platform
if platform.system().startswith(('Windows', 'MSYS', 'CYGWIN', 'MINGW')):
libbase = os.path.join(os.path.dirname(__file__), 'cppinterface', 'lib')
lib32 = os.path.join(libbase, 'intel32')
lib64 = os.path.join(libbase, 'amd64')
from glob import glob
try:
if ctypes.sizeof(ctypes.c_voidp) == 4:
dllfile = glob(lib32 + '/*.dll')[0]
else:
dllfile = glob(lib64 + '/*.dll')[0]
ctypes.CDLL(dllfile)
except:
pass
sys.path.append(os.path.join(os.path.dirname(__file__), 'cppinterface'))
from amplpython import *
from amplpython import _READTABLE, _WRITETABLE
|
Add basic support for MSYS, CYGWIN, and MINGW
|
Add basic support for MSYS, CYGWIN, and MINGW
|
Python
|
bsd-3-clause
|
ampl/amplpy,ampl/amplpy,ampl/amplpy
|
# -*- coding: utf-8 -*-
import os
import sys
import ctypes
import platform
if platform.system() == 'Windows':
libbase = os.path.join(os.path.dirname(__file__), 'cppinterface', 'lib')
lib32 = os.path.join(libbase, 'intel32')
lib64 = os.path.join(libbase, 'amd64')
from glob import glob
try:
if ctypes.sizeof(ctypes.c_voidp) == 4:
dllfile = glob(lib32 + '/*.dll')[0]
else:
dllfile = glob(lib64 + '/*.dll')[0]
ctypes.CDLL(dllfile)
except:
pass
sys.path.append(os.path.join(os.path.dirname(__file__), 'cppinterface'))
from amplpython import *
from amplpython import _READTABLE, _WRITETABLE
Add basic support for MSYS, CYGWIN, and MINGW
|
# -*- coding: utf-8 -*-
import os
import sys
import ctypes
import platform
if platform.system().startswith(('Windows', 'MSYS', 'CYGWIN', 'MINGW')):
libbase = os.path.join(os.path.dirname(__file__), 'cppinterface', 'lib')
lib32 = os.path.join(libbase, 'intel32')
lib64 = os.path.join(libbase, 'amd64')
from glob import glob
try:
if ctypes.sizeof(ctypes.c_voidp) == 4:
dllfile = glob(lib32 + '/*.dll')[0]
else:
dllfile = glob(lib64 + '/*.dll')[0]
ctypes.CDLL(dllfile)
except:
pass
sys.path.append(os.path.join(os.path.dirname(__file__), 'cppinterface'))
from amplpython import *
from amplpython import _READTABLE, _WRITETABLE
|
<commit_before># -*- coding: utf-8 -*-
import os
import sys
import ctypes
import platform
if platform.system() == 'Windows':
libbase = os.path.join(os.path.dirname(__file__), 'cppinterface', 'lib')
lib32 = os.path.join(libbase, 'intel32')
lib64 = os.path.join(libbase, 'amd64')
from glob import glob
try:
if ctypes.sizeof(ctypes.c_voidp) == 4:
dllfile = glob(lib32 + '/*.dll')[0]
else:
dllfile = glob(lib64 + '/*.dll')[0]
ctypes.CDLL(dllfile)
except:
pass
sys.path.append(os.path.join(os.path.dirname(__file__), 'cppinterface'))
from amplpython import *
from amplpython import _READTABLE, _WRITETABLE
<commit_msg>Add basic support for MSYS, CYGWIN, and MINGW<commit_after>
|
# -*- coding: utf-8 -*-
import os
import sys
import ctypes
import platform
if platform.system().startswith(('Windows', 'MSYS', 'CYGWIN', 'MINGW')):
libbase = os.path.join(os.path.dirname(__file__), 'cppinterface', 'lib')
lib32 = os.path.join(libbase, 'intel32')
lib64 = os.path.join(libbase, 'amd64')
from glob import glob
try:
if ctypes.sizeof(ctypes.c_voidp) == 4:
dllfile = glob(lib32 + '/*.dll')[0]
else:
dllfile = glob(lib64 + '/*.dll')[0]
ctypes.CDLL(dllfile)
except:
pass
sys.path.append(os.path.join(os.path.dirname(__file__), 'cppinterface'))
from amplpython import *
from amplpython import _READTABLE, _WRITETABLE
|
# -*- coding: utf-8 -*-
import os
import sys
import ctypes
import platform
if platform.system() == 'Windows':
libbase = os.path.join(os.path.dirname(__file__), 'cppinterface', 'lib')
lib32 = os.path.join(libbase, 'intel32')
lib64 = os.path.join(libbase, 'amd64')
from glob import glob
try:
if ctypes.sizeof(ctypes.c_voidp) == 4:
dllfile = glob(lib32 + '/*.dll')[0]
else:
dllfile = glob(lib64 + '/*.dll')[0]
ctypes.CDLL(dllfile)
except:
pass
sys.path.append(os.path.join(os.path.dirname(__file__), 'cppinterface'))
from amplpython import *
from amplpython import _READTABLE, _WRITETABLE
Add basic support for MSYS, CYGWIN, and MINGW# -*- coding: utf-8 -*-
import os
import sys
import ctypes
import platform
if platform.system().startswith(('Windows', 'MSYS', 'CYGWIN', 'MINGW')):
libbase = os.path.join(os.path.dirname(__file__), 'cppinterface', 'lib')
lib32 = os.path.join(libbase, 'intel32')
lib64 = os.path.join(libbase, 'amd64')
from glob import glob
try:
if ctypes.sizeof(ctypes.c_voidp) == 4:
dllfile = glob(lib32 + '/*.dll')[0]
else:
dllfile = glob(lib64 + '/*.dll')[0]
ctypes.CDLL(dllfile)
except:
pass
sys.path.append(os.path.join(os.path.dirname(__file__), 'cppinterface'))
from amplpython import *
from amplpython import _READTABLE, _WRITETABLE
|
<commit_before># -*- coding: utf-8 -*-
import os
import sys
import ctypes
import platform
if platform.system() == 'Windows':
libbase = os.path.join(os.path.dirname(__file__), 'cppinterface', 'lib')
lib32 = os.path.join(libbase, 'intel32')
lib64 = os.path.join(libbase, 'amd64')
from glob import glob
try:
if ctypes.sizeof(ctypes.c_voidp) == 4:
dllfile = glob(lib32 + '/*.dll')[0]
else:
dllfile = glob(lib64 + '/*.dll')[0]
ctypes.CDLL(dllfile)
except:
pass
sys.path.append(os.path.join(os.path.dirname(__file__), 'cppinterface'))
from amplpython import *
from amplpython import _READTABLE, _WRITETABLE
<commit_msg>Add basic support for MSYS, CYGWIN, and MINGW<commit_after># -*- coding: utf-8 -*-
import os
import sys
import ctypes
import platform
if platform.system().startswith(('Windows', 'MSYS', 'CYGWIN', 'MINGW')):
libbase = os.path.join(os.path.dirname(__file__), 'cppinterface', 'lib')
lib32 = os.path.join(libbase, 'intel32')
lib64 = os.path.join(libbase, 'amd64')
from glob import glob
try:
if ctypes.sizeof(ctypes.c_voidp) == 4:
dllfile = glob(lib32 + '/*.dll')[0]
else:
dllfile = glob(lib64 + '/*.dll')[0]
ctypes.CDLL(dllfile)
except:
pass
sys.path.append(os.path.join(os.path.dirname(__file__), 'cppinterface'))
from amplpython import *
from amplpython import _READTABLE, _WRITETABLE
|
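The portability fix above works because str.startswith accepts a tuple of prefixes. A standalone check against the kind of strings platform.system() typically reports in those environments (the exact version suffixes vary by machine):

WINDOWS_LIKE = ('Windows', 'MSYS', 'CYGWIN', 'MINGW')

for system in ('Windows', 'MSYS_NT-10.0', 'CYGWIN_NT-10.0',
               'MINGW64_NT-10.0', 'Linux', 'Darwin'):
    print(system, system.startswith(WINDOWS_LIKE))
# The first four print True, so the DLL-preloading branch now also runs on
# MSYS2, Cygwin, and MinGW Python builds; Linux and Darwin still skip it.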
0673df239d14edb8d65c17eaa8291ac26fd0b976
|
test_skewstudent.py
|
test_skewstudent.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Testing suite for ARG class.
"""
from __future__ import print_function, division
import unittest as ut
import numpy as np
from skewstudent import SkewStudent
__author__ = "Stanislav Khrapov"
__email__ = "khrapovs@gmail.com"
class ARGTestCase(ut.TestCase):
"""Test SkewStudent distribution class."""
def test_init(self):
"""Test __init__."""
skewt = SkewStudent()
self.assertIsInstance(skewt.nup, float)
self.assertIsInstance(skewt.lam, float)
nup, lam = 5., -.2
skewt = SkewStudent(nup=nup, lam=lam)
self.assertEqual(skewt.nup, nup)
self.assertEqual(skewt.lam, lam)
def test_pdf(self):
"""Test pdf method."""
skewt = SkewStudent()
num = 50
arg = np.linspace(-1, 1, 50)
pdf = skewt.pdf(arg)
self.assertEqual(pdf.shape[0], num)
self.assertIsInstance(skewt.pdf(0), float)
if __name__ == '__main__':
ut.main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Testing suite for SkewStudent class.
"""
from __future__ import print_function, division
import unittest as ut
import numpy as np
from skewstudent import SkewStudent
__author__ = "Stanislav Khrapov"
__email__ = "khrapovs@gmail.com"
class ARGTestCase(ut.TestCase):
"""Test SkewStudent distribution class."""
def test_init(self):
"""Test __init__."""
skewt = SkewStudent()
self.assertIsInstance(skewt.nup, float)
self.assertIsInstance(skewt.lam, float)
nup, lam = 5., -.2
skewt = SkewStudent(nup=nup, lam=lam)
self.assertEqual(skewt.nup, nup)
self.assertEqual(skewt.lam, lam)
def test_pdf(self):
"""Test pdf method."""
skewt = SkewStudent()
num = 50
arg = np.linspace(-1, 1, 50)
pdf = skewt.pdf(arg)
self.assertEqual(pdf.shape[0], num)
self.assertIsInstance(skewt.pdf(0), float)
if __name__ == '__main__':
ut.main()
|
Fix title in the test
|
Fix title in the test
|
Python
|
mit
|
khrapovs/skewstudent
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Testing suite for ARG class.
"""
from __future__ import print_function, division
import unittest as ut
import numpy as np
from skewstudent import SkewStudent
__author__ = "Stanislav Khrapov"
__email__ = "khrapovs@gmail.com"
class ARGTestCase(ut.TestCase):
"""Test SkewStudent distribution class."""
def test_init(self):
"""Test __init__."""
skewt = SkewStudent()
self.assertIsInstance(skewt.nup, float)
self.assertIsInstance(skewt.lam, float)
nup, lam = 5., -.2
skewt = SkewStudent(nup=nup, lam=lam)
self.assertEqual(skewt.nup, nup)
self.assertEqual(skewt.lam, lam)
def test_pdf(self):
"""Test pdf method."""
skewt = SkewStudent()
num = 50
arg = np.linspace(-1, 1, 50)
pdf = skewt.pdf(arg)
self.assertEqual(pdf.shape[0], num)
self.assertIsInstance(skewt.pdf(0), float)
if __name__ == '__main__':
ut.main()
Fix title in the test
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Testing suite for SkewStudent class.
"""
from __future__ import print_function, division
import unittest as ut
import numpy as np
from skewstudent import SkewStudent
__author__ = "Stanislav Khrapov"
__email__ = "khrapovs@gmail.com"
class ARGTestCase(ut.TestCase):
"""Test SkewStudent distribution class."""
def test_init(self):
"""Test __init__."""
skewt = SkewStudent()
self.assertIsInstance(skewt.nup, float)
self.assertIsInstance(skewt.lam, float)
nup, lam = 5., -.2
skewt = SkewStudent(nup=nup, lam=lam)
self.assertEqual(skewt.nup, nup)
self.assertEqual(skewt.lam, lam)
def test_pdf(self):
"""Test pdf method."""
skewt = SkewStudent()
num = 50
arg = np.linspace(-1, 1, 50)
pdf = skewt.pdf(arg)
self.assertEqual(pdf.shape[0], num)
self.assertIsInstance(skewt.pdf(0), float)
if __name__ == '__main__':
ut.main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Testing suite for ARG class.
"""
from __future__ import print_function, division
import unittest as ut
import numpy as np
from skewstudent import SkewStudent
__author__ = "Stanislav Khrapov"
__email__ = "khrapovs@gmail.com"
class ARGTestCase(ut.TestCase):
"""Test SkewStudent distribution class."""
def test_init(self):
"""Test __init__."""
skewt = SkewStudent()
self.assertIsInstance(skewt.nup, float)
self.assertIsInstance(skewt.lam, float)
nup, lam = 5., -.2
skewt = SkewStudent(nup=nup, lam=lam)
self.assertEqual(skewt.nup, nup)
self.assertEqual(skewt.lam, lam)
def test_pdf(self):
"""Test pdf method."""
skewt = SkewStudent()
num = 50
arg = np.linspace(-1, 1, 50)
pdf = skewt.pdf(arg)
self.assertEqual(pdf.shape[0], num)
self.assertIsInstance(skewt.pdf(0), float)
if __name__ == '__main__':
ut.main()
<commit_msg>Fix title in the test<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Testing suite for SkewStudent class.
"""
from __future__ import print_function, division
import unittest as ut
import numpy as np
from skewstudent import SkewStudent
__author__ = "Stanislav Khrapov"
__email__ = "khrapovs@gmail.com"
class ARGTestCase(ut.TestCase):
"""Test SkewStudent distribution class."""
def test_init(self):
"""Test __init__."""
skewt = SkewStudent()
self.assertIsInstance(skewt.nup, float)
self.assertIsInstance(skewt.lam, float)
nup, lam = 5., -.2
skewt = SkewStudent(nup=nup, lam=lam)
self.assertEqual(skewt.nup, nup)
self.assertEqual(skewt.lam, lam)
def test_pdf(self):
"""Test pdf method."""
skewt = SkewStudent()
num = 50
arg = np.linspace(-1, 1, 50)
pdf = skewt.pdf(arg)
self.assertEqual(pdf.shape[0], num)
self.assertIsInstance(skewt.pdf(0), float)
if __name__ == '__main__':
ut.main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Testing suite for ARG class.
"""
from __future__ import print_function, division
import unittest as ut
import numpy as np
from skewstudent import SkewStudent
__author__ = "Stanislav Khrapov"
__email__ = "khrapovs@gmail.com"
class ARGTestCase(ut.TestCase):
"""Test SkewStudent distribution class."""
def test_init(self):
"""Test __init__."""
skewt = SkewStudent()
self.assertIsInstance(skewt.nup, float)
self.assertIsInstance(skewt.lam, float)
nup, lam = 5., -.2
skewt = SkewStudent(nup=nup, lam=lam)
self.assertEqual(skewt.nup, nup)
self.assertEqual(skewt.lam, lam)
def test_pdf(self):
"""Test pdf method."""
skewt = SkewStudent()
num = 50
arg = np.linspace(-1, 1, 50)
pdf = skewt.pdf(arg)
self.assertEqual(pdf.shape[0], num)
self.assertIsInstance(skewt.pdf(0), float)
if __name__ == '__main__':
ut.main()
Fix title in the test#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Testing suite for SkewStudent class.
"""
from __future__ import print_function, division
import unittest as ut
import numpy as np
from skewstudent import SkewStudent
__author__ = "Stanislav Khrapov"
__email__ = "khrapovs@gmail.com"
class ARGTestCase(ut.TestCase):
"""Test SkewStudent distribution class."""
def test_init(self):
"""Test __init__."""
skewt = SkewStudent()
self.assertIsInstance(skewt.nup, float)
self.assertIsInstance(skewt.lam, float)
nup, lam = 5., -.2
skewt = SkewStudent(nup=nup, lam=lam)
self.assertEqual(skewt.nup, nup)
self.assertEqual(skewt.lam, lam)
def test_pdf(self):
"""Test pdf method."""
skewt = SkewStudent()
num = 50
arg = np.linspace(-1, 1, 50)
pdf = skewt.pdf(arg)
self.assertEqual(pdf.shape[0], num)
self.assertIsInstance(skewt.pdf(0), float)
if __name__ == '__main__':
ut.main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Testing suite for ARG class.
"""
from __future__ import print_function, division
import unittest as ut
import numpy as np
from skewstudent import SkewStudent
__author__ = "Stanislav Khrapov"
__email__ = "khrapovs@gmail.com"
class ARGTestCase(ut.TestCase):
"""Test SkewStudent distribution class."""
def test_init(self):
"""Test __init__."""
skewt = SkewStudent()
self.assertIsInstance(skewt.nup, float)
self.assertIsInstance(skewt.lam, float)
nup, lam = 5., -.2
skewt = SkewStudent(nup=nup, lam=lam)
self.assertEqual(skewt.nup, nup)
self.assertEqual(skewt.lam, lam)
def test_pdf(self):
"""Test pdf method."""
skewt = SkewStudent()
num = 50
arg = np.linspace(-1, 1, 50)
pdf = skewt.pdf(arg)
self.assertEqual(pdf.shape[0], num)
self.assertIsInstance(skewt.pdf(0), float)
if __name__ == '__main__':
ut.main()
<commit_msg>Fix title in the test<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Testing suite for SkewStudent class.
"""
from __future__ import print_function, division
import unittest as ut
import numpy as np
from skewstudent import SkewStudent
__author__ = "Stanislav Khrapov"
__email__ = "khrapovs@gmail.com"
class ARGTestCase(ut.TestCase):
"""Test SkewStudent distribution class."""
def test_init(self):
"""Test __init__."""
skewt = SkewStudent()
self.assertIsInstance(skewt.nup, float)
self.assertIsInstance(skewt.lam, float)
nup, lam = 5., -.2
skewt = SkewStudent(nup=nup, lam=lam)
self.assertEqual(skewt.nup, nup)
self.assertEqual(skewt.lam, lam)
def test_pdf(self):
"""Test pdf method."""
skewt = SkewStudent()
num = 50
arg = np.linspace(-1, 1, 50)
pdf = skewt.pdf(arg)
self.assertEqual(pdf.shape[0], num)
self.assertIsInstance(skewt.pdf(0), float)
if __name__ == '__main__':
ut.main()
|
3d5de4b69be9d99fec4a8ffb46338f0684ffac26
|
api/base/waffle_decorators.py
|
api/base/waffle_decorators.py
|
import waffle
from rest_framework.exceptions import NotFound
def require_flag(flag_name):
"""
Decorator to check whether flag is active.
If inactive, raise NotFound.
"""
def wrapper(fn):
def check_flag(*args,**kwargs):
if waffle.flag_is_active(args[0].request, flag_name):
return fn(*args,**kwargs)
else:
raise NotFound('Endpoint is disabled.')
return check_flag
return wrapper
|
import waffle
from rest_framework.exceptions import NotFound
def require_flag(flag_name):
"""
Decorator to check whether waffle flag is active.
If inactive, raise NotFound.
"""
def wrapper(fn):
def check_flag(*args,**kwargs):
if waffle.flag_is_active(args[0].request, flag_name):
return fn(*args,**kwargs)
else:
raise NotFound('Endpoint is disabled.')
return check_flag
return wrapper
def require_switch(switch_name):
"""
Decorator to check whether waffle switch is active.
If inactive, raise NotFound.
"""
def wrapper(fn):
def check_switch(*args,**kwargs):
if waffle.switch_is_active(switch_name):
return fn(*args,**kwargs)
else:
raise NotFound('Endpoint is disabled.')
return check_switch
return wrapper
def require_sample(sample_name):
"""
Decorator to check whether waffle sample is active.
If inactive, raise NotFound.
"""
def wrapper(fn):
def check_sample(*args,**kwargs):
if waffle.sample_is_active(sample_name):
return fn(*args,**kwargs)
else:
raise NotFound('Endpoint is disabled.')
return check_sample
return wrapper
|
Add switch and sample decorators.
|
Add switch and sample decorators.
|
Python
|
apache-2.0
|
brianjgeiger/osf.io,HalcyonChimera/osf.io,chennan47/osf.io,laurenrevere/osf.io,adlius/osf.io,mfraezz/osf.io,binoculars/osf.io,pattisdr/osf.io,binoculars/osf.io,brianjgeiger/osf.io,felliott/osf.io,laurenrevere/osf.io,aaxelb/osf.io,cslzchen/osf.io,aaxelb/osf.io,caseyrollins/osf.io,pattisdr/osf.io,erinspace/osf.io,mfraezz/osf.io,sloria/osf.io,baylee-d/osf.io,laurenrevere/osf.io,Johnetordoff/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,sloria/osf.io,felliott/osf.io,chennan47/osf.io,chennan47/osf.io,pattisdr/osf.io,saradbowman/osf.io,aaxelb/osf.io,binoculars/osf.io,adlius/osf.io,icereval/osf.io,erinspace/osf.io,mattclark/osf.io,leb2dg/osf.io,HalcyonChimera/osf.io,icereval/osf.io,erinspace/osf.io,leb2dg/osf.io,leb2dg/osf.io,HalcyonChimera/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,CenterForOpenScience/osf.io,Johnetordoff/osf.io,caseyrollins/osf.io,CenterForOpenScience/osf.io,icereval/osf.io,mfraezz/osf.io,baylee-d/osf.io,adlius/osf.io,leb2dg/osf.io,cslzchen/osf.io,saradbowman/osf.io,sloria/osf.io,brianjgeiger/osf.io,caseyrollins/osf.io,felliott/osf.io,Johnetordoff/osf.io,baylee-d/osf.io,mattclark/osf.io,mattclark/osf.io,aaxelb/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,felliott/osf.io,brianjgeiger/osf.io,adlius/osf.io
|
import waffle
from rest_framework.exceptions import NotFound
def require_flag(flag_name):
"""
Decorator to check whether flag is active.
If inactive, raise NotFound.
"""
def wrapper(fn):
def check_flag(*args,**kwargs):
if waffle.flag_is_active(args[0].request, flag_name):
return fn(*args,**kwargs)
else:
raise NotFound('Endpoint is disabled.')
return check_flag
return wrapper
Add switch and sample decorators.
|
import waffle
from rest_framework.exceptions import NotFound
def require_flag(flag_name):
"""
Decorator to check whether waffle flag is active.
If inactive, raise NotFound.
"""
def wrapper(fn):
def check_flag(*args,**kwargs):
if waffle.flag_is_active(args[0].request, flag_name):
return fn(*args,**kwargs)
else:
raise NotFound('Endpoint is disabled.')
return check_flag
return wrapper
def require_switch(switch_name):
"""
Decorator to check whether waffle switch is active.
If inactive, raise NotFound.
"""
def wrapper(fn):
def check_switch(*args,**kwargs):
if waffle.switch_is_active(switch_name):
return fn(*args,**kwargs)
else:
raise NotFound('Endpoint is disabled.')
return check_switch
return wrapper
def require_sample(sample_name):
"""
Decorator to check whether waffle sample is active.
If inactive, raise NotFound.
"""
def wrapper(fn):
def check_sample(*args,**kwargs):
if waffle.sample_is_active(sample_name):
return fn(*args,**kwargs)
else:
raise NotFound('Endpoint is disabled.')
return check_sample
return wrapper
|
<commit_before>import waffle
from rest_framework.exceptions import NotFound
def require_flag(flag_name):
"""
Decorator to check whether flag is active.
If inactive, raise NotFound.
"""
def wrapper(fn):
def check_flag(*args,**kwargs):
if waffle.flag_is_active(args[0].request, flag_name):
return fn(*args,**kwargs)
else:
raise NotFound('Endpoint is disabled.')
return check_flag
return wrapper
<commit_msg>Add switch and sample decorators.<commit_after>
|
import waffle
from rest_framework.exceptions import NotFound
def require_flag(flag_name):
"""
Decorator to check whether waffle flag is active.
If inactive, raise NotFound.
"""
def wrapper(fn):
def check_flag(*args,**kwargs):
if waffle.flag_is_active(args[0].request, flag_name):
return fn(*args,**kwargs)
else:
raise NotFound('Endpoint is disabled.')
return check_flag
return wrapper
def require_switch(switch_name):
"""
Decorator to check whether waffle switch is active.
If inactive, raise NotFound.
"""
def wrapper(fn):
def check_switch(*args,**kwargs):
if waffle.switch_is_active(switch_name):
return fn(*args,**kwargs)
else:
raise NotFound('Endpoint is disabled.')
return check_switch
return wrapper
def require_sample(sample_name):
"""
Decorator to check whether waffle sample is active.
If inactive, raise NotFound.
"""
def wrapper(fn):
def check_sample(*args,**kwargs):
if waffle.sample_is_active(sample_name):
return fn(*args,**kwargs)
else:
raise NotFound('Endpoint is disabled.')
return check_sample
return wrapper
|
import waffle
from rest_framework.exceptions import NotFound
def require_flag(flag_name):
"""
Decorator to check whether flag is active.
If inactive, raise NotFound.
"""
def wrapper(fn):
def check_flag(*args,**kwargs):
if waffle.flag_is_active(args[0].request, flag_name):
return fn(*args,**kwargs)
else:
raise NotFound('Endpoint is disabled.')
return check_flag
return wrapper
Add switch and sample decorators.import waffle
from rest_framework.exceptions import NotFound
def require_flag(flag_name):
"""
Decorator to check whether waffle flag is active.
If inactive, raise NotFound.
"""
def wrapper(fn):
def check_flag(*args,**kwargs):
if waffle.flag_is_active(args[0].request, flag_name):
return fn(*args,**kwargs)
else:
raise NotFound('Endpoint is disabled.')
return check_flag
return wrapper
def require_switch(switch_name):
"""
Decorator to check whether waffle switch is active.
If inactive, raise NotFound.
"""
def wrapper(fn):
def check_switch(*args,**kwargs):
if waffle.switch_is_active(switch_name):
return fn(*args,**kwargs)
else:
raise NotFound('Endpoint is disabled.')
return check_switch
return wrapper
def require_sample(sample_name):
"""
Decorator to check whether waffle sample is active.
If inactive, raise NotFound.
"""
def wrapper(fn):
def check_sample(*args,**kwargs):
if waffle.sample_is_active(sample_name):
return fn(*args,**kwargs)
else:
raise NotFound('Endpoint is disabled.')
return check_sample
return wrapper
|
<commit_before>import waffle
from rest_framework.exceptions import NotFound
def require_flag(flag_name):
"""
Decorator to check whether flag is active.
If inactive, raise NotFound.
"""
def wrapper(fn):
def check_flag(*args,**kwargs):
if waffle.flag_is_active(args[0].request, flag_name):
return fn(*args,**kwargs)
else:
raise NotFound('Endpoint is disabled.')
return check_flag
return wrapper
<commit_msg>Add switch and sample decorators.<commit_after>import waffle
from rest_framework.exceptions import NotFound
def require_flag(flag_name):
"""
Decorator to check whether waffle flag is active.
If inactive, raise NotFound.
"""
def wrapper(fn):
def check_flag(*args,**kwargs):
if waffle.flag_is_active(args[0].request, flag_name):
return fn(*args,**kwargs)
else:
raise NotFound('Endpoint is disabled.')
return check_flag
return wrapper
def require_switch(switch_name):
"""
Decorator to check whether waffle switch is active.
If inactive, raise NotFound.
"""
def wrapper(fn):
def check_switch(*args,**kwargs):
if waffle.switch_is_active(switch_name):
return fn(*args,**kwargs)
else:
raise NotFound('Endpoint is disabled.')
return check_switch
return wrapper
def require_sample(sample_name):
"""
Decorator to check whether waffle sample is active.
If inactive, raise NotFound.
"""
def wrapper(fn):
def check_sample(*args,**kwargs):
if waffle.sample_is_active(sample_name):
return fn(*args,**kwargs)
else:
raise NotFound('Endpoint is disabled.')
return check_sample
return wrapper
|
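The three decorators above differ only in which waffle predicate they evaluate. One possible consolidation (a sketch, not part of the recorded commit) derives them from a single factory and adds functools.wraps so the decorated view keeps its name and docstring; waffle and NotFound are assumed importable exactly as in the record:

import functools
import waffle
from rest_framework.exceptions import NotFound

def _require(is_active):
    def decorator(fn):
        @functools.wraps(fn)
        def wrapped(*args, **kwargs):
            if is_active(*args, **kwargs):
                return fn(*args, **kwargs)
            raise NotFound('Endpoint is disabled.')
        return wrapped
    return decorator

def require_flag(flag_name):
    # The first positional argument is the view instance, as in the record.
    return _require(lambda self, *a, **kw: waffle.flag_is_active(self.request, flag_name))

def require_switch(switch_name):
    return _require(lambda *a, **kw: waffle.switch_is_active(switch_name))

def require_sample(sample_name):
    return _require(lambda *a, **kw: waffle.sample_is_active(sample_name))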
66e16d6e3d80ab81967232d5d154c64c8e277def
|
robotpy_ext/misc/periodic_filter.py
|
robotpy_ext/misc/periodic_filter.py
|
import logging
import wpilib
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period):
'''
:param period: Wait period (in seconds) between logs
'''
self.period = period
self.loggingLoop = True
self._last_log = -period
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self.parent.loggingLoop or record.levelno > logging.INFO
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = wpilib.Timer.getFPGATimestamp()
self.loggingLoop = False
if now - self.__last_log > self.logging_interval:
self.loggingLoop = True
self.__last_log = now
|
import logging
import wpilib
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period, bypassLevel=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
'''
self.period = period
self.loggingLoop = True
self._last_log = -period
self.bypassLevel = bypassLevel
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self.parent.loggingLoop or record.levelno >= self.bypassLevel
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = wpilib.Timer.getFPGATimestamp()
self.loggingLoop = False
if now - self.__last_log > self.logging_interval:
self.loggingLoop = True
self.__last_log = now
|
Allow user to select bypass level
|
Allow user to select bypass level
|
Python
|
bsd-3-clause
|
Twinters007/robotpy-wpilib-utilities,robotpy/robotpy-wpilib-utilities,robotpy/robotpy-wpilib-utilities,Twinters007/robotpy-wpilib-utilities
|
import logging
import wpilib
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period):
'''
:param period: Wait period (in seconds) between logs
'''
self.period = period
self.loggingLoop = True
self._last_log = -period
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self.parent.loggingLoop or record.levelno > logging.INFO
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = wpilib.Timer.getFPGATimestamp()
self.loggingLoop = False
if now - self.__last_log > self.logging_interval:
self.loggingLoop = True
self.__last_log = now
Allow user to select bypass level
|
import logging
import wpilib
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period, bypassLevel=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
'''
self.period = period
self.loggingLoop = True
self._last_log = -period
self.bypassLevel = bypassLevel
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self.parent.loggingLoop or record.levelno >= self.bypassLevel
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = wpilib.Timer.getFPGATimestamp()
self.loggingLoop = False
if now - self.__last_log > self.logging_interval:
self.loggingLoop = True
self.__last_log = now
|
<commit_before>import logging
import wpilib
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period):
'''
:param period: Wait period (in seconds) between logs
'''
self.period = period
self.loggingLoop = True
self._last_log = -period
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self.parent.loggingLoop or record.levelno > logging.INFO
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = wpilib.Timer.getFPGATimestamp()
self.loggingLoop = False
if now - self.__last_log > self.logging_interval:
self.loggingLoop = True
self.__last_log = now
<commit_msg>Allow user to select bypass level<commit_after>
|
import logging
import wpilib
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period, bypassLevel=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
'''
self.period = period
self.loggingLoop = True
self._last_log = -period
self.bypassLevel = bypassLevel
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self.parent.loggingLoop or record.levelno >= self.bypassLevel
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = wpilib.Timer.getFPGATimestamp()
self.loggingLoop = False
if now - self.__last_log > self.logging_interval:
self.loggingLoop = True
self.__last_log = now
|
import logging
import wpilib
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period):
'''
:param period: Wait period (in seconds) between logs
'''
self.period = period
self.loggingLoop = True
self._last_log = -period
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self.parent.loggingLoop or record.levelno > logging.INFO
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = wpilib.Timer.getFPGATimestamp()
self.loggingLoop = False
if now - self.__last_log > self.logging_interval:
self.loggingLoop = True
self.__last_log = now
Allow user to select bypass levelimport logging
import wpilib
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period, bypassLevel=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
'''
self.period = period
self.loggingLoop = True
self._last_log = -period
self.bypassLevel = bypassLevel
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self.parent.loggingLoop or record.levelno >= self.bypassLevel
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = wpilib.Timer.getFPGATimestamp()
self.loggingLoop = False
if now - self.__last_log > self.logging_interval:
self.loggingLoop = True
self.__last_log = now
|
<commit_before>import logging
import wpilib
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period):
'''
:param period: Wait period (in seconds) between logs
'''
self.period = period
self.loggingLoop = True
self._last_log = -period
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self.parent.loggingLoop or record.levelno > logging.INFO
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = wpilib.Timer.getFPGATimestamp()
self.loggingLoop = False
if now - self.__last_log > self.logging_interval:
self.loggingLoop = True
self.__last_log = now
<commit_msg>Allow user to select bypass level<commit_after>import logging
import wpilib
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period, bypassLevel=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
'''
self.period = period
self.loggingLoop = True
self._last_log = -period
self.bypassLevel = bypassLevel
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self.parent.loggingLoop or record.levelno >= self.bypassLevel
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = wpilib.Timer.getFPGATimestamp()
self.loggingLoop = False
if now - self.__last_log > self.logging_interval:
self.loggingLoop = True
self.__last_log = now
|
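Both snapshots of PeriodicFilter mix attribute names: __init__ sets self._last_log and self.period, while _refresh_logger reads self.__last_log and self.logging_interval, and filter() consults an undefined self.parent. A self-contained sketch of the intended behavior, with the names unified and time.monotonic() standing in for wpilib.Timer.getFPGATimestamp() so it runs off-robot:

import logging
import time

class PeriodicFilter(logging.Filter):
    """Pass records at most once per `period` seconds; records at or
    above `bypass_level` always pass (the knob this commit adds)."""

    def __init__(self, period, bypass_level=logging.WARN):
        super().__init__()
        self.period = period
        self.bypass_level = bypass_level
        self._last_log = -period  # guarantees the first record passes

    def filter(self, record):
        if record.levelno >= self.bypass_level:
            return True
        now = time.monotonic()  # stand-in for the FPGA timestamp
        if now - self._last_log > self.period:
            self._last_log = now
            return True
        return False

logger = logging.getLogger("drive")
logger.addFilter(PeriodicFilter(period=1.0))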
56a89d57824d3bd25ac235a8e360d528edd9a7cf
|
test/factories/blogpost_factory.py
|
test/factories/blogpost_factory.py
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from pybossa.model import db
from pybossa.model.blogpost import Blogpost
from . import BaseFactory, factory
class BlogpostFactory(BaseFactory):
FACTORY_FOR = Blogpost
id = factory.Sequence(lambda n: n)
title = u'Blogpost title'
body = u'Blogpost body text'
app = factory.SubFactory('factories.AppFactory')
app_id = factory.LazyAttribute(lambda blogpost: blogpost.app.id)
owner = factory.SelfAttribute('app.owner')
user_id = factory.LazyAttribute(lambda blogpost: blogpost.owner.id)
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from pybossa.model import db
from pybossa.model.blogpost import Blogpost
from . import BaseFactory, factory
class BlogpostFactory(BaseFactory):
FACTORY_FOR = Blogpost
id = factory.Sequence(lambda n: n)
title = u'Blogpost title'
body = u'Blogpost body text'
app = factory.SubFactory('factories.AppFactory')
app_id = factory.LazyAttribute(lambda blogpost: blogpost.app.id)
owner = factory.SelfAttribute('app.owner')
user_id = factory.LazyAttribute(
lambda blogpost: blogpost.owner.id if blogpost.owner else None)
|
Fix for nullable author in blogpost factory
|
Fix for nullable author in blogpost factory
|
Python
|
agpl-3.0
|
OpenNewsLabs/pybossa,proyectos-analizo-info/pybossa-analizo-info,proyectos-analizo-info/pybossa-analizo-info,geotagx/pybossa,inteligencia-coletiva-lsd/pybossa,jean/pybossa,proyectos-analizo-info/pybossa-analizo-info,harihpr/tweetclickers,stefanhahmann/pybossa,OpenNewsLabs/pybossa,Scifabric/pybossa,inteligencia-coletiva-lsd/pybossa,jean/pybossa,harihpr/tweetclickers,stefanhahmann/pybossa,PyBossa/pybossa,Scifabric/pybossa,geotagx/pybossa,PyBossa/pybossa
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from pybossa.model import db
from pybossa.model.blogpost import Blogpost
from . import BaseFactory, factory
class BlogpostFactory(BaseFactory):
FACTORY_FOR = Blogpost
id = factory.Sequence(lambda n: n)
title = u'Blogpost title'
body = u'Blogpost body text'
app = factory.SubFactory('factories.AppFactory')
app_id = factory.LazyAttribute(lambda blogpost: blogpost.app.id)
owner = factory.SelfAttribute('app.owner')
user_id = factory.LazyAttribute(lambda blogpost: blogpost.owner.id)
Fix for nullable author in blogpost factory
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from pybossa.model import db
from pybossa.model.blogpost import Blogpost
from . import BaseFactory, factory
class BlogpostFactory(BaseFactory):
FACTORY_FOR = Blogpost
id = factory.Sequence(lambda n: n)
title = u'Blogpost title'
body = u'Blogpost body text'
app = factory.SubFactory('factories.AppFactory')
app_id = factory.LazyAttribute(lambda blogpost: blogpost.app.id)
owner = factory.SelfAttribute('app.owner')
user_id = factory.LazyAttribute(
lambda blogpost: blogpost.owner.id if blogpost.owner else None)
|
<commit_before># -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from pybossa.model import db
from pybossa.model.blogpost import Blogpost
from . import BaseFactory, factory
class BlogpostFactory(BaseFactory):
FACTORY_FOR = Blogpost
id = factory.Sequence(lambda n: n)
title = u'Blogpost title'
body = u'Blogpost body text'
app = factory.SubFactory('factories.AppFactory')
app_id = factory.LazyAttribute(lambda blogpost: blogpost.app.id)
owner = factory.SelfAttribute('app.owner')
user_id = factory.LazyAttribute(lambda blogpost: blogpost.owner.id)
<commit_msg>Fix for nullable author in blogpost factory<commit_after>
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from pybossa.model import db
from pybossa.model.blogpost import Blogpost
from . import BaseFactory, factory
class BlogpostFactory(BaseFactory):
FACTORY_FOR = Blogpost
id = factory.Sequence(lambda n: n)
title = u'Blogpost title'
body = u'Blogpost body text'
app = factory.SubFactory('factories.AppFactory')
app_id = factory.LazyAttribute(lambda blogpost: blogpost.app.id)
owner = factory.SelfAttribute('app.owner')
user_id = factory.LazyAttribute(
lambda blogpost: blogpost.owner.id if blogpost.owner else None)
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from pybossa.model import db
from pybossa.model.blogpost import Blogpost
from . import BaseFactory, factory
class BlogpostFactory(BaseFactory):
FACTORY_FOR = Blogpost
id = factory.Sequence(lambda n: n)
title = u'Blogpost title'
body = u'Blogpost body text'
app = factory.SubFactory('factories.AppFactory')
app_id = factory.LazyAttribute(lambda blogpost: blogpost.app.id)
owner = factory.SelfAttribute('app.owner')
user_id = factory.LazyAttribute(lambda blogpost: blogpost.owner.id)
Fix for nullable author in blogpost factory# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from pybossa.model import db
from pybossa.model.blogpost import Blogpost
from . import BaseFactory, factory
class BlogpostFactory(BaseFactory):
FACTORY_FOR = Blogpost
id = factory.Sequence(lambda n: n)
title = u'Blogpost title'
body = u'Blogpost body text'
app = factory.SubFactory('factories.AppFactory')
app_id = factory.LazyAttribute(lambda blogpost: blogpost.app.id)
owner = factory.SelfAttribute('app.owner')
user_id = factory.LazyAttribute(
lambda blogpost: blogpost.owner.id if blogpost.owner else None)
|
<commit_before># -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from pybossa.model import db
from pybossa.model.blogpost import Blogpost
from . import BaseFactory, factory
class BlogpostFactory(BaseFactory):
FACTORY_FOR = Blogpost
id = factory.Sequence(lambda n: n)
title = u'Blogpost title'
body = u'Blogpost body text'
app = factory.SubFactory('factories.AppFactory')
app_id = factory.LazyAttribute(lambda blogpost: blogpost.app.id)
owner = factory.SelfAttribute('app.owner')
user_id = factory.LazyAttribute(lambda blogpost: blogpost.owner.id)
<commit_msg>Fix for nullable author in blogpost factory<commit_after># -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from pybossa.model import db
from pybossa.model.blogpost import Blogpost
from . import BaseFactory, factory
class BlogpostFactory(BaseFactory):
FACTORY_FOR = Blogpost
id = factory.Sequence(lambda n: n)
title = u'Blogpost title'
body = u'Blogpost body text'
app = factory.SubFactory('factories.AppFactory')
app_id = factory.LazyAttribute(lambda blogpost: blogpost.app.id)
owner = factory.SelfAttribute('app.owner')
user_id = factory.LazyAttribute(
lambda blogpost: blogpost.owner.id if blogpost.owner else None)
|
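The change is a one-line guard: user_id now tolerates a blogpost built with owner=None. The same pattern generalizes to any nullable related object; a framework-agnostic sketch using factory_boy's newer class Meta declaration (the record above uses the legacy FACTORY_FOR style) with illustrative model names:

import factory

class Author:
    def __init__(self, id):
        self.id = id

class Comment:
    def __init__(self, author=None, author_id=None):
        self.author = author
        self.author_id = author_id

class CommentFactory(factory.Factory):
    class Meta:
        model = Comment

    author = factory.LazyFunction(lambda: Author(id=1))
    # The guard mirrors the commit: only dereference .id when the
    # related object is actually present.
    author_id = factory.LazyAttribute(
        lambda c: c.author.id if c.author else None)

signed = CommentFactory()                # author_id == 1
anonymous = CommentFactory(author=None)  # author_id is None, no AttributeError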
4831c45b53d53d31a6514d5c3e2d0465283b4076
|
topological_sort.py
|
topological_sort.py
|
def topological_sort():
pass
def main():
pass
if __name__ == '__main__':
main()
|
def topological_sort_recur():
"""Topological Sorting by Recursion."""
pass
def topological_sort():
"""Topological Sorting for Directed Acyclic Graph (DAG)."""
pass
def main():
# DAG.
adjacency_dict = {
'0': {},
'1': {},
'2': {'3'},
'3': {'1'},
'4': {'0', '1'},
'5': {'0', '2'}
}
if __name__ == '__main__':
main()
|
Add topological_sort_recur(), 2 functions' doc strings and DAG
|
Add topological_sort_recur(), 2 functions' doc strings and DAG
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
def topological_sort():
pass
def main():
pass
if __name__ == '__main__':
main()
Add topological_sort_recur(), 2 functions' doc strings and DAG
|
def topological_sort_recur():
"""Topological Sorting by Recursion."""
pass
def topological_sort():
"""Topological Sorting for Directed Acyclic Graph (DAG)."""
pass
def main():
# DAG.
adjacency_dict = {
'0': {},
'1': {},
'2': {'3'},
'3': {'1'},
'4': {'0', '1'},
'5': {'0', '2'}
}
if __name__ == '__main__':
main()
|
<commit_before>def topological_sort():
pass
def main():
pass
if __name__ == '__main__':
main()
<commit_msg>Add topological_sort_recur(), 2 functions' doc strings and DAG<commit_after>
|
def topological_sort_recur():
"""Topological Sorting by Recursion."""
pass
def topological_sort():
"""Topological Sorting for Directed Acyclic Graph (DAG)."""
pass
def main():
# DAG.
adjacency_dict = {
'0': {},
'1': {},
'2': {'3'},
'3': {'1'},
'4': {'0', '1'},
'5': {'0', '2'}
}
if __name__ == '__main__':
main()
|
def topological_sort():
pass
def main():
pass
if __name__ == '__main__':
main()
Add topological_sort_recur(), 2 functions' doc strings and DAGdef topological_sort_recur():
"""Topological Sorting by Recursion."""
pass
def topological_sort():
"""Topological Sorting for Directed Acyclic Graph (DAG)."""
pass
def main():
# DAG.
adjacency_dict = {
'0': {},
'1': {},
'2': {'3'},
'3': {'1'},
'4': {'0', '1'},
'5': {'0', '2'}
}
if __name__ == '__main__':
main()
|
<commit_before>def topological_sort():
pass
def main():
pass
if __name__ == '__main__':
main()
<commit_msg>Add topological_sort_recur(), 2 functions' doc strings and DAG<commit_after>def topological_sort_recur():
"""Topological Sorting by Recursion."""
pass
def topological_sort():
"""Topological Sorting for Directed Acyclic Graph (DAG)."""
pass
def main():
# DAG.
adjacency_dict = {
'0': {},
'1': {},
'2': {'3'},
'3': {'1'},
'4': {'0', '1'},
'5': {'0', '2'}
}
if __name__ == '__main__':
main()
|
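The committed file only stubs both functions and declares the DAG, where each key maps to its set of successors (e.g. '5': {'0', '2'} means edges 5→0 and 5→2). One reasonable completion, not necessarily what the author wrote later, is reverse-postorder DFS:

def topological_sort_recur(adjacency_dict, v, visited, result):
    """DFS from v; append v only after all of its descendants (postorder)."""
    visited.add(v)
    for neighbor in adjacency_dict[v]:
        if neighbor not in visited:
            topological_sort_recur(adjacency_dict, neighbor, visited, result)
    result.append(v)

def topological_sort(adjacency_dict):
    """Topological sort of a DAG given as {vertex: set of successors}."""
    visited = set()
    result = []
    for v in adjacency_dict:
        if v not in visited:
            topological_sort_recur(adjacency_dict, v, visited, result)
    return result[::-1]  # reverse postorder = topological order

adjacency_dict = {
    '0': set(), '1': set(), '2': {'3'},
    '3': {'1'}, '4': {'0', '1'}, '5': {'0', '2'},
}
print(topological_sort(adjacency_dict))  # ['5', '4', '2', '3', '1', '0']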
7c69d30de5aa58d330a183a0e5015e67c36ca7bc
|
spacy/tests/regression/test_issue4674.py
|
spacy/tests/regression/test_issue4674.py
|
# coding: utf-8
from __future__ import unicode_literals
from spacy.kb import KnowledgeBase
from spacy.util import ensure_path
from spacy.lang.en import English
from spacy.tests.util import make_tempdir
def test_issue4674():
"""Test that setting entities with overlapping identifiers does not mess up IO"""
nlp = English()
kb = KnowledgeBase(nlp.vocab, entity_vector_length=3)
vector1 = [0.9, 1.1, 1.01]
vector2 = [1.8, 2.25, 2.01]
kb.set_entities(
entity_list=["Q1", "Q1"], freq_list=[32, 111], vector_list=[vector1, vector2]
)
assert kb.get_size_entities() == 1
# dumping to file & loading back in
with make_tempdir() as d:
dir_path = ensure_path(d)
if not dir_path.exists():
dir_path.mkdir()
file_path = dir_path / "kb"
kb.dump(str(file_path))
kb2 = KnowledgeBase(vocab=nlp.vocab, entity_vector_length=3)
kb2.load_bulk(str(file_path))
assert kb2.get_size_entities() == 1
|
# coding: utf-8
from __future__ import unicode_literals
import pytest
from spacy.kb import KnowledgeBase
from spacy.util import ensure_path
from spacy.lang.en import English
from ..tests.util import make_tempdir
def test_issue4674():
"""Test that setting entities with overlapping identifiers does not mess up IO"""
nlp = English()
kb = KnowledgeBase(nlp.vocab, entity_vector_length=3)
vector1 = [0.9, 1.1, 1.01]
vector2 = [1.8, 2.25, 2.01]
with pytest.warns(UserWarning):
kb.set_entities(
entity_list=["Q1", "Q1"],
freq_list=[32, 111],
vector_list=[vector1, vector2],
)
assert kb.get_size_entities() == 1
# dumping to file & loading back in
with make_tempdir() as d:
dir_path = ensure_path(d)
if not dir_path.exists():
dir_path.mkdir()
file_path = dir_path / "kb"
kb.dump(str(file_path))
kb2 = KnowledgeBase(vocab=nlp.vocab, entity_vector_length=3)
kb2.load_bulk(str(file_path))
assert kb2.get_size_entities() == 1
|
Tidy up and expect warning
|
Tidy up and expect warning
|
Python
|
mit
|
honnibal/spaCy,spacy-io/spaCy,explosion/spaCy,explosion/spaCy,explosion/spaCy,explosion/spaCy,spacy-io/spaCy,explosion/spaCy,honnibal/spaCy,spacy-io/spaCy,spacy-io/spaCy,honnibal/spaCy,spacy-io/spaCy,spacy-io/spaCy,explosion/spaCy,honnibal/spaCy
|
# coding: utf-8
from __future__ import unicode_literals
from spacy.kb import KnowledgeBase
from spacy.util import ensure_path
from spacy.lang.en import English
from spacy.tests.util import make_tempdir
def test_issue4674():
"""Test that setting entities with overlapping identifiers does not mess up IO"""
nlp = English()
kb = KnowledgeBase(nlp.vocab, entity_vector_length=3)
vector1 = [0.9, 1.1, 1.01]
vector2 = [1.8, 2.25, 2.01]
kb.set_entities(
entity_list=["Q1", "Q1"], freq_list=[32, 111], vector_list=[vector1, vector2]
)
assert kb.get_size_entities() == 1
# dumping to file & loading back in
with make_tempdir() as d:
dir_path = ensure_path(d)
if not dir_path.exists():
dir_path.mkdir()
file_path = dir_path / "kb"
kb.dump(str(file_path))
kb2 = KnowledgeBase(vocab=nlp.vocab, entity_vector_length=3)
kb2.load_bulk(str(file_path))
assert kb2.get_size_entities() == 1
Tidy up and expect warning
|
# coding: utf-8
from __future__ import unicode_literals
import pytest
from spacy.kb import KnowledgeBase
from spacy.util import ensure_path
from spacy.lang.en import English
from ..tests.util import make_tempdir
def test_issue4674():
"""Test that setting entities with overlapping identifiers does not mess up IO"""
nlp = English()
kb = KnowledgeBase(nlp.vocab, entity_vector_length=3)
vector1 = [0.9, 1.1, 1.01]
vector2 = [1.8, 2.25, 2.01]
with pytest.warns(UserWarning):
kb.set_entities(
entity_list=["Q1", "Q1"],
freq_list=[32, 111],
vector_list=[vector1, vector2],
)
assert kb.get_size_entities() == 1
# dumping to file & loading back in
with make_tempdir() as d:
dir_path = ensure_path(d)
if not dir_path.exists():
dir_path.mkdir()
file_path = dir_path / "kb"
kb.dump(str(file_path))
kb2 = KnowledgeBase(vocab=nlp.vocab, entity_vector_length=3)
kb2.load_bulk(str(file_path))
assert kb2.get_size_entities() == 1
|
<commit_before># coding: utf-8
from __future__ import unicode_literals
from spacy.kb import KnowledgeBase
from spacy.util import ensure_path
from spacy.lang.en import English
from spacy.tests.util import make_tempdir
def test_issue4674():
"""Test that setting entities with overlapping identifiers does not mess up IO"""
nlp = English()
kb = KnowledgeBase(nlp.vocab, entity_vector_length=3)
vector1 = [0.9, 1.1, 1.01]
vector2 = [1.8, 2.25, 2.01]
kb.set_entities(
entity_list=["Q1", "Q1"], freq_list=[32, 111], vector_list=[vector1, vector2]
)
assert kb.get_size_entities() == 1
# dumping to file & loading back in
with make_tempdir() as d:
dir_path = ensure_path(d)
if not dir_path.exists():
dir_path.mkdir()
file_path = dir_path / "kb"
kb.dump(str(file_path))
kb2 = KnowledgeBase(vocab=nlp.vocab, entity_vector_length=3)
kb2.load_bulk(str(file_path))
assert kb2.get_size_entities() == 1
<commit_msg>Tidy up and expect warning<commit_after>
|
# coding: utf-8
from __future__ import unicode_literals
import pytest
from spacy.kb import KnowledgeBase
from spacy.util import ensure_path
from spacy.lang.en import English
from ..tests.util import make_tempdir
def test_issue4674():
"""Test that setting entities with overlapping identifiers does not mess up IO"""
nlp = English()
kb = KnowledgeBase(nlp.vocab, entity_vector_length=3)
vector1 = [0.9, 1.1, 1.01]
vector2 = [1.8, 2.25, 2.01]
with pytest.warns(UserWarning):
kb.set_entities(
entity_list=["Q1", "Q1"],
freq_list=[32, 111],
vector_list=[vector1, vector2],
)
assert kb.get_size_entities() == 1
# dumping to file & loading back in
with make_tempdir() as d:
dir_path = ensure_path(d)
if not dir_path.exists():
dir_path.mkdir()
file_path = dir_path / "kb"
kb.dump(str(file_path))
kb2 = KnowledgeBase(vocab=nlp.vocab, entity_vector_length=3)
kb2.load_bulk(str(file_path))
assert kb2.get_size_entities() == 1
|
# coding: utf-8
from __future__ import unicode_literals
from spacy.kb import KnowledgeBase
from spacy.util import ensure_path
from spacy.lang.en import English
from spacy.tests.util import make_tempdir
def test_issue4674():
"""Test that setting entities with overlapping identifiers does not mess up IO"""
nlp = English()
kb = KnowledgeBase(nlp.vocab, entity_vector_length=3)
vector1 = [0.9, 1.1, 1.01]
vector2 = [1.8, 2.25, 2.01]
kb.set_entities(
entity_list=["Q1", "Q1"], freq_list=[32, 111], vector_list=[vector1, vector2]
)
assert kb.get_size_entities() == 1
# dumping to file & loading back in
with make_tempdir() as d:
dir_path = ensure_path(d)
if not dir_path.exists():
dir_path.mkdir()
file_path = dir_path / "kb"
kb.dump(str(file_path))
kb2 = KnowledgeBase(vocab=nlp.vocab, entity_vector_length=3)
kb2.load_bulk(str(file_path))
assert kb2.get_size_entities() == 1
Tidy up and expect warning# coding: utf-8
from __future__ import unicode_literals
import pytest
from spacy.kb import KnowledgeBase
from spacy.util import ensure_path
from spacy.lang.en import English
from ..tests.util import make_tempdir
def test_issue4674():
"""Test that setting entities with overlapping identifiers does not mess up IO"""
nlp = English()
kb = KnowledgeBase(nlp.vocab, entity_vector_length=3)
vector1 = [0.9, 1.1, 1.01]
vector2 = [1.8, 2.25, 2.01]
with pytest.warns(UserWarning):
kb.set_entities(
entity_list=["Q1", "Q1"],
freq_list=[32, 111],
vector_list=[vector1, vector2],
)
assert kb.get_size_entities() == 1
# dumping to file & loading back in
with make_tempdir() as d:
dir_path = ensure_path(d)
if not dir_path.exists():
dir_path.mkdir()
file_path = dir_path / "kb"
kb.dump(str(file_path))
kb2 = KnowledgeBase(vocab=nlp.vocab, entity_vector_length=3)
kb2.load_bulk(str(file_path))
assert kb2.get_size_entities() == 1
|
<commit_before># coding: utf-8
from __future__ import unicode_literals
from spacy.kb import KnowledgeBase
from spacy.util import ensure_path
from spacy.lang.en import English
from spacy.tests.util import make_tempdir
def test_issue4674():
"""Test that setting entities with overlapping identifiers does not mess up IO"""
nlp = English()
kb = KnowledgeBase(nlp.vocab, entity_vector_length=3)
vector1 = [0.9, 1.1, 1.01]
vector2 = [1.8, 2.25, 2.01]
kb.set_entities(
entity_list=["Q1", "Q1"], freq_list=[32, 111], vector_list=[vector1, vector2]
)
assert kb.get_size_entities() == 1
# dumping to file & loading back in
with make_tempdir() as d:
dir_path = ensure_path(d)
if not dir_path.exists():
dir_path.mkdir()
file_path = dir_path / "kb"
kb.dump(str(file_path))
kb2 = KnowledgeBase(vocab=nlp.vocab, entity_vector_length=3)
kb2.load_bulk(str(file_path))
assert kb2.get_size_entities() == 1
<commit_msg>Tidy up and expect warning<commit_after># coding: utf-8
from __future__ import unicode_literals
import pytest
from spacy.kb import KnowledgeBase
from spacy.util import ensure_path
from spacy.lang.en import English
from ..tests.util import make_tempdir
def test_issue4674():
"""Test that setting entities with overlapping identifiers does not mess up IO"""
nlp = English()
kb = KnowledgeBase(nlp.vocab, entity_vector_length=3)
vector1 = [0.9, 1.1, 1.01]
vector2 = [1.8, 2.25, 2.01]
with pytest.warns(UserWarning):
kb.set_entities(
entity_list=["Q1", "Q1"],
freq_list=[32, 111],
vector_list=[vector1, vector2],
)
assert kb.get_size_entities() == 1
# dumping to file & loading back in
with make_tempdir() as d:
dir_path = ensure_path(d)
if not dir_path.exists():
dir_path.mkdir()
file_path = dir_path / "kb"
kb.dump(str(file_path))
kb2 = KnowledgeBase(vocab=nlp.vocab, entity_vector_length=3)
kb2.load_bulk(str(file_path))
assert kb2.get_size_entities() == 1
|
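Two things changed here: set_entities is now wrapped in pytest.warns(UserWarning), which both tolerates the duplicate-identifier warning and fails the test if it ever stops being emitted, and the import was made relative. The warning pattern in isolation, with an illustrative helper that is not spaCy's:

import warnings

import pytest

def set_entities(entity_list):
    """Illustrative stand-in: warn on duplicate ids, keep the unique ones."""
    if len(set(entity_list)) != len(entity_list):
        warnings.warn("duplicate entity identifiers", UserWarning)
    return set(entity_list)

def test_duplicate_ids_warn():
    with pytest.warns(UserWarning):
        entities = set_entities(["Q1", "Q1"])
    assert len(entities) == 1

One wrinkle worth double-checking in the new version: from spacy/tests/regression/, the relative form ..tests.util would resolve to spacy.tests.tests.util; ..util (or the original absolute spacy.tests.util) is the likelier intent.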
455cf39de018762d22b5d212f3a2c08491840bbf
|
tests/integration/cli/sync_test.py
|
tests/integration/cli/sync_test.py
|
from ...testcases import DustyIntegrationTestCase
from ...fixtures import busybox_single_app_bundle_fixture
class TestSyncCLI(DustyIntegrationTestCase):
def setUp(self):
super(TestSyncCLI, self).setUp()
busybox_single_app_bundle_fixture()
self.run_command('bundles activate busyboxa')
self.run_command('up')
def tearDown(self):
super(TestSyncCLI, self).tearDown()
self.run_command('bundles deactivate busyboxa')
try:
self.run_command('stop')
except Exception:
pass
def test_sync_repo(self):
self.exec_in_container('busyboxa', 'rm -rf /repo')
self.assertFileNotInContainer('busyboxa', '/repo/README.md')
self.run_command('sync fake-repo')
self.assertFileContentsInContainer('busyboxa',
'/repo/README.md',
'# fake-repo')
|
from ...testcases import DustyIntegrationTestCase
from ...fixtures import busybox_single_app_bundle_fixture
class TestSyncCLI(DustyIntegrationTestCase):
def setUp(self):
super(TestSyncCLI, self).setUp()
busybox_single_app_bundle_fixture()
self.run_command('bundles activate busyboxa')
self.run_command('up')
def tearDown(self):
self.run_command('bundles deactivate busyboxa')
try:
self.run_command('stop')
except Exception:
pass
super(TestSyncCLI, self).tearDown()
def test_sync_repo(self):
self.exec_in_container('busyboxa', 'rm -rf /repo')
self.assertFileNotInContainer('busyboxa', '/repo/README.md')
self.run_command('sync fake-repo')
self.assertFileContentsInContainer('busyboxa',
'/repo/README.md',
'# fake-repo')
|
Fix ordering problem in tearDown
|
Fix ordering problem in tearDown
|
Python
|
mit
|
gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty
|
from ...testcases import DustyIntegrationTestCase
from ...fixtures import busybox_single_app_bundle_fixture
class TestSyncCLI(DustyIntegrationTestCase):
def setUp(self):
super(TestSyncCLI, self).setUp()
busybox_single_app_bundle_fixture()
self.run_command('bundles activate busyboxa')
self.run_command('up')
def tearDown(self):
super(TestSyncCLI, self).tearDown()
self.run_command('bundles deactivate busyboxa')
try:
self.run_command('stop')
except Exception:
pass
def test_sync_repo(self):
self.exec_in_container('busyboxa', 'rm -rf /repo')
self.assertFileNotInContainer('busyboxa', '/repo/README.md')
self.run_command('sync fake-repo')
self.assertFileContentsInContainer('busyboxa',
'/repo/README.md',
'# fake-repo')
Fix ordering problem in tearDown
|
from ...testcases import DustyIntegrationTestCase
from ...fixtures import busybox_single_app_bundle_fixture
class TestSyncCLI(DustyIntegrationTestCase):
def setUp(self):
super(TestSyncCLI, self).setUp()
busybox_single_app_bundle_fixture()
self.run_command('bundles activate busyboxa')
self.run_command('up')
def tearDown(self):
self.run_command('bundles deactivate busyboxa')
try:
self.run_command('stop')
except Exception:
pass
super(TestSyncCLI, self).tearDown()
def test_sync_repo(self):
self.exec_in_container('busyboxa', 'rm -rf /repo')
self.assertFileNotInContainer('busyboxa', '/repo/README.md')
self.run_command('sync fake-repo')
self.assertFileContentsInContainer('busyboxa',
'/repo/README.md',
'# fake-repo')
|
<commit_before>from ...testcases import DustyIntegrationTestCase
from ...fixtures import busybox_single_app_bundle_fixture
class TestSyncCLI(DustyIntegrationTestCase):
def setUp(self):
super(TestSyncCLI, self).setUp()
busybox_single_app_bundle_fixture()
self.run_command('bundles activate busyboxa')
self.run_command('up')
def tearDown(self):
super(TestSyncCLI, self).tearDown()
self.run_command('bundles deactivate busyboxa')
try:
self.run_command('stop')
except Exception:
pass
def test_sync_repo(self):
self.exec_in_container('busyboxa', 'rm -rf /repo')
self.assertFileNotInContainer('busyboxa', '/repo/README.md')
self.run_command('sync fake-repo')
self.assertFileContentsInContainer('busyboxa',
'/repo/README.md',
'# fake-repo')
<commit_msg>Fix ordering problem in tearDown<commit_after>
|
from ...testcases import DustyIntegrationTestCase
from ...fixtures import busybox_single_app_bundle_fixture
class TestSyncCLI(DustyIntegrationTestCase):
def setUp(self):
super(TestSyncCLI, self).setUp()
busybox_single_app_bundle_fixture()
self.run_command('bundles activate busyboxa')
self.run_command('up')
def tearDown(self):
self.run_command('bundles deactivate busyboxa')
try:
self.run_command('stop')
except Exception:
pass
super(TestSyncCLI, self).tearDown()
def test_sync_repo(self):
self.exec_in_container('busyboxa', 'rm -rf /repo')
self.assertFileNotInContainer('busyboxa', '/repo/README.md')
self.run_command('sync fake-repo')
self.assertFileContentsInContainer('busyboxa',
'/repo/README.md',
'# fake-repo')
|
from ...testcases import DustyIntegrationTestCase
from ...fixtures import busybox_single_app_bundle_fixture
class TestSyncCLI(DustyIntegrationTestCase):
def setUp(self):
super(TestSyncCLI, self).setUp()
busybox_single_app_bundle_fixture()
self.run_command('bundles activate busyboxa')
self.run_command('up')
def tearDown(self):
super(TestSyncCLI, self).tearDown()
self.run_command('bundles deactivate busyboxa')
try:
self.run_command('stop')
except Exception:
pass
def test_sync_repo(self):
self.exec_in_container('busyboxa', 'rm -rf /repo')
self.assertFileNotInContainer('busyboxa', '/repo/README.md')
self.run_command('sync fake-repo')
self.assertFileContentsInContainer('busyboxa',
'/repo/README.md',
'# fake-repo')
Fix ordering problem in tearDownfrom ...testcases import DustyIntegrationTestCase
from ...fixtures import busybox_single_app_bundle_fixture
class TestSyncCLI(DustyIntegrationTestCase):
def setUp(self):
super(TestSyncCLI, self).setUp()
busybox_single_app_bundle_fixture()
self.run_command('bundles activate busyboxa')
self.run_command('up')
def tearDown(self):
self.run_command('bundles deactivate busyboxa')
try:
self.run_command('stop')
except Exception:
pass
super(TestSyncCLI, self).tearDown()
def test_sync_repo(self):
self.exec_in_container('busyboxa', 'rm -rf /repo')
self.assertFileNotInContainer('busyboxa', '/repo/README.md')
self.run_command('sync fake-repo')
self.assertFileContentsInContainer('busyboxa',
'/repo/README.md',
'# fake-repo')
|
<commit_before>from ...testcases import DustyIntegrationTestCase
from ...fixtures import busybox_single_app_bundle_fixture
class TestSyncCLI(DustyIntegrationTestCase):
def setUp(self):
super(TestSyncCLI, self).setUp()
busybox_single_app_bundle_fixture()
self.run_command('bundles activate busyboxa')
self.run_command('up')
def tearDown(self):
super(TestSyncCLI, self).tearDown()
self.run_command('bundles deactivate busyboxa')
try:
self.run_command('stop')
except Exception:
pass
def test_sync_repo(self):
self.exec_in_container('busyboxa', 'rm -rf /repo')
self.assertFileNotInContainer('busyboxa', '/repo/README.md')
self.run_command('sync fake-repo')
self.assertFileContentsInContainer('busyboxa',
'/repo/README.md',
'# fake-repo')
<commit_msg>Fix ordering problem in tearDown<commit_after>from ...testcases import DustyIntegrationTestCase
from ...fixtures import busybox_single_app_bundle_fixture
class TestSyncCLI(DustyIntegrationTestCase):
def setUp(self):
super(TestSyncCLI, self).setUp()
busybox_single_app_bundle_fixture()
self.run_command('bundles activate busyboxa')
self.run_command('up')
def tearDown(self):
self.run_command('bundles deactivate busyboxa')
try:
self.run_command('stop')
except Exception:
pass
super(TestSyncCLI, self).tearDown()
def test_sync_repo(self):
self.exec_in_container('busyboxa', 'rm -rf /repo')
self.assertFileNotInContainer('busyboxa', '/repo/README.md')
self.run_command('sync fake-repo')
self.assertFileContentsInContainer('busyboxa',
'/repo/README.md',
'# fake-repo')
|
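The fix is pure ordering: the subclass now deactivates its bundle and stops containers while the base class's fixtures are still alive, and only then calls super().tearDown(), mirroring setUp in reverse. unittest.addCleanup gives that ordering for free, since cleanups run LIFO after tearDown; a sketch with a stubbed run_command (the real one lives on DustyIntegrationTestCase):

import unittest

class TestSyncCLI(unittest.TestCase):
    def setUp(self):
        super().setUp()
        self.run_command('bundles activate busyboxa')
        self.addCleanup(self.run_command, 'bundles deactivate busyboxa')
        self.run_command('up')
        self.addCleanup(self._safe_stop)  # registered last, so it runs first

    def _safe_stop(self):
        try:
            self.run_command('stop')
        except Exception:
            pass

    def run_command(self, command):
        """Stub standing in for DustyIntegrationTestCase.run_command."""
        print('dusty', command)

    def test_noop(self):
        pass

if __name__ == '__main__':
    unittest.main()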
6a330523ad683b7883cefa3878c7690fcb5dbd75
|
TalkingToYouBot.py
|
TalkingToYouBot.py
|
from telegram import Updater
import json
import os
def getToken():
token = []
if not os.path.exists(file_path):
token.append(input('Insert Token here: '))
with open('token.json', 'w') as f:
json.dump(token, f)
else:
with open("token.json") as f:
token = json.load(f)
return token[0]
def main():
token = getToken()
print(token)
if __name__ == '__main__':
main()
|
from telegram import Updater
import json
import os
def getToken():
token = []
if not os.path.exists(file_path):
token.append(input('Insert Token here: '))
with open('token.json', 'w') as f:
json.dump(token, f)
else:
with open("token.json") as f:
token = json.load(f)
return token[0]
def echo(bot, update):
'''
Simple function that echoes every received message back to the user.
'''
bot.sendMessage(chat_id=update.message.chat_id, text=update.message.text)
def main():
token = getToken()
print("Starting Bot...")
# Start the Bot with the token
updater = Updater(token=token)
dispatcher = updater.dispatcher
# Add the echo-Message Handler to the Dispatcher
dispatcher.addTelegramMessageHandler(echo)
# Make the bot listen for commands
updater.start_polling()
updater.idle()
if __name__ == '__main__':
main()
|
Add simple Echo function and Bot initialisation
|
Add simple Echo function and Bot initialisation
|
Python
|
mit
|
h4llow3En/IAmTalkingToYouBot
|
from telegram import Updater
import json
import os
def getToken():
token = []
if not os.path.exists(file_path):
token.append(input('Insert Token here: '))
with open('token.json', 'w') as f:
json.dump(token, f)
else:
with open("token.json") as f:
token = json.load(f)
return token[0]
def main():
token = getToken()
print(token)
if __name__ == '__main__':
main()
Add simple Echo function and Bot initialisation
|
from telegram import Updater
import json
import os
def getToken():
token = []
if not os.path.exists(file_path):
token.append(input('Insert Token here: '))
with open('token.json', 'w') as f:
json.dump(token, f)
else:
with open("token.json") as f:
token = json.load(f)
return token[0]
def echo(bot, update):
'''
Simple function that echoes every received message back to the user.
'''
bot.sendMessage(chat_id=update.message.chat_id, text=update.message.text)
def main():
token = getToken()
print("Starting Bot...")
# Start the Bot with the token
updater = Updater(token=token)
dispatcher = updater.dispatcher
# Add the echo-Message Handler to the Dispatcher
dispatcher.addTelegramMessageHandler(echo)
# Make the bot listen for commands
updater.start_polling()
updater.idle()
if __name__ == '__main__':
main()
|
<commit_before>from telegram import Updater
import json
import os
def getToken():
token = []
if not os.path.exists(file_path):
token.append(input('Insert Token here: '))
with open('token.json', 'w') as f:
json.dump(token, f)
else:
with open("token.json") as f:
token = json.load(f)
return token[0]
def main():
token = getToken()
print(token)
if __name__ == '__main__':
main()
<commit_msg>Add simple Echo function and Bot initialisation<commit_after>
|
from telegram import Updater
import json
import os
def getToken():
token = []
if not os.path.exists(file_path):
token.append(input('Insert Token here: '))
with open('token.json', 'w') as f:
json.dump(token, f)
else:
with open("token.json") as f:
token = json.load(f)
return token[0]
def echo(bot, update):
'''
Simple function that echoes every received message back to the user.
'''
bot.sendMessage(chat_id=update.message.chat_id, text=update.message.text)
def main():
token = getToken()
print("Starting Bot...")
# Start the Bot with the token
updater = Updater(token=token)
dispatcher = updater.dispatcher
# Add the echo-Message Handler to the Dispatcher
dispatcher.addTelegramMessageHandler(echo)
# Make the bot listen for commands
updater.start_polling()
updater.idle()
if __name__ == '__main__':
main()
|
from telegram import Updater
import json
import os
def getToken():
token = []
if not os.path.exists(file_path):
token.append(input('Insert Token here: '))
with open('token.json', 'w') as f:
json.dump(token, f)
else:
with open("token.json") as f:
token = json.load(f)
return token[0]
def main():
token = getToken()
print(token)
if __name__ == '__main__':
main()
Add simple Echo function and Bot initialisationfrom telegram import Updater
import json
import os
def getToken():
token = []
if not os.path.exists(file_path):
token.append(input('Insert Token here: '))
with open('token.json', 'w') as f:
json.dump(token, f)
else:
with open("token.json") as f:
token = json.load(f)
return token[0]
def echo(bot, update):
'''
Simple function that echoes every received message back to the user.
'''
bot.sendMessage(chat_id=update.message.chat_id, text=update.message.text)
def main():
token = getToken()
print("Starting Bot...")
# Start the Bot with the token
updater = Updater(token=token)
dispatcher = updater.dispatcher
# Add the echo-Message Handler to the Dispatcher
dispatcher.addTelegramMessageHandler(echo)
# Make the bot listen for commands
updater.start_polling()
updater.idle()
if __name__ == '__main__':
main()
|
<commit_before>from telegram import Updater
import json
import os
def getToken():
token = []
if not os.path.exists(file_path):
token.append(input('Insert Token here: '))
with open('token.json', 'w') as f:
json.dump(token, f)
else:
with open("token.json") as f:
token = json.load(f)
return token[0]
def main():
token = getToken()
print(token)
if __name__ == '__main__':
main()
<commit_msg>Add simple Echo function and Bot initialisation<commit_after>from telegram import Updater
import json
import os
def getToken():
token = []
if not os.path.exists(file_path):
token.append(input('Insert Token here: '))
with open('token.json', 'w') as f:
json.dump(token, f)
else:
with open("token.json") as f:
token = json.load(f)
return token[0]
def echo(bot, update):
'''
Simple function that echoes every received message back to the user.
'''
bot.sendMessage(chat_id=update.message.chat_id, text=update.message.text)
def main():
token = getToken()
print("Starting Bot...")
# Start the Bot with the token
updater = Updater(token=token)
dispatcher = updater.dispatcher
# Add the echo-Message Handler to the Dispatcher
dispatcher.addTelegramMessageHandler(echo)
# Make the bot listen for commands
updater.start_polling()
updater.idle()
if __name__ == '__main__':
main()
|
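Both snapshots of getToken check os.path.exists(file_path) but never define file_path, so the first run dies with a NameError before it can prompt. A self-contained variant keeping the same token.json convention:

import json
import os

TOKEN_FILE = 'token.json'

def get_token(path=TOKEN_FILE):
    """Prompt for the bot token once, then cache it as a one-element JSON list."""
    if not os.path.exists(path):
        token = [input('Insert Token here: ')]
        with open(path, 'w') as f:
            json.dump(token, f)
    else:
        with open(path) as f:
            token = json.load(f)
    return token[0]

The bot wiring itself (Updater(token=...), dispatcher.addTelegramMessageHandler, start_polling/idle) matches the pre-5.0 python-telegram-bot API in use here; later releases renamed these, so treat the record's calls as period-correct rather than copy-paste ready.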
64c967eff68163754ed9b9593030e70c26d65f50
|
pygfunction/examples/unequal_segment_lengths.py
|
pygfunction/examples/unequal_segment_lengths.py
|
# -*- coding: utf-8 -*-
""" Example of calculation of g-functions with unequal segment lengths using
uniform and equal borehole wall temperatures.
The g-functions of a field of 6x4 boreholes are calculated with unequal
segment lengths.
"""
import matplotlib.pyplot as plt
import numpy as np
import pygfunction as gt
def main():
# -------------------------------------------------------------------------
# Simulation parameters
# -------------------------------------------------------------------------
# Borehole dimensions
D = 4.0 # Borehole buried depth (m)
H = 150.0 # Borehole length (m)
r_b = 0.075 # Borehole radius (m)
B = 7.5 # Borehole spacing (m)
# Thermal properties
alpha = 1.0e-6 # Ground thermal diffusivity (m2/s)
# Geometrically expanding time vector.
dt = 100*3600. # Time step
tmax = 3000. * 8760. * 3600. # Maximum time
Nt = 50 # Number of time steps
ts = H**2/(9.*alpha) # Bore field characteristic time
time = gt.utilities.time_geometric(dt, tmax, Nt)
# -------------------------------------------------------------------------
# Borehole field
# -------------------------------------------------------------------------
# Field of 6x4 (n=24) boreholes
N_1 = 6
N_2 = 4
boreField = gt.boreholes.rectangle_field(N_1, N_2, B, B, H, D, r_b)
# -------------------------------------------------------------------------
# Evaluate g-functions with different segment options
# -------------------------------------------------------------------------
# g-Function calculation option for uniform borehole heights
options = {'nSegments': 12, 'disp': True, 'profiles': True}
gfunc = gt.gfunction.gFunction(
boreField, alpha, time=time, options=options)
print(gfunc.gFunc)
# define number of segments as a list
options = {'nSegments': [12] * len(boreField), 'disp': True}
gfunc = gt.gfunction.gFunction(
boreField, alpha, time=time, options=options)
print(gfunc.gFunc)
return
# Main function
if __name__ == '__main__':
main()
|
Add code to unequal segment length example
|
Add code to unequal segment length example
|
Python
|
bsd-3-clause
|
MassimoCimmino/pygfunction
|
Add code to unequal segment length example
|
# -*- coding: utf-8 -*-
""" Example of calculation of g-functions with unequal segment lengths using
uniform and equal borehole wall temperatures.
The g-functions of a field of 6x4 boreholes are calculated with unequal
segment lengths.
"""
import matplotlib.pyplot as plt
import numpy as np
import pygfunction as gt
def main():
# -------------------------------------------------------------------------
# Simulation parameters
# -------------------------------------------------------------------------
# Borehole dimensions
D = 4.0 # Borehole buried depth (m)
H = 150.0 # Borehole length (m)
r_b = 0.075 # Borehole radius (m)
B = 7.5 # Borehole spacing (m)
# Thermal properties
alpha = 1.0e-6 # Ground thermal diffusivity (m2/s)
# Geometrically expanding time vector.
dt = 100*3600. # Time step
tmax = 3000. * 8760. * 3600. # Maximum time
Nt = 50 # Number of time steps
ts = H**2/(9.*alpha) # Bore field characteristic time
time = gt.utilities.time_geometric(dt, tmax, Nt)
# -------------------------------------------------------------------------
# Borehole field
# -------------------------------------------------------------------------
# Field of 6x4 (n=24) boreholes
N_1 = 6
N_2 = 4
boreField = gt.boreholes.rectangle_field(N_1, N_2, B, B, H, D, r_b)
# -------------------------------------------------------------------------
# Evaluate g-functions with different segment options
# -------------------------------------------------------------------------
# g-Function calculation option for uniform borehole heights
options = {'nSegments': 12, 'disp': True, 'profiles': True}
gfunc = gt.gfunction.gFunction(
boreField, alpha, time=time, options=options)
print(gfunc.gFunc)
# define number of segments as a list
options = {'nSegments': [12] * len(boreField), 'disp': True}
gfunc = gt.gfunction.gFunction(
boreField, alpha, time=time, options=options)
print(gfunc.gFunc)
return
# Main function
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add code to unequal segment length example<commit_after>
|
# -*- coding: utf-8 -*-
""" Example of calculation of g-functions with unequal segment lengths using
uniform and equal borehole wall temperatures.
The g-functions of a field of 6x4 boreholes are calculated with unequal
segment lengths.
"""
import matplotlib.pyplot as plt
import numpy as np
import pygfunction as gt
def main():
# -------------------------------------------------------------------------
# Simulation parameters
# -------------------------------------------------------------------------
# Borehole dimensions
D = 4.0 # Borehole buried depth (m)
H = 150.0 # Borehole length (m)
r_b = 0.075 # Borehole radius (m)
B = 7.5 # Borehole spacing (m)
# Thermal properties
alpha = 1.0e-6 # Ground thermal diffusivity (m2/s)
# Geometrically expanding time vector.
dt = 100*3600. # Time step
tmax = 3000. * 8760. * 3600. # Maximum time
Nt = 50 # Number of time steps
ts = H**2/(9.*alpha) # Bore field characteristic time
time = gt.utilities.time_geometric(dt, tmax, Nt)
# -------------------------------------------------------------------------
# Borehole field
# -------------------------------------------------------------------------
# Field of 6x4 (n=24) boreholes
N_1 = 6
N_2 = 4
boreField = gt.boreholes.rectangle_field(N_1, N_2, B, B, H, D, r_b)
# -------------------------------------------------------------------------
# Evaluate g-functions with different segment options
# -------------------------------------------------------------------------
# g-Function calculation option for uniform borehole heights
options = {'nSegments': 12, 'disp': True, 'profiles': True}
gfunc = gt.gfunction.gFunction(
boreField, alpha, time=time, options=options)
print(gfunc.gFunc)
# define number of segments as a list
options = {'nSegments': [12] * len(boreField), 'disp': True}
gfunc = gt.gfunction.gFunction(
boreField, alpha, time=time, options=options)
print(gfunc.gFunc)
return
# Main function
if __name__ == '__main__':
main()
|
Add code to unequal segment length example# -*- coding: utf-8 -*-
""" Example of calculation of g-functions with unequal segment lengths using
uniform and equal borehole wall temperatures.
The g-functions of a field of 6x4 boreholes are calculated with unequal
segment lengths.
"""
import matplotlib.pyplot as plt
import numpy as np
import pygfunction as gt
def main():
# -------------------------------------------------------------------------
# Simulation parameters
# -------------------------------------------------------------------------
# Borehole dimensions
D = 4.0 # Borehole buried depth (m)
H = 150.0 # Borehole length (m)
r_b = 0.075 # Borehole radius (m)
B = 7.5 # Borehole spacing (m)
# Thermal properties
alpha = 1.0e-6 # Ground thermal diffusivity (m2/s)
# Geometrically expanding time vector.
dt = 100*3600. # Time step
tmax = 3000. * 8760. * 3600. # Maximum time
Nt = 50 # Number of time steps
ts = H**2/(9.*alpha) # Bore field characteristic time
time = gt.utilities.time_geometric(dt, tmax, Nt)
# -------------------------------------------------------------------------
# Borehole field
# -------------------------------------------------------------------------
# Field of 6x4 (n=24) boreholes
N_1 = 6
N_2 = 4
boreField = gt.boreholes.rectangle_field(N_1, N_2, B, B, H, D, r_b)
# -------------------------------------------------------------------------
# Evaluate g-functions with different segment options
# -------------------------------------------------------------------------
# g-Function calculation option for uniform borehole heights
options = {'nSegments': 12, 'disp': True, 'profiles': True}
gfunc = gt.gfunction.gFunction(
boreField, alpha, time=time, options=options)
print(gfunc.gFunc)
# define number of segments as a list
options = {'nSegments': [12] * len(boreField), 'disp': True}
gfunc = gt.gfunction.gFunction(
boreField, alpha, time=time, options=options)
print(gfunc.gFunc)
return
# Main function
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add code to unequal segment length example<commit_after># -*- coding: utf-8 -*-
""" Example of calculation of g-functions with unequal segment lengths using
uniform and equal borehole wall temperatures.
The g-functions of a field of 6x4 boreholes are calculated with unequal
segment lengths.
"""
import matplotlib.pyplot as plt
import numpy as np
import pygfunction as gt
def main():
# -------------------------------------------------------------------------
# Simulation parameters
# -------------------------------------------------------------------------
# Borehole dimensions
D = 4.0 # Borehole buried depth (m)
H = 150.0 # Borehole length (m)
r_b = 0.075 # Borehole radius (m)
B = 7.5 # Borehole spacing (m)
# Thermal properties
alpha = 1.0e-6 # Ground thermal diffusivity (m2/s)
# Geometrically expanding time vector.
dt = 100*3600. # Time step
tmax = 3000. * 8760. * 3600. # Maximum time
Nt = 50 # Number of time steps
ts = H**2/(9.*alpha) # Bore field characteristic time
time = gt.utilities.time_geometric(dt, tmax, Nt)
# -------------------------------------------------------------------------
# Borehole field
# -------------------------------------------------------------------------
# Field of 6x4 (n=24) boreholes
N_1 = 6
N_2 = 4
boreField = gt.boreholes.rectangle_field(N_1, N_2, B, B, H, D, r_b)
# -------------------------------------------------------------------------
# Evaluate g-functions with different segment options
# -------------------------------------------------------------------------
# g-Function calculation option for uniform borehole heights
options = {'nSegments': 12, 'disp': True, 'profiles': True}
gfunc = gt.gfunction.gFunction(
boreField, alpha, time=time, options=options)
print(gfunc.gFunc)
# define number of segments as a list
options = {'nSegments': [12] * len(boreField), 'disp': True}
gfunc = gt.gfunction.gFunction(
boreField, alpha, time=time, options=options)
print(gfunc.gFunc)
return
# Main function
if __name__ == '__main__':
main()
|
|
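The example's payoff is the second options dict: 'nSegments' can be a single int applied to every borehole or a per-borehole list, and [12] * len(boreField) should reproduce the uniform case. A sketch of actually varying the discretization, reusing the record's own API calls; the corner indices assume row-major ordering of rectangle_field, which is an assumption, not something the example states:

import pygfunction as gt

alpha = 1.0e-6  # ground thermal diffusivity (m2/s)
boreField = gt.boreholes.rectangle_field(6, 4, 7.5, 7.5, 150.0, 4.0, 0.075)
time = gt.utilities.time_geometric(100*3600., 3000.*8760.*3600., 50)

# Coarse (8-segment) boreholes everywhere, finer (24-segment) at the
# corners where end effects are strongest. Indices assume row-major order.
nSegments = [8] * len(boreField)
for corner in (0, 5, 18, 23):
    nSegments[corner] = 24

options = {'nSegments': nSegments, 'disp': True}
gfunc = gt.gfunction.gFunction(boreField, alpha, time=time, options=options)
print(gfunc.gFunc)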
86a7b0e989e983063a1ff5afd098600bf34da401
|
ixwsauth_server/api.py
|
ixwsauth_server/api.py
|
"""
Tastypie API Authorization handlers
"""
from tastypie.authentication import Authentication
from tastypie.authorization import Authorization
class ApplicationAuthentication(Authentication):
"""
Authenticate the API request by checking the application key.
"""
def is_authenticated(self, request, **kwargs):
"""
Check that the request is signed by the application.
"""
consumer = getattr(request, 'consumer', None)
return consumer is not None
class ApplicationAuthorization(Authorization):
"""
Authorize the API request by checking the application key.
"""
#
# pylint:disable=W0613,W0622,R0201
# Redefining built-in 'object'
# Unused argument 'object'
# Method could be a function
#
# Part of Tastypie API - cannot change any of the above
#
def is_authorized(self, request, object=None):
"""
Check that the request is signed by the application.
"""
consumer = getattr(request, 'consumer', None)
return consumer is not None
|
"""
Tastypie API Authorization handlers
"""
from tastypie.authentication import Authentication
from tastypie.authorization import Authorization
class ApplicationAuthentication(Authentication):
"""
Authenticate the API request by checking the application key.
"""
def is_authenticated(self, request, **kwargs):
"""
Check that the request is signed by the application.
"""
consumer = getattr(request, 'consumer', None)
return consumer is not None
def get_identifier(self, request):
"""
Return a combination of the consumer, the IP address and the host
"""
consumer = getattr(request, 'consumer', None)
return '%s_%s' % (
consumer.key(),
super(ApplicationAuthentication, self).get_identifier(request))
class ApplicationAuthorization(Authorization):
"""
Authorize the API request by checking the application key.
"""
#
# pylint:disable=W0613,W0622,R0201
# Redefining built-in 'object'
# Unused argument 'object'
# Method could be a function
#
# Part of Tastypie API - cannot change any of the above
#
def is_authorized(self, request, object=None):
"""
Check that the request is signed by the application.
"""
consumer = getattr(request, 'consumer', None)
return consumer is not None
|
Add the consumer key to the identifier
|
Add the consumer key to the identifier
Used for rate limiting by API key.
Refs #17338
|
Python
|
mit
|
infoxchange/ixwsauth
|
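For the rate-limiting use case named in the message: Tastypie's throttle classes key on the identifier returned by the Authentication class, so prefixing the consumer key makes limits per-application. A hedged sketch of the wiring, with a hypothetical resource and limits; only the CacheThrottle/Meta interaction is standard Tastypie:

from tastypie.resources import ModelResource
from tastypie.throttle import CacheThrottle

class ExampleResource(ModelResource):
    class Meta:
        # queryset, resource_name, etc. omitted for brevity
        authentication = ApplicationAuthentication()
        # Throttled per consumer key, since get_identifier() now prefixes it
        throttle = CacheThrottle(throttle_at=100, timeframe=3600)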
"""
Tastypie API Authorization handlers
"""
from tastypie.authentication import Authentication
from tastypie.authorization import Authorization
class ApplicationAuthentication(Authentication):
"""
Authenticate the API request by checking the application key.
"""
def is_authenticated(self, request, **kwargs):
"""
Check that the request is signed by the application.
"""
consumer = getattr(request, 'consumer', None)
return consumer is not None
class ApplicationAuthorization(Authorization):
"""
Authorize the API request by checking the application key.
"""
#
# pylint:disable=W0613,W0622,R0201
# Redefining built-in 'object'
# Unused argument 'object'
# Method could be a function
#
# Part of Tastypie API - cannot change any of the above
#
def is_authorized(self, request, object=None):
"""
Check that the request is signed by the application.
"""
consumer = getattr(request, 'consumer', None)
return consumer is not None
Add the consumer key to the identifier
Used for rate limiting by API key.
Refs #17338
|
"""
Tastypie API Authorization handlers
"""
from tastypie.authentication import Authentication
from tastypie.authorization import Authorization
class ApplicationAuthentication(Authentication):
"""
Authenticate the API request by checking the application key.
"""
def is_authenticated(self, request, **kwargs):
"""
Check that the request is signed by the application.
"""
consumer = getattr(request, 'consumer', None)
return consumer is not None
def get_identifier(self, request):
"""
Return a combination of the consumer, the IP address and the host
"""
consumer = getattr(request, 'consumer', None)
return '%s_%s' % (
consumer.key(),
super(ApplicationAuthentication, self).get_identifier(request))
class ApplicationAuthorization(Authorization):
"""
Authorize the API request by checking the application key.
"""
#
# pylint:disable=W0613,W0622,R0201
# Redefining built-in 'object'
# Unused argument 'object'
# Method could be a function
#
# Part of Tastypie API - cannot change any of the above
#
def is_authorized(self, request, object=None):
"""
Check that the request is signed by the application.
"""
consumer = getattr(request, 'consumer', None)
return consumer is not None
|
<commit_before>"""
Tastypie API Authorization handlers
"""
from tastypie.authentication import Authentication
from tastypie.authorization import Authorization
class ApplicationAuthentication(Authentication):
"""
Authenticate the API request by checking the application key.
"""
def is_authenticated(self, request, **kwargs):
"""
Check that the request is signed by the application.
"""
consumer = getattr(request, 'consumer', None)
return consumer is not None
class ApplicationAuthorization(Authorization):
"""
Authorize the API request by checking the application key.
"""
#
# pylint:disable=W0613,W0622,R0201
# Redefining built-in 'object'
# Unused argument 'object'
# Method could be a function
#
# Part of Tastypie API - cannot change any of the above
#
def is_authorized(self, request, object=None):
"""
Check that the request is signed by the application.
"""
consumer = getattr(request, 'consumer', None)
return consumer is not None
<commit_msg>Add the consumer key to the identifier
Used for rate limiting by API key.
Refs #17338<commit_after>
|
"""
Tastypie API Authorization handlers
"""
from tastypie.authentication import Authentication
from tastypie.authorization import Authorization
class ApplicationAuthentication(Authentication):
"""
Authenticate the API request by checking the application key.
"""
def is_authenticated(self, request, **kwargs):
"""
Check that the request is signed by the application.
"""
consumer = getattr(request, 'consumer', None)
return consumer is not None
def get_identifier(self, request):
"""
Return a combination of the consumer, the IP address and the host
"""
consumer = getattr(request, 'consumer', None)
return '%s_%s' % (
consumer.key(),
super(ApplicationAuthentication, self).get_identifier(request))
class ApplicationAuthorization(Authorization):
"""
Authorize the API request by checking the application key.
"""
#
# pylint:disable=W0613,W0622,R0201
# Redefining built-in 'object'
# Unused argument 'object'
# Method could be a function
#
# Part of Tastypie API - cannot change any of the above
#
def is_authorized(self, request, object=None):
"""
Check that the request is signed by the application.
"""
consumer = getattr(request, 'consumer', None)
return consumer is not None
|
"""
Tastypie API Authorization handlers
"""
from tastypie.authentication import Authentication
from tastypie.authorization import Authorization
class ApplicationAuthentication(Authentication):
"""
Authenticate the API request by checking the application key.
"""
def is_authenticated(self, request, **kwargs):
"""
Check that the request is signed by the application.
"""
consumer = getattr(request, 'consumer', None)
return consumer is not None
class ApplicationAuthorization(Authorization):
"""
Authorize the API request by checking the application key.
"""
#
# pylint:disable=W0613,W0622,R0201
# Redefining built-in 'object'
# Unused argument 'object'
# Method could be a function
#
# Part of Tastypie API - cannot change any of the above
#
def is_authorized(self, request, object=None):
"""
Check that the request is signed by the application.
"""
consumer = getattr(request, 'consumer', None)
return consumer is not None
Add the consumer key to the identifier
Used for rate limiting by API key.
Refs #17338"""
Tastypie API Authorization handlers
"""
from tastypie.authentication import Authentication
from tastypie.authorization import Authorization
class ApplicationAuthentication(Authentication):
"""
Authenticate the API request by checking the application key.
"""
def is_authenticated(self, request, **kwargs):
"""
Check that the request is signed by the application.
"""
consumer = getattr(request, 'consumer', None)
return consumer is not None
def get_identifier(self, request):
"""
Return a combination of the consumer, the IP address and the host
"""
consumer = getattr(request, 'consumer', None)
return '%s_%s' % (
consumer.key(),
super(ApplicationAuthentication, self).get_identifier(request))
class ApplicationAuthorization(Authorization):
"""
Authorize the API request by checking the application key.
"""
#
# pylint:disable=W0613,W0622,R0201
# Redefining built-in 'object'
# Unused argument 'object'
# Method could be a function
#
# Part of Tastypie API - cannot change any of the above
#
def is_authorized(self, request, object=None):
"""
Check that the request is signed by the application.
"""
consumer = getattr(request, 'consumer', None)
return consumer is not None
|
<commit_before>"""
Tastypie API Authorization handlers
"""
from tastypie.authentication import Authentication
from tastypie.authorization import Authorization
class ApplicationAuthentication(Authentication):
"""
Authenticate the API request by checking the application key.
"""
def is_authenticated(self, request, **kwargs):
"""
Check that the request is signed by the application.
"""
consumer = getattr(request, 'consumer', None)
return consumer is not None
class ApplicationAuthorization(Authorization):
"""
Authorize the API request by checking the application key.
"""
#
# pylint:disable=W0613,W0622,R0201
# Redefining built-in 'object'
# Unused argument 'object'
# Method could be a function
#
# Part of Tastypie API - cannot change any of the above
#
def is_authorized(self, request, object=None):
"""
Check that the request is signed by the application.
"""
consumer = getattr(request, 'consumer', None)
return consumer is not None
<commit_msg>Add the consumer key to the identifier
Used for rate limiting by API key.
Refs #17338<commit_after>"""
Tastypie API Authorization handlers
"""
from tastypie.authentication import Authentication
from tastypie.authorization import Authorization
class ApplicationAuthentication(Authentication):
"""
Authenticate the API request by checking the application key.
"""
def is_authenticated(self, request, **kwargs):
"""
Check that the request is signed by the application.
"""
consumer = getattr(request, 'consumer', None)
return consumer is not None
def get_identifier(self, request):
"""
Return a combination of the consumer, the IP address and the host
"""
consumer = getattr(request, 'consumer', None)
return '%s_%s' % (
consumer.key(),
super(ApplicationAuthentication, self).get_identifier(request))
class ApplicationAuthorization(Authorization):
"""
Authorize the API request by checking the application key.
"""
#
# pylint:disable=W0613,W0622,R0201
# Redefining built-in 'object'
# Unused argument 'object'
# Method could be a function
#
# Part of Tastypie API - cannot change any of the above
#
def is_authorized(self, request, object=None):
"""
Check that the request is signed by the application.
"""
consumer = getattr(request, 'consumer', None)
return consumer is not None
|
e3b5e23566830ab20a7e0358e1040e7a6a889b22
|
podoc/conftest.py
|
podoc/conftest.py
|
# -*- coding: utf-8 -*-
"""py.test utilities."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import logging
from tempfile import TemporaryDirectory
from pytest import yield_fixture
from podoc import Podoc, add_default_handler
from podoc.testing import open_test_file, get_test_file_path, iter_test_files
#------------------------------------------------------------------------------
# Common fixtures
#------------------------------------------------------------------------------
logging.getLogger().setLevel(logging.DEBUG)
add_default_handler('DEBUG')
@yield_fixture
def tempdir():
with TemporaryDirectory() as tempdir:
yield tempdir
@yield_fixture
def podoc():
yield Podoc()
@yield_fixture
def hello_ast():
yield open_test_file('hello_ast.py')
@yield_fixture
def hello_json():
yield open_test_file('hello.json')
@yield_fixture
def hello_json_path():
yield get_test_file_path('hello.json')
@yield_fixture
def hello_markdown():
yield open_test_file('hello.md')
def pytest_generate_tests(metafunc):
"""Generate the test_file_tuple fixture to test all plugin test files."""
if 'test_file_tuple' in metafunc.fixturenames:
metafunc.parametrize('test_file_tuple', iter_test_files())
|
# -*- coding: utf-8 -*-
"""py.test utilities."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import logging
from tempfile import TemporaryDirectory
from pytest import yield_fixture
from podoc import Podoc, add_default_handler
from podoc.testing import open_test_file, get_test_file_path, iter_test_files
#------------------------------------------------------------------------------
# Common fixtures
#------------------------------------------------------------------------------
logging.getLogger().setLevel(logging.DEBUG)
add_default_handler('DEBUG')
@yield_fixture
def tempdir():
with TemporaryDirectory() as tempdir:
yield tempdir
@yield_fixture
def podoc():
yield Podoc()
@yield_fixture
def hello_ast():
yield open_test_file('hello_ast.py')
@yield_fixture
def hello_json():
yield open_test_file('hello.json')
@yield_fixture
def hello_json_path():
yield get_test_file_path('hello.json')
@yield_fixture
def hello_markdown():
yield open_test_file('hello.md')
def pytest_generate_tests(metafunc):
"""Generate the test_file_tuple fixture to test all plugin test files."""
if 'test_file_tuple' in metafunc.fixturenames:
def _name(tuple):
"""Name of the parameterized test: <plugin>_<example_file>."""
return '_'.join(tuple[:2])
metafunc.parametrize('test_file_tuple', iter_test_files(), ids=_name)
|
Set the name of the dynamic plugin conversion tests
|
Set the name of the dynamic plugin conversion tests
|
Python
|
bsd-3-clause
|
rossant/podoc,podoc/podoc,rossant/podoc,podoc/podoc
|
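The payoff of the ids argument is readable test identifiers. Assuming a plugin named 'markdown' and an example file 'hello' (both hypothetical), a parametrized test that previously showed up under an opaque index gains a descriptive name:

# Before: test_all_conversions[test_file_tuple0]
# After:  test_all_conversions[markdown_hello]
#
# _name(('markdown', 'hello', ...)) == 'markdown_hello'

Only the ids= keyword of metafunc.parametrize is standard pytest; the test and plugin names are assumptions.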
07d62f1e9525719be48d862a86f3623368c02d9d
|
kuryr/lib/constants.py
|
kuryr/lib/constants.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
PORT_STATUS_ACTIVE = 'ACTIVE'
PORT_STATUS_DOWN = 'DOWN'
DEVICE_OWNER = 'kuryr:container'
NIC_NAME_LEN = 14
VETH_PREFIX = 'tap'
CONTAINER_VETH_PREFIX = 't_c'
# For VLAN type segmentation
MIN_VLAN_TAG = 1
MAX_VLAN_TAG = 4094
BINDING_SUBCOMMAND = 'bind'
DEFAULT_NETWORK_MTU = 1500
FALLBACK_VIF_TYPE = 'unbound'
UNBINDING_SUBCOMMAND = 'unbind'
VIF_DETAILS_KEY = 'binding:vif_details'
VIF_TYPE_KEY = 'binding:vif_type'
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
PORT_STATUS_ACTIVE = 'ACTIVE'
PORT_STATUS_DOWN = 'DOWN'
DEVICE_OWNER = 'compute:kuryr'
NIC_NAME_LEN = 14
VETH_PREFIX = 'tap'
CONTAINER_VETH_PREFIX = 't_c'
# For VLAN type segmentation
MIN_VLAN_TAG = 1
MAX_VLAN_TAG = 4094
BINDING_SUBCOMMAND = 'bind'
DEFAULT_NETWORK_MTU = 1500
FALLBACK_VIF_TYPE = 'unbound'
UNBINDING_SUBCOMMAND = 'unbind'
VIF_DETAILS_KEY = 'binding:vif_details'
VIF_TYPE_KEY = 'binding:vif_type'
|
Change DEVICE_OWNER to make it more Neutron compliant
|
Change DEVICE_OWNER to make it more Neutron compliant
Change-Id: Id7a2973928c6df9e134e7b91000e90f244066703
|
Python
|
apache-2.0
|
openstack/kuryr,openstack/kuryr
|
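The new value follows Neutron's convention that ports attached to instances carry a device_owner with the 'compute:' prefix, which some Neutron-side logic keys on. A hedged sketch of how such a constant is typically consumed when creating a port; the client variable and the other attributes are assumptions, not code from this repository:

port_request = {
    'port': {
        'network_id': network_id,
        'device_owner': DEVICE_OWNER,  # now 'compute:kuryr'
        'admin_state_up': True,
    }
}
port = neutron_client.create_port(port_request)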
eb63b0979763375522bc71ce2f06fb625151ea08
|
MoMMI/Modules/wyci.py
|
MoMMI/Modules/wyci.py
|
import random
import re
from typing import Match
from discord import Message
from MoMMI import master, always_command, MChannel
@always_command("wyci")
async def wyci(channel: MChannel, _match: Match, message: Message) -> None:
match = re.search(r"\S\s+when[\s*?.!)]*$", message.content, re.IGNORECASE)
if match is None:
return
if random.random() > 0.001:
await channel.send("When You Code It.")
else:
await channel.send("Never.")
|
import random
import re
from typing import Match
from discord import Message
from MoMMI import master, always_command, MChannel
@always_command("wyci")
async def wyci(channel: MChannel, _match: Match, message: Message) -> None:
if not channel.server_config("wyci.enabled", True):
return
match = re.search(r"\S\s+when[\s*?.!)]*$", message.content, re.IGNORECASE)
if match is None:
return
if random.random() > 0.001:
await channel.send("When You Code It.")
else:
await channel.send("Never.")
|
Add config to disable WYCI.
|
Add config to disable WYCI.
|
Python
|
mit
|
PJB3005/MoMMI,PJB3005/MoMMI,PJB3005/MoMMI
|
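A note on the design choice: passing True as the default makes the toggle opt-out, so servers that never set the key keep the existing behaviour. Assuming the server config deserializes to a nested mapping (an assumption; the storage format is not part of this record), disabling the feature would amount to:

# Hypothetical deserialized server config
config = {"wyci": {"enabled": False}}
# channel.server_config("wyci.enabled", True) would then return False

Only the key path 'wyci.enabled' and the True default come from the code itself.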
a0fb20f910f59737be725a1fc3d49d17cafa9107
|
ella/articles/newman_admin.py
|
ella/articles/newman_admin.py
|
from django.utils.translation import ugettext_lazy as _
from ella.core.newman_admin import ListingInlineAdmin, PublishableAdmin,\
RelatedInlineAdmin
from ella.articles.models import Article
import ella_newman
class ArticleAdmin(PublishableAdmin):
fieldsets = (
(_("Article heading"), {'fields': ('title', )}),
(_("Updated, slug"), {'fields': ('last_updated', 'slug',), 'classes': ('collapsed',)}),
(_("Metadata"), {'fields': ('photo', 'category', 'authors', 'source')}),
(_("Dates"), {'fields': (('publish_from', 'publish_to'), 'static')}),
(_("Content"), {'fields': ('description', 'content')}),
)
inlines = [ListingInlineAdmin, RelatedInlineAdmin]
rich_text_fields = {'small': ('description',), None: ('content',)}
ella_newman.site.register(Article, ArticleAdmin)
|
from django.utils.translation import ugettext_lazy as _
from ella.core.newman_admin import ListingInlineAdmin, PublishableAdmin,\
RelatedInlineAdmin
from ella.articles.models import Article
import ella_newman
class ArticleAdmin(PublishableAdmin):
fieldsets = (
(_("Article heading"), {'fields': ('title', )}),
(_("Updated, slug"), {'fields': ('last_updated', 'slug',), 'classes': ('collapsed',)}),
(_("Metadata"), {'fields': ('photo', 'category', 'authors', 'source')}),
(_("Dates"), {'fields': (('publish_from', 'publish_to'), 'static')}),
(_("Content"), {'fields': ('description', 'content')}),
)
inlines = [ListingInlineAdmin, RelatedInlineAdmin]
rich_text_fields = {'small': ('description',), None: ('content',)}
ella_newman.site.register(Article, ArticleAdmin)
|
Fix pep8 on articles new man admin: E302 expected 2 blank lines
|
Fix pep8 on articles new man admin: E302 expected 2 blank lines
|
Python
|
bsd-3-clause
|
WhiskeyMedia/ella,MichalMaM/ella,petrlosa/ella,whalerock/ella,petrlosa/ella,whalerock/ella,WhiskeyMedia/ella,ella/ella,whalerock/ella,MichalMaM/ella
|
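Because blank lines are collapsed in this rendering, the before and after contents look identical; the actual change only inserts blank lines. One way to see the fix, assuming flake8 is the pep8 front-end in use (the project may invoke pep8/pycodestyle directly):

# $ flake8 --select=E302 ella/articles/newman_admin.py
# before: ella/articles/newman_admin.py:9:1: E302 expected 2 blank lines, found 1
# after:  no output
#
# PEP 8 / E302: two blank lines before a top-level class or def.

The line and column numbers are illustrative; only the E302 code comes from the commit message.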
da9058064e2a94f717abe2f97af80d2daa4fa292
|
likert_field/models.py
|
likert_field/models.py
|
#-*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
import likert_field.forms as forms
class LikertField(models.IntegerField):
"""A Likert field is simply stored as an IntegerField"""
description = _('Likert item field')
def __init__(self, *args, **kwargs):
if 'null' not in kwargs and not kwargs.get('null'):
kwargs['null'] = True
super(LikertField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
defaults = {
'min_value': 0,
'form_class': forms.LikertField
}
defaults.update(kwargs)
return super(LikertField, self).formfield(**defaults)
|
#-*- coding: utf-8 -*-
from __future__ import unicode_literals
from six import string_types
from django.db import models
from django.utils.translation import ugettext_lazy as _
import likert_field.forms as forms
class LikertField(models.IntegerField):
"""A Likert field is simply stored as an IntegerField"""
description = _('Likert item field')
def __init__(self, *args, **kwargs):
if 'null' not in kwargs and not kwargs.get('null'):
kwargs['null'] = True
super(LikertField, self).__init__(*args, **kwargs)
def get_prep_value(self, value):
"""The field expects a number as a string (ie. '2').
Unscored fields are empty strings and are stored as NULL
"""
if value is None:
return None
if isinstance(value, string_types) and len(value) == 0:
return None
return int(value)
def formfield(self, **kwargs):
defaults = {
'min_value': 0,
'form_class': forms.LikertField
}
defaults.update(kwargs)
return super(LikertField, self).formfield(**defaults)
|
Handle empty strings from unanswered items
|
Handle empty strings from unanswered items
|
Python
|
bsd-3-clause
|
kelvinwong-ca/django-likert-field,kelvinwong-ca/django-likert-field
|
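The new contract is easiest to read as inputs and outputs. A quick illustration with a bare field instance (sufficient here, since get_prep_value does not touch the database); the results follow directly from the method body above:

field = LikertField()
field.get_prep_value(None)  # -> None (stored as NULL)
field.get_prep_value('')    # -> None (unanswered item)
field.get_prep_value('2')   # -> 2
field.get_prep_value(3)     # -> 3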
049287c2ec73d46bbc4ad0c4b4778719d6fd4505
|
src/dicomweb_client/__init__.py
|
src/dicomweb_client/__init__.py
|
__version__ = '0.9.4'
from dicomweb_client.api import DICOMwebClient
|
__version__ = '0.10.0'
from dicomweb_client.api import DICOMwebClient
|
Increase package version to 0.10.0
|
Increase package version to 0.10.0
|
Python
|
mit
|
MGHComputationalPathology/dicomweb-client
|
__version__ = '0.9.4'
from dicomweb_client.api import DICOMwebClient
Increase package version to 0.10.0
|
__version__ = '0.10.0'
from dicomweb_client.api import DICOMwebClient
|
<commit_before>__version__ = '0.9.4'
from dicomweb_client.api import DICOMwebClient
<commit_msg>Increase package version to 0.10.0<commit_after>
|
__version__ = '0.10.0'
from dicomweb_client.api import DICOMwebClient
|
__version__ = '0.9.4'
from dicomweb_client.api import DICOMwebClient
Increase package version to 0.10.0__version__ = '0.10.0'
from dicomweb_client.api import DICOMwebClient
|
<commit_before>__version__ = '0.9.4'
from dicomweb_client.api import DICOMwebClient
<commit_msg>Increase package version to 0.10.0<commit_after>__version__ = '0.10.0'
from dicomweb_client.api import DICOMwebClient
|
22855458c7c683353f2ed7b577289b63da8bc9c6
|
src/scikit-cycling/skcycling/utils/io_fit.py
|
src/scikit-cycling/skcycling/utils/io_fit.py
|
import numpy as np
from fitparse import FitFile
def load_power_from_fit(filename):
""" Method to open the power data from FIT file into a numpy array.
Parameters
----------
filename: str,
Path to the FIT file.
"""
    # Check that the filename has the correct extension
    if not filename.endswith('.fit'):
raise ValueError('The file does not have the right extension. Expected *.fit.')
# Create an object to open the activity
activity = FitFile(filename)
activity.parse()
# Get only the power records
records = list(activity.get_messages(name='record'))
# Append the different values inside a list which will be later
# converted to numpy array
power_rec = np.zeros((len(records), ))
# Go through each record
for idx_rec, rec in enumerate(records):
# Extract only the value regarding the power
p = rec.get_value('power')
if p is not None:
power_rec[idx_rec] = float(p)
else:
            raise ValueError('There is a record without power values. Check what is happening.')
return power_rec
|
import numpy as np
from fitparse import FitFile
def load_power_from_fit(filename):
""" Method to open the power data from FIT file into a numpy array.
Parameters
----------
filename: str,
Path to the FIT file.
"""
    # Check that the filename has the correct extension
    if not filename.endswith('.fit'):
raise ValueError('The file does not have the right extension. Expected *.fit.')
# Create an object to open the activity
activity = FitFile(filename)
activity.parse()
# Get only the power records
records = list(activity.get_messages(name='record'))
# Append the different values inside a list which will be later
# converted to numpy array
power_rec = np.zeros((len(records), ))
# Go through each record
for idx_rec, rec in enumerate(records):
# Extract only the value regarding the power
p = rec.get_value('power')
if p is not None:
power_rec[idx_rec] = float(p)
else:
            # raise ValueError('There is a record without power values. Check what is happening.')
            # We set the value to 0 since it will not influence
            # the computation of the RPP
power_rec[idx_rec] = 0.
return power_rec
|
Solve the issue of the power meter getting disconnected during the ride
|
Solve the issue of the power meter getting disconnected during the ride
|
Python
|
mit
|
glemaitre/power-profile,glemaitre/power-profile,clemaitre58/power-profile,clemaitre58/power-profile
|
import numpy as np
from fitparse import FitFile
def load_power_from_fit(filename):
""" Method to open the power data from FIT file into a numpy array.
Parameters
----------
filename: str,
Path to the FIT file.
"""
    # Check that the filename has the correct extension
    if not filename.endswith('.fit'):
raise ValueError('The file does not have the right extension. Expected *.fit.')
# Create an object to open the activity
activity = FitFile(filename)
activity.parse()
# Get only the power records
records = list(activity.get_messages(name='record'))
# Append the different values inside a list which will be later
# converted to numpy array
power_rec = np.zeros((len(records), ))
# Go through each record
for idx_rec, rec in enumerate(records):
# Extract only the value regarding the power
p = rec.get_value('power')
if p is not None:
power_rec[idx_rec] = float(p)
else:
            raise ValueError('There is a record without power values. Check what is happening.')
return power_rec
Solve the issue of the power meter getting disconnected during the ride
|
import numpy as np
from fitparse import FitFile
def load_power_from_fit(filename):
""" Method to open the power data from FIT file into a numpy array.
Parameters
----------
filename: str,
Path to the FIT file.
"""
    # Check that the filename has the correct extension
    if not filename.endswith('.fit'):
raise ValueError('The file does not have the right extension. Expected *.fit.')
# Create an object to open the activity
activity = FitFile(filename)
activity.parse()
# Get only the power records
records = list(activity.get_messages(name='record'))
# Append the different values inside a list which will be later
# converted to numpy array
power_rec = np.zeros((len(records), ))
# Go through each record
for idx_rec, rec in enumerate(records):
# Extract only the value regarding the power
p = rec.get_value('power')
if p is not None:
power_rec[idx_rec] = float(p)
else:
            # raise ValueError('There is a record without power values. Check what is happening.')
            # We set the value to 0 since it will not influence
            # the computation of the RPP
power_rec[idx_rec] = 0.
return power_rec
|
<commit_before>import numpy as np
from fitparse import FitFile
def load_power_from_fit(filename):
""" Method to open the power data from FIT file into a numpy array.
Parameters
----------
filename: str,
Path to the FIT file.
"""
    # Check that the filename has the correct extension
    if not filename.endswith('.fit'):
raise ValueError('The file does not have the right extension. Expected *.fit.')
# Create an object to open the activity
activity = FitFile(filename)
activity.parse()
# Get only the power records
records = list(activity.get_messages(name='record'))
# Append the different values inside a list which will be later
# converted to numpy array
power_rec = np.zeros((len(records), ))
# Go through each record
for idx_rec, rec in enumerate(records):
# Extract only the value regarding the power
p = rec.get_value('power')
if p is not None:
power_rec[idx_rec] = float(p)
else:
            raise ValueError('There is a record without power values. Check what is happening.')
return power_rec
<commit_msg>Solve the issue of the power meter getting disconnected during the ride<commit_after>
|
import numpy as np
from fitparse import FitFile
def load_power_from_fit(filename):
""" Method to open the power data from FIT file into a numpy array.
Parameters
----------
filename: str,
Path to the FIT file.
"""
    # Check that the filename has the correct extension
    if not filename.endswith('.fit'):
raise ValueError('The file does not have the right extension. Expected *.fit.')
# Create an object to open the activity
activity = FitFile(filename)
activity.parse()
# Get only the power records
records = list(activity.get_messages(name='record'))
# Append the different values inside a list which will be later
# converted to numpy array
power_rec = np.zeros((len(records), ))
# Go through each record
for idx_rec, rec in enumerate(records):
# Extract only the value regarding the power
p = rec.get_value('power')
if p is not None:
power_rec[idx_rec] = float(p)
else:
            # raise ValueError('There is a record without power values. Check what is happening.')
            # We set the value to 0 since it will not influence
            # the computation of the RPP
power_rec[idx_rec] = 0.
return power_rec
|
import numpy as np
from fitparse import FitFile
def load_power_from_fit(filename):
""" Method to open the power data from FIT file into a numpy array.
Parameters
----------
filename: str,
Path to the FIT file.
"""
    # Check that the filename has the correct extension
    if not filename.endswith('.fit'):
raise ValueError('The file does not have the right extension. Expected *.fit.')
# Create an object to open the activity
activity = FitFile(filename)
activity.parse()
# Get only the power records
records = list(activity.get_messages(name='record'))
# Append the different values inside a list which will be later
# converted to numpy array
power_rec = np.zeros((len(records), ))
# Go through each record
for idx_rec, rec in enumerate(records):
# Extract only the value regarding the power
p = rec.get_value('power')
if p is not None:
power_rec[idx_rec] = float(p)
else:
            raise ValueError('There is a record without power values. Check what is happening.')
return power_rec
Solve the issue of the power meter getting disconnected during the rideimport numpy as np
from fitparse import FitFile
def load_power_from_fit(filename):
""" Method to open the power data from FIT file into a numpy array.
Parameters
----------
filename: str,
Path to the FIT file.
"""
    # Check that the filename has the correct extension
    if not filename.endswith('.fit'):
raise ValueError('The file does not have the right extension. Expected *.fit.')
# Create an object to open the activity
activity = FitFile(filename)
activity.parse()
# Get only the power records
records = list(activity.get_messages(name='record'))
# Append the different values inside a list which will be later
# converted to numpy array
power_rec = np.zeros((len(records), ))
# Go through each record
for idx_rec, rec in enumerate(records):
# Extract only the value regarding the power
p = rec.get_value('power')
if p is not None:
power_rec[idx_rec] = float(p)
else:
            # raise ValueError('There is a record without power values. Check what is happening.')
            # We set the value to 0 since it will not influence
            # the computation of the RPP
power_rec[idx_rec] = 0.
return power_rec
|
<commit_before>import numpy as np
from fitparse import FitFile
def load_power_from_fit(filename):
""" Method to open the power data from FIT file into a numpy array.
Parameters
----------
filename: str,
Path to the FIT file.
"""
    # Check that the filename has the correct extension
    if not filename.endswith('.fit'):
raise ValueError('The file does not have the right extension. Expected *.fit.')
# Create an object to open the activity
activity = FitFile(filename)
activity.parse()
# Get only the power records
records = list(activity.get_messages(name='record'))
# Append the different values inside a list which will be later
# converted to numpy array
power_rec = np.zeros((len(records), ))
# Go through each record
for idx_rec, rec in enumerate(records):
# Extract only the value regarding the power
p = rec.get_value('power')
if p is not None:
power_rec[idx_rec] = float(p)
else:
            raise ValueError('There is a record without power values. Check what is happening.')
return power_rec
<commit_msg>Solve the issue of the power meter getting disconnected during the ride<commit_after>import numpy as np
from fitparse import FitFile
def load_power_from_fit(filename):
""" Method to open the power data from FIT file into a numpy array.
Parameters
----------
filename: str,
Path to the FIT file.
"""
    # Check that the filename has the correct extension
    if not filename.endswith('.fit'):
raise ValueError('The file does not have the right extension. Expected *.fit.')
# Create an object to open the activity
activity = FitFile(filename)
activity.parse()
# Get only the power records
records = list(activity.get_messages(name='record'))
# Append the different values inside a list which will be later
# converted to numpy array
power_rec = np.zeros((len(records), ))
# Go through each record
for idx_rec, rec in enumerate(records):
# Extract only the value regarding the power
p = rec.get_value('power')
if p is not None:
power_rec[idx_rec] = float(p)
else:
            # raise ValueError('There is a record without power values. Check what is happening.')
            # We set the value to 0 since it will not influence
            # the computation of the RPP
power_rec[idx_rec] = 0.
return power_rec
|
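A short usage sketch for the fixed loader — hypothetical: 'ride.fit' is a placeholder path and the import mirrors the module path skcycling/utils/io_fit.py shown above. After the change, samples taken while the power meter was disconnected contribute 0 W instead of aborting the whole load with a ValueError:

import numpy as np
from skcycling.utils.io_fit import load_power_from_fit

power = load_power_from_fit('ride.fit')  # placeholder path to a FIT activity
print('%d samples, mean power %.1f W' % (power.size, power.mean()))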
844e3635aeb0144f7e4cc0d9de3bfc219312bbe5
|
ocradmin/plugins/views.py
|
ocradmin/plugins/views.py
|
"""
RESTful interface to interacting with OCR plugins.
"""
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.shortcuts import render_to_response
from ocradmin.ocrtasks.models import OcrTask
from ocradmin.plugins.manager import ModuleManager
import logging
logger = logging.getLogger(__name__)
import simplejson
import tasks
def query(request):
"""
Query plugin info. This returns a list
    of available OCR engines and a URL that
    can be queried when one of them is selected.
"""
stages=request.GET.getlist("stage")
return HttpResponse(
ModuleManager.get_json(*stages), mimetype="application/json")
def runscript(request):
"""
Execute a script (sent as JSON).
"""
evalnode = request.POST.get("node", "")
jsondata = request.POST.get("script", simplejson.dumps({"arse":"spaz"}))
script = simplejson.loads(jsondata)
async = OcrTask.run_celery_task("run.script", evalnode, script,
untracked=True, asyncronous=True, queue="interactive")
out = dict(task_id=async.task_id, status=async.status,
results=async.result)
return HttpResponse(simplejson.dumps(out), mimetype="application/json")
|
"""
RESTful interface to interacting with OCR plugins.
"""
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.shortcuts import render_to_response
from ocradmin.ocrtasks.models import OcrTask
from ocradmin.plugins.manager import ModuleManager
import logging
logger = logging.getLogger(__name__)
import simplejson
import tasks
def query(request):
"""
Query plugin info. This returns a list
    of available OCR engines and a URL that
    can be queried when one of them is selected.
"""
stages=request.GET.getlist("stage")
return HttpResponse(
ModuleManager.get_json(*stages), mimetype="application/json")
def runscript(request):
"""
Execute a script (sent as JSON).
"""
evalnode = request.POST.get("node", "")
jsondata = request.POST.get("script", simplejson.dumps({"arse":"spaz"}))
script = simplejson.loads(jsondata)
async = OcrTask.run_celery_task("run.script", evalnode, script,
untracked=True, asyncronous=True, queue="interactive")
out = dict(
node=evalnode,
task_id=async.task_id,
status=async.status,
results=async.result
)
return HttpResponse(simplejson.dumps(out), mimetype="application/json")
|
Include the eval'd node type in the async return
|
Include the eval'd node type in the async return
|
Python
|
apache-2.0
|
vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium
|
"""
RESTful interface to interacting with OCR plugins.
"""
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.shortcuts import render_to_response
from ocradmin.ocrtasks.models import OcrTask
from ocradmin.plugins.manager import ModuleManager
import logging
logger = logging.getLogger(__name__)
import simplejson
import tasks
def query(request):
"""
Query plugin info. This returns a list
    of available OCR engines and a URL that
    can be queried when one of them is selected.
"""
stages=request.GET.getlist("stage")
return HttpResponse(
ModuleManager.get_json(*stages), mimetype="application/json")
def runscript(request):
"""
Execute a script (sent as JSON).
"""
evalnode = request.POST.get("node", "")
jsondata = request.POST.get("script", simplejson.dumps({"arse":"spaz"}))
script = simplejson.loads(jsondata)
async = OcrTask.run_celery_task("run.script", evalnode, script,
untracked=True, asyncronous=True, queue="interactive")
out = dict(task_id=async.task_id, status=async.status,
results=async.result)
return HttpResponse(simplejson.dumps(out), mimetype="application/json")
Include the eval'd node type in the async return
|
"""
RESTful interface to interacting with OCR plugins.
"""
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.shortcuts import render_to_response
from ocradmin.ocrtasks.models import OcrTask
from ocradmin.plugins.manager import ModuleManager
import logging
logger = logging.getLogger(__name__)
import simplejson
import tasks
def query(request):
"""
Query plugin info. This returns a list
    of available OCR engines and a URL that
    can be queried when one of them is selected.
"""
stages=request.GET.getlist("stage")
return HttpResponse(
ModuleManager.get_json(*stages), mimetype="application/json")
def runscript(request):
"""
Execute a script (sent as JSON).
"""
evalnode = request.POST.get("node", "")
jsondata = request.POST.get("script", simplejson.dumps({"arse":"spaz"}))
script = simplejson.loads(jsondata)
async = OcrTask.run_celery_task("run.script", evalnode, script,
untracked=True, asyncronous=True, queue="interactive")
out = dict(
node=evalnode,
task_id=async.task_id,
status=async.status,
results=async.result
)
return HttpResponse(simplejson.dumps(out), mimetype="application/json")
|
<commit_before>"""
RESTful interface to interacting with OCR plugins.
"""
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.shortcuts import render_to_response
from ocradmin.ocrtasks.models import OcrTask
from ocradmin.plugins.manager import ModuleManager
import logging
logger = logging.getLogger(__name__)
import simplejson
import tasks
def query(request):
"""
Query plugin info. This returns a list
    of available OCR engines and a URL that
    can be queried when one of them is selected.
"""
stages=request.GET.getlist("stage")
return HttpResponse(
ModuleManager.get_json(*stages), mimetype="application/json")
def runscript(request):
"""
Execute a script (sent as JSON).
"""
evalnode = request.POST.get("node", "")
jsondata = request.POST.get("script", simplejson.dumps({"arse":"spaz"}))
script = simplejson.loads(jsondata)
async = OcrTask.run_celery_task("run.script", evalnode, script,
untracked=True, asyncronous=True, queue="interactive")
out = dict(task_id=async.task_id, status=async.status,
results=async.result)
return HttpResponse(simplejson.dumps(out), mimetype="application/json")
<commit_msg>Include the eval'd node type in the async return<commit_after>
|
"""
RESTful interface to interacting with OCR plugins.
"""
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.shortcuts import render_to_response
from ocradmin.ocrtasks.models import OcrTask
from ocradmin.plugins.manager import ModuleManager
import logging
logger = logging.getLogger(__name__)
import simplejson
import tasks
def query(request):
"""
Query plugin info. This returns a list
    of available OCR engines and a URL that
    can be queried when one of them is selected.
"""
stages=request.GET.getlist("stage")
return HttpResponse(
ModuleManager.get_json(*stages), mimetype="application/json")
def runscript(request):
"""
Execute a script (sent as JSON).
"""
evalnode = request.POST.get("node", "")
jsondata = request.POST.get("script", simplejson.dumps({"arse":"spaz"}))
script = simplejson.loads(jsondata)
async = OcrTask.run_celery_task("run.script", evalnode, script,
untracked=True, asyncronous=True, queue="interactive")
out = dict(
node=evalnode,
task_id=async.task_id,
status=async.status,
results=async.result
)
return HttpResponse(simplejson.dumps(out), mimetype="application/json")
|
"""
RESTful interface to interacting with OCR plugins.
"""
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.shortcuts import render_to_response
from ocradmin.ocrtasks.models import OcrTask
from ocradmin.plugins.manager import ModuleManager
import logging
logger = logging.getLogger(__name__)
import simplejson
import tasks
def query(request):
"""
Query plugin info. This returns a list
    of available OCR engines and a URL that
    can be queried when one of them is selected.
"""
stages=request.GET.getlist("stage")
return HttpResponse(
ModuleManager.get_json(*stages), mimetype="application/json")
def runscript(request):
"""
Execute a script (sent as JSON).
"""
evalnode = request.POST.get("node", "")
jsondata = request.POST.get("script", simplejson.dumps({"arse":"spaz"}))
script = simplejson.loads(jsondata)
async = OcrTask.run_celery_task("run.script", evalnode, script,
untracked=True, asyncronous=True, queue="interactive")
out = dict(task_id=async.task_id, status=async.status,
results=async.result)
return HttpResponse(simplejson.dumps(out), mimetype="application/json")
Include the eval'd node type in the async return"""
RESTful interface to interacting with OCR plugins.
"""
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.shortcuts import render_to_response
from ocradmin.ocrtasks.models import OcrTask
from ocradmin.plugins.manager import ModuleManager
import logging
logger = logging.getLogger(__name__)
import simplejson
import tasks
def query(request):
"""
Query plugin info. This returns a list
    of available OCR engines and a URL that
    can be queried when one of them is selected.
"""
stages=request.GET.getlist("stage")
return HttpResponse(
ModuleManager.get_json(*stages), mimetype="application/json")
def runscript(request):
"""
Execute a script (sent as JSON).
"""
evalnode = request.POST.get("node", "")
jsondata = request.POST.get("script", simplejson.dumps({"arse":"spaz"}))
script = simplejson.loads(jsondata)
async = OcrTask.run_celery_task("run.script", evalnode, script,
untracked=True, asyncronous=True, queue="interactive")
out = dict(
node=evalnode,
task_id=async.task_id,
status=async.status,
results=async.result
)
return HttpResponse(simplejson.dumps(out), mimetype="application/json")
|
<commit_before>"""
RESTful interface to interacting with OCR plugins.
"""
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.shortcuts import render_to_response
from ocradmin.ocrtasks.models import OcrTask
from ocradmin.plugins.manager import ModuleManager
import logging
logger = logging.getLogger(__name__)
import simplejson
import tasks
def query(request):
"""
Query plugin info. This returns a list
    of available OCR engines and a URL that
    can be queried when one of them is selected.
"""
stages=request.GET.getlist("stage")
return HttpResponse(
ModuleManager.get_json(*stages), mimetype="application/json")
def runscript(request):
"""
Execute a script (sent as JSON).
"""
evalnode = request.POST.get("node", "")
jsondata = request.POST.get("script", simplejson.dumps({"arse":"spaz"}))
script = simplejson.loads(jsondata)
async = OcrTask.run_celery_task("run.script", evalnode, script,
untracked=True, asyncronous=True, queue="interactive")
out = dict(task_id=async.task_id, status=async.status,
results=async.result)
return HttpResponse(simplejson.dumps(out), mimetype="application/json")
<commit_msg>Include the eval'd node type in the async return<commit_after>"""
RESTful interface to interacting with OCR plugins.
"""
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.shortcuts import render_to_response
from ocradmin.ocrtasks.models import OcrTask
from ocradmin.plugins.manager import ModuleManager
import logging
logger = logging.getLogger(__name__)
import simplejson
import tasks
def query(request):
"""
Query plugin info. This returns a list
    of available OCR engines and a URL that
    can be queried when one of them is selected.
"""
stages=request.GET.getlist("stage")
return HttpResponse(
ModuleManager.get_json(*stages), mimetype="application/json")
def runscript(request):
"""
Execute a script (sent as JSON).
"""
evalnode = request.POST.get("node", "")
jsondata = request.POST.get("script", simplejson.dumps({"arse":"spaz"}))
script = simplejson.loads(jsondata)
async = OcrTask.run_celery_task("run.script", evalnode, script,
untracked=True, asyncronous=True, queue="interactive")
out = dict(
node=evalnode,
task_id=async.task_id,
status=async.status,
results=async.result
)
return HttpResponse(simplejson.dumps(out), mimetype="application/json")
|
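A hypothetical client-side sketch of what the extra field buys: a page that fires several evaluations concurrently can match each JSON reply to the node it asked about. The URL and node name are placeholders; only the payload keys come from the view above:

import json
from django.test import Client

client = Client()
response = client.post('/plugins/runscript/',  # placeholder URL for the runscript view
                       {'node': 'SomeNode', 'script': json.dumps({})})
payload = json.loads(response.content)
assert payload['node'] == 'SomeNode'  # new in this commit: the eval'd node is echoed back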
b052c2fd93cd578723c93dbe7357f1f3c82273eb
|
src/poliastro/patched_conics.py
|
src/poliastro/patched_conics.py
|
# coding: utf-8
"""Patched Conics Computations
Contains methods to compute interplanetary trajectories approximating the three
body problem with Patched Conics.
"""
from astropy import units as u
from poliastro.twobody import Orbit
from poliastro.constants import J2000
@u.quantity_input(a=u.m)
def compute_soi(body, a=None):
"""Approximated radius of the Laplace Sphere of Influence (SOI) for a body.
Parameters
----------
body : `~poliastro.bodies.Body`
Astronomical body which the SOI's radius is computed for
a : float or None, optional
Semimajor Axis of the body's orbit
Returns
-------
astropy.units.quantity.Quantity
Approximated radius of the Sphere of Influence (SOI) [m]
"""
# Compute semimajor axis at epoch J2000 for the body if it was not
# introduced by the user
if a is None:
try:
ss = Orbit.from_body_ephem(body, J2000)
a = ss.a
r_SOI = a * (body.k / body.parent.k)**(2 / 5)
return r_SOI.decompose()
except KeyError:
print("To compute the semimajor axis for Moon",
" and Pluto use the JPL ephemeris: ")
print(">>> from astropy.coordinates import solar_system_ephemeris")
print('>>> solar_system_ephemeris.set("jpl")')
pass
|
# coding: utf-8
"""Patched Conics Computations
Contains methods to compute interplanetary trajectories approximating the three
body problem with Patched Conics.
"""
from astropy import units as u
from poliastro.twobody import Orbit
from poliastro.constants import J2000
@u.quantity_input(a=u.m)
def compute_soi(body, a=None):
"""Approximated radius of the Laplace Sphere of Influence (SOI) for a body.
Parameters
----------
body : `~poliastro.bodies.Body`
Astronomical body which the SOI's radius is computed for
a : float or None, optional
Semimajor Axis of the body's orbit
Returns
-------
astropy.units.quantity.Quantity
Approximated radius of the Sphere of Influence (SOI) [m]
"""
# Compute semimajor axis at epoch J2000 for the body if it was not
# introduced by the user
if a is None:
try:
ss = Orbit.from_body_ephem(body, J2000)
a = ss.a
r_SOI = a * (body.k / body.parent.k)**(2 / 5)
return r_SOI.decompose()
except KeyError:
raise RuntimeError(
"""To compute the semimajor axis for Moon and Pluto use the JPL ephemeris:
>>> from astropy.coordinates import solar_system_ephemeris
>>> solar_system_ephemeris.set("jpl")""")
|
Replace prints by an exception
|
Replace prints by an exception
|
Python
|
mit
|
anhiga/poliastro,Juanlu001/poliastro,anhiga/poliastro,poliastro/poliastro,newlawrence/poliastro,newlawrence/poliastro,Juanlu001/poliastro,newlawrence/poliastro,anhiga/poliastro,Juanlu001/poliastro
|
# coding: utf-8
"""Patched Conics Computations
Contains methods to compute interplanetary trajectories approximating the three
body problem with Patched Conics.
"""
from astropy import units as u
from poliastro.twobody import Orbit
from poliastro.constants import J2000
@u.quantity_input(a=u.m)
def compute_soi(body, a=None):
"""Approximated radius of the Laplace Sphere of Influence (SOI) for a body.
Parameters
----------
body : `~poliastro.bodies.Body`
Astronomical body which the SOI's radius is computed for
a : float or None, optional
Semimajor Axis of the body's orbit
Returns
-------
astropy.units.quantity.Quantity
Approximated radius of the Sphere of Influence (SOI) [m]
"""
# Compute semimajor axis at epoch J2000 for the body if it was not
# introduced by the user
if a is None:
try:
ss = Orbit.from_body_ephem(body, J2000)
a = ss.a
r_SOI = a * (body.k / body.parent.k)**(2 / 5)
return r_SOI.decompose()
except KeyError:
print("To compute the semimajor axis for Moon",
" and Pluto use the JPL ephemeris: ")
print(">>> from astropy.coordinates import solar_system_ephemeris")
print('>>> solar_system_ephemeris.set("jpl")')
pass
Replace prints by an exception
|
# coding: utf-8
"""Patched Conics Computations
Contains methods to compute interplanetary trajectories approximating the three
body problem with Patched Conics.
"""
from astropy import units as u
from poliastro.twobody import Orbit
from poliastro.constants import J2000
@u.quantity_input(a=u.m)
def compute_soi(body, a=None):
"""Approximated radius of the Laplace Sphere of Influence (SOI) for a body.
Parameters
----------
body : `~poliastro.bodies.Body`
Astronomical body which the SOI's radius is computed for
a : float or None, optional
Semimajor Axis of the body's orbit
Returns
-------
astropy.units.quantity.Quantity
Approximated radius of the Sphere of Influence (SOI) [m]
"""
# Compute semimajor axis at epoch J2000 for the body if it was not
# introduced by the user
if a is None:
try:
ss = Orbit.from_body_ephem(body, J2000)
a = ss.a
r_SOI = a * (body.k / body.parent.k)**(2 / 5)
return r_SOI.decompose()
except KeyError:
raise RuntimeError(
"""To compute the semimajor axis for Moon and Pluto use the JPL ephemeris:
>>> from astropy.coordinates import solar_system_ephemeris
>>> solar_system_ephemeris.set("jpl")""")
|
<commit_before># coding: utf-8
"""Patched Conics Computations
Contains methods to compute interplanetary trajectories approximating the three
body problem with Patched Conics.
"""
from astropy import units as u
from poliastro.twobody import Orbit
from poliastro.constants import J2000
@u.quantity_input(a=u.m)
def compute_soi(body, a=None):
"""Approximated radius of the Laplace Sphere of Influence (SOI) for a body.
Parameters
----------
body : `~poliastro.bodies.Body`
Astronomical body which the SOI's radius is computed for
a : float or None, optional
Semimajor Axis of the body's orbit
Returns
-------
astropy.units.quantity.Quantity
Approximated radius of the Sphere of Influence (SOI) [m]
"""
# Compute semimajor axis at epoch J2000 for the body if it was not
# introduced by the user
if a is None:
try:
ss = Orbit.from_body_ephem(body, J2000)
a = ss.a
r_SOI = a * (body.k / body.parent.k)**(2 / 5)
return r_SOI.decompose()
except KeyError:
print("To compute the semimajor axis for Moon",
" and Pluto use the JPL ephemeris: ")
print(">>> from astropy.coordinates import solar_system_ephemeris")
print('>>> solar_system_ephemeris.set("jpl")')
pass
<commit_msg>Replace prints by an exception<commit_after>
|
# coding: utf-8
"""Patched Conics Computations
Contains methods to compute interplanetary trajectories approximating the three
body problem with Patched Conics.
"""
from astropy import units as u
from poliastro.twobody import Orbit
from poliastro.constants import J2000
@u.quantity_input(a=u.m)
def compute_soi(body, a=None):
"""Approximated radius of the Laplace Sphere of Influence (SOI) for a body.
Parameters
----------
body : `~poliastro.bodies.Body`
Astronomical body which the SOI's radius is computed for
a : float or None, optional
Semimajor Axis of the body's orbit
Returns
-------
astropy.units.quantity.Quantity
Approximated radius of the Sphere of Influence (SOI) [m]
"""
# Compute semimajor axis at epoch J2000 for the body if it was not
# introduced by the user
if a is None:
try:
ss = Orbit.from_body_ephem(body, J2000)
a = ss.a
r_SOI = a * (body.k / body.parent.k)**(2 / 5)
return r_SOI.decompose()
except KeyError:
raise RuntimeError(
"""To compute the semimajor axis for Moon and Pluto use the JPL ephemeris:
>>> from astropy.coordinates import solar_system_ephemeris
>>> solar_system_ephemeris.set("jpl")""")
|
# coding: utf-8
"""Patched Conics Computations
Contains methods to compute interplanetary trajectories approximating the three
body problem with Patched Conics.
"""
from astropy import units as u
from poliastro.twobody import Orbit
from poliastro.constants import J2000
@u.quantity_input(a=u.m)
def compute_soi(body, a=None):
"""Approximated radius of the Laplace Sphere of Influence (SOI) for a body.
Parameters
----------
body : `~poliastro.bodies.Body`
Astronomical body which the SOI's radius is computed for
a : float or None, optional
Semimajor Axis of the body's orbit
Returns
-------
astropy.units.quantity.Quantity
Approximated radius of the Sphere of Influence (SOI) [m]
"""
# Compute semimajor axis at epoch J2000 for the body if it was not
# introduced by the user
if a is None:
try:
ss = Orbit.from_body_ephem(body, J2000)
a = ss.a
r_SOI = a * (body.k / body.parent.k)**(2 / 5)
return r_SOI.decompose()
except KeyError:
print("To compute the semimajor axis for Moon",
" and Pluto use the JPL ephemeris: ")
print(">>> from astropy.coordinates import solar_system_ephemeris")
print('>>> solar_system_ephemeris.set("jpl")')
pass
Replace prints by an exception# coding: utf-8
"""Patched Conics Computations
Contains methods to compute interplanetary trajectories approximating the three
body problem with Patched Conics.
"""
from astropy import units as u
from poliastro.twobody import Orbit
from poliastro.constants import J2000
@u.quantity_input(a=u.m)
def compute_soi(body, a=None):
"""Approximated radius of the Laplace Sphere of Influence (SOI) for a body.
Parameters
----------
body : `~poliastro.bodies.Body`
Astronomical body which the SOI's radius is computed for
a : float or None, optional
Semimajor Axis of the body's orbit
Returns
-------
astropy.units.quantity.Quantity
Approximated radius of the Sphere of Influence (SOI) [m]
"""
# Compute semimajor axis at epoch J2000 for the body if it was not
# introduced by the user
if a is None:
try:
ss = Orbit.from_body_ephem(body, J2000)
a = ss.a
r_SOI = a * (body.k / body.parent.k)**(2 / 5)
return r_SOI.decompose()
except KeyError:
raise RuntimeError(
"""To compute the semimajor axis for Moon and Pluto use the JPL ephemeris:
>>> from astropy.coordinates import solar_system_ephemeris
>>> solar_system_ephemeris.set("jpl")""")
|
<commit_before># coding: utf-8
"""Patched Conics Computations
Contains methods to compute interplanetary trajectories approximating the three
body problem with Patched Conics.
"""
from astropy import units as u
from poliastro.twobody import Orbit
from poliastro.constants import J2000
@u.quantity_input(a=u.m)
def compute_soi(body, a=None):
"""Approximated radius of the Laplace Sphere of Influence (SOI) for a body.
Parameters
----------
body : `~poliastro.bodies.Body`
Astronomical body which the SOI's radius is computed for
a : float or None, optional
Semimajor Axis of the body's orbit
Returns
-------
astropy.units.quantity.Quantity
Approximated radius of the Sphere of Influence (SOI) [m]
"""
# Compute semimajor axis at epoch J2000 for the body if it was not
# introduced by the user
if a is None:
try:
ss = Orbit.from_body_ephem(body, J2000)
a = ss.a
r_SOI = a * (body.k / body.parent.k)**(2 / 5)
return r_SOI.decompose()
except KeyError:
print("To compute the semimajor axis for Moon",
" and Pluto use the JPL ephemeris: ")
print(">>> from astropy.coordinates import solar_system_ephemeris")
print('>>> solar_system_ephemeris.set("jpl")')
pass
<commit_msg>Replace prints by an exception<commit_after># coding: utf-8
"""Patched Conics Computations
Contains methods to compute interplanetary trajectories approximating the three
body problem with Patched Conics.
"""
from astropy import units as u
from poliastro.twobody import Orbit
from poliastro.constants import J2000
@u.quantity_input(a=u.m)
def compute_soi(body, a=None):
"""Approximated radius of the Laplace Sphere of Influence (SOI) for a body.
Parameters
----------
body : `~poliastro.bodies.Body`
Astronomical body which the SOI's radius is computed for
a : float or None, optional
Semimajor Axis of the body's orbit
Returns
-------
astropy.units.quantity.Quantity
Approximated radius of the Sphere of Influence (SOI) [m]
"""
# Compute semimajor axis at epoch J2000 for the body if it was not
# introduced by the user
if a is None:
try:
ss = Orbit.from_body_ephem(body, J2000)
a = ss.a
r_SOI = a * (body.k / body.parent.k)**(2 / 5)
return r_SOI.decompose()
except KeyError:
raise RuntimeError(
"""To compute the semimajor axis for Moon and Pluto use the JPL ephemeris:
>>> from astropy.coordinates import solar_system_ephemeris
>>> solar_system_ephemeris.set("jpl")""")
|
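A hypothetical usage sketch of the happy path, where the semimajor axis is looked up from the built-in J2000 ephemeris so the KeyError branch is never reached; for the Earth, a * (k_body / k_parent)**(2 / 5) comes out on the order of 9e8 m:

from poliastro.bodies import Earth
from poliastro.patched_conics import compute_soi

r_soi = compute_soi(Earth)  # a is taken from the ephemeris at epoch J2000
print(r_soi)                # roughly 9.2e8 m for the Earth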
d156beeaf0638e585c616d697e1ecd76a98d8a3f
|
axelrod/tests/test_reflex.py
|
axelrod/tests/test_reflex.py
|
"""
Test suite for Reflex Axelrod PD player.
"""
import axelrod
from test_player import TestPlayer
class Reflex_test(TestPlayer):
def test_initial_nice_strategy(self):
""" First response should always be cooperation. """
p1 = axelrod.Reflex()
p2 = axelrod.Player()
self.assertEqual(p1.strategy(p2), 'C')
def test_representation(self):
""" How do we appear? """
p1 = axelrod.Reflex()
self.assertEqual(str(p1), "Reflex")
def test_reset_method(self):
""" Does self.reset() reset the self? """
p1 = axelrod.Reflex()
p1.history = ['C', 'D', 'C', 'C']
p1.reset()
self.assertEqual(p1.history, [])
self.assertEqual(p1.response, 'C')
def test_stochastic(self):
""" We are not stochastic. """
self.assertFalse(axelrod.Reflex().stochastic)
|
"""
Test suite for Reflex Axelrod PD player.
"""
import axelrod
from test_player import TestPlayer
class Reflex_test(TestPlayer):
name = "Reflex"
player = axelrod.Reflex
stochastic = False
def test_strategy(self):
""" First response should always be cooperation. """
p1 = axelrod.Reflex()
p2 = axelrod.Player()
self.assertEqual(p1.strategy(p2), 'C')
def test_reset_method(self):
""" Does self.reset() reset the self? """
p1 = axelrod.Reflex()
p1.history = ['C', 'D', 'C', 'C']
p1.reset()
self.assertEqual(p1.history, [])
self.assertEqual(p1.response, 'C')
|
Simplify tests to new format.
|
Simplify tests to new format.
|
Python
|
mit
|
marcharper/Axelrod,ranjinidas/Axelrod,marcharper/Axelrod,ranjinidas/Axelrod
|
"""
Test suite for Reflex Axelrod PD player.
"""
import axelrod
from test_player import TestPlayer
class Reflex_test(TestPlayer):
def test_initial_nice_strategy(self):
""" First response should always be cooperation. """
p1 = axelrod.Reflex()
p2 = axelrod.Player()
self.assertEqual(p1.strategy(p2), 'C')
def test_representation(self):
""" How do we appear? """
p1 = axelrod.Reflex()
self.assertEqual(str(p1), "Reflex")
def test_reset_method(self):
""" Does self.reset() reset the self? """
p1 = axelrod.Reflex()
p1.history = ['C', 'D', 'C', 'C']
p1.reset()
self.assertEqual(p1.history, [])
self.assertEqual(p1.response, 'C')
def test_stochastic(self):
""" We are not stochastic. """
self.assertFalse(axelrod.Reflex().stochastic)
Simplify tests to new format.
|
"""
Test suite for Reflex Axelrod PD player.
"""
import axelrod
from test_player import TestPlayer
class Reflex_test(TestPlayer):
name = "Reflex"
player = axelrod.Reflex
stochastic = False
def test_strategy(self):
""" First response should always be cooperation. """
p1 = axelrod.Reflex()
p2 = axelrod.Player()
self.assertEqual(p1.strategy(p2), 'C')
def test_reset_method(self):
""" Does self.reset() reset the self? """
p1 = axelrod.Reflex()
p1.history = ['C', 'D', 'C', 'C']
p1.reset()
self.assertEqual(p1.history, [])
self.assertEqual(p1.response, 'C')
|
<commit_before>"""
Test suite for Reflex Axelrod PD player.
"""
import axelrod
from test_player import TestPlayer
class Reflex_test(TestPlayer):
def test_initial_nice_strategy(self):
""" First response should always be cooperation. """
p1 = axelrod.Reflex()
p2 = axelrod.Player()
self.assertEqual(p1.strategy(p2), 'C')
def test_representation(self):
""" How do we appear? """
p1 = axelrod.Reflex()
self.assertEqual(str(p1), "Reflex")
def test_reset_method(self):
""" Does self.reset() reset the self? """
p1 = axelrod.Reflex()
p1.history = ['C', 'D', 'C', 'C']
p1.reset()
self.assertEqual(p1.history, [])
self.assertEqual(p1.response, 'C')
def test_stochastic(self):
""" We are not stochastic. """
self.assertFalse(axelrod.Reflex().stochastic)
<commit_msg>Simplify tests to new format.<commit_after>
|
"""
Test suite for Reflex Axelrod PD player.
"""
import axelrod
from test_player import TestPlayer
class Reflex_test(TestPlayer):
name = "Reflex"
player = axelrod.Reflex
stochastic = False
def test_strategy(self):
""" First response should always be cooperation. """
p1 = axelrod.Reflex()
p2 = axelrod.Player()
self.assertEqual(p1.strategy(p2), 'C')
def test_reset_method(self):
""" Does self.reset() reset the self? """
p1 = axelrod.Reflex()
p1.history = ['C', 'D', 'C', 'C']
p1.reset()
self.assertEqual(p1.history, [])
self.assertEqual(p1.response, 'C')
|
"""
Test suite for Reflex Axelrod PD player.
"""
import axelrod
from test_player import TestPlayer
class Reflex_test(TestPlayer):
def test_initial_nice_strategy(self):
""" First response should always be cooperation. """
p1 = axelrod.Reflex()
p2 = axelrod.Player()
self.assertEqual(p1.strategy(p2), 'C')
def test_representation(self):
""" How do we appear? """
p1 = axelrod.Reflex()
self.assertEqual(str(p1), "Reflex")
def test_reset_method(self):
""" Does self.reset() reset the self? """
p1 = axelrod.Reflex()
p1.history = ['C', 'D', 'C', 'C']
p1.reset()
self.assertEqual(p1.history, [])
self.assertEqual(p1.response, 'C')
def test_stochastic(self):
""" We are not stochastic. """
self.assertFalse(axelrod.Reflex().stochastic)
Simplify tests to new format."""
Test suite for Reflex Axelrod PD player.
"""
import axelrod
from test_player import TestPlayer
class Reflex_test(TestPlayer):
name = "Reflex"
player = axelrod.Reflex
stochastic = False
def test_strategy(self):
""" First response should always be cooperation. """
p1 = axelrod.Reflex()
p2 = axelrod.Player()
self.assertEqual(p1.strategy(p2), 'C')
def test_reset_method(self):
""" Does self.reset() reset the self? """
p1 = axelrod.Reflex()
p1.history = ['C', 'D', 'C', 'C']
p1.reset()
self.assertEqual(p1.history, [])
self.assertEqual(p1.response, 'C')
|
<commit_before>"""
Test suite for Reflex Axelrod PD player.
"""
import axelrod
from test_player import TestPlayer
class Reflex_test(TestPlayer):
def test_initial_nice_strategy(self):
""" First response should always be cooperation. """
p1 = axelrod.Reflex()
p2 = axelrod.Player()
self.assertEqual(p1.strategy(p2), 'C')
def test_representation(self):
""" How do we appear? """
p1 = axelrod.Reflex()
self.assertEqual(str(p1), "Reflex")
def test_reset_method(self):
""" Does self.reset() reset the self? """
p1 = axelrod.Reflex()
p1.history = ['C', 'D', 'C', 'C']
p1.reset()
self.assertEqual(p1.history, [])
self.assertEqual(p1.response, 'C')
def test_stochastic(self):
""" We are not stochastic. """
self.assertFalse(axelrod.Reflex().stochastic)
<commit_msg>Simplify tests to new format.<commit_after>"""
Test suite for Reflex Axelrod PD player.
"""
import axelrod
from test_player import TestPlayer
class Reflex_test(TestPlayer):
name = "Reflex"
player = axelrod.Reflex
stochastic = False
def test_strategy(self):
""" First response should always be cooperation. """
p1 = axelrod.Reflex()
p2 = axelrod.Player()
self.assertEqual(p1.strategy(p2), 'C')
def test_reset_method(self):
""" Does self.reset() reset the self? """
p1 = axelrod.Reflex()
p1.history = ['C', 'D', 'C', 'C']
p1.reset()
self.assertEqual(p1.history, [])
self.assertEqual(p1.response, 'C')
|
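A hypothetical sketch of how a shared base class can consume the declarative name/player/stochastic attributes the simplified test now supplies; the real test_player.py may differ in its details:

import unittest
import axelrod

class TestPlayer(unittest.TestCase):
    name = "Player"
    player = axelrod.Player
    stochastic = False

    def test_representation(self):
        # every subclass gets this check for free from its 'name' attribute
        self.assertEqual(str(self.player()), self.name)

    def test_stochastic(self):
        # and this one from its 'stochastic' attribute
        self.assertEqual(self.player().stochastic, self.stochastic)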
9f208fd476c8864a1b4c294b80d5d8191c400fb5
|
admin_sso/admin.py
|
admin_sso/admin.py
|
from django.conf.urls import url
from django.contrib import admin
from admin_sso import settings
from admin_sso.models import Assignment
class AssignmentAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'username', 'username_mode', 'domain',
'user', 'weight')
list_editable = ('username', 'username_mode', 'domain', 'user', 'weight')
def get_urls(self):
from admin_sso.views import start, end
info = (self.model._meta.app_label, self.model._meta.model_name)
return [
url(r'^start/$', start,
name='%s_%s_start' % info),
url(r'^end/$', end,
name='%s_%s_end' % info),
] + super(AssignmentAdmin, self).get_urls()
admin.site.register(Assignment, AssignmentAdmin)
if settings.DJANGO_ADMIN_SSO_ADD_LOGIN_BUTTON:
admin.site.login_template = 'admin_sso/login.html'
|
from django.conf.urls import url
from django.contrib import admin
from admin_sso import settings
from admin_sso.models import Assignment
class AssignmentAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'username', 'username_mode', 'domain',
'user', 'weight')
list_editable = ('username', 'username_mode', 'domain', 'user', 'weight')
raw_id_fields = ('user',)
def get_urls(self):
from admin_sso.views import start, end
info = (self.model._meta.app_label, self.model._meta.model_name)
return [
url(r'^start/$', start,
name='%s_%s_start' % info),
url(r'^end/$', end,
name='%s_%s_end' % info),
] + super(AssignmentAdmin, self).get_urls()
admin.site.register(Assignment, AssignmentAdmin)
if settings.DJANGO_ADMIN_SSO_ADD_LOGIN_BUTTON:
admin.site.login_template = 'admin_sso/login.html'
|
Add user to raw_id_fields, drastically improves UX on sites with many users
|
Add user to raw_id_fields, drastically improves UX on sites with many users
|
Python
|
bsd-3-clause
|
matthiask/django-admin-sso,diegobz/django-admin-sso,diegobz/django-admin-sso,matthiask/django-admin-sso
|
from django.conf.urls import url
from django.contrib import admin
from admin_sso import settings
from admin_sso.models import Assignment
class AssignmentAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'username', 'username_mode', 'domain',
'user', 'weight')
list_editable = ('username', 'username_mode', 'domain', 'user', 'weight')
def get_urls(self):
from admin_sso.views import start, end
info = (self.model._meta.app_label, self.model._meta.model_name)
return [
url(r'^start/$', start,
name='%s_%s_start' % info),
url(r'^end/$', end,
name='%s_%s_end' % info),
] + super(AssignmentAdmin, self).get_urls()
admin.site.register(Assignment, AssignmentAdmin)
if settings.DJANGO_ADMIN_SSO_ADD_LOGIN_BUTTON:
admin.site.login_template = 'admin_sso/login.html'
Add user to raw_id_fields, drastically improves UX on sites with many users
|
from django.conf.urls import url
from django.contrib import admin
from admin_sso import settings
from admin_sso.models import Assignment
class AssignmentAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'username', 'username_mode', 'domain',
'user', 'weight')
list_editable = ('username', 'username_mode', 'domain', 'user', 'weight')
raw_id_fields = ('user',)
def get_urls(self):
from admin_sso.views import start, end
info = (self.model._meta.app_label, self.model._meta.model_name)
return [
url(r'^start/$', start,
name='%s_%s_start' % info),
url(r'^end/$', end,
name='%s_%s_end' % info),
] + super(AssignmentAdmin, self).get_urls()
admin.site.register(Assignment, AssignmentAdmin)
if settings.DJANGO_ADMIN_SSO_ADD_LOGIN_BUTTON:
admin.site.login_template = 'admin_sso/login.html'
|
<commit_before>from django.conf.urls import url
from django.contrib import admin
from admin_sso import settings
from admin_sso.models import Assignment
class AssignmentAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'username', 'username_mode', 'domain',
'user', 'weight')
list_editable = ('username', 'username_mode', 'domain', 'user', 'weight')
def get_urls(self):
from admin_sso.views import start, end
info = (self.model._meta.app_label, self.model._meta.model_name)
return [
url(r'^start/$', start,
name='%s_%s_start' % info),
url(r'^end/$', end,
name='%s_%s_end' % info),
] + super(AssignmentAdmin, self).get_urls()
admin.site.register(Assignment, AssignmentAdmin)
if settings.DJANGO_ADMIN_SSO_ADD_LOGIN_BUTTON:
admin.site.login_template = 'admin_sso/login.html'
<commit_msg>Add user to raw_id_fields, drastically improves UX on sites with many users<commit_after>
|
from django.conf.urls import url
from django.contrib import admin
from admin_sso import settings
from admin_sso.models import Assignment
class AssignmentAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'username', 'username_mode', 'domain',
'user', 'weight')
list_editable = ('username', 'username_mode', 'domain', 'user', 'weight')
raw_id_fields = ('user',)
def get_urls(self):
from admin_sso.views import start, end
info = (self.model._meta.app_label, self.model._meta.model_name)
return [
url(r'^start/$', start,
name='%s_%s_start' % info),
url(r'^end/$', end,
name='%s_%s_end' % info),
] + super(AssignmentAdmin, self).get_urls()
admin.site.register(Assignment, AssignmentAdmin)
if settings.DJANGO_ADMIN_SSO_ADD_LOGIN_BUTTON:
admin.site.login_template = 'admin_sso/login.html'
|
from django.conf.urls import url
from django.contrib import admin
from admin_sso import settings
from admin_sso.models import Assignment
class AssignmentAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'username', 'username_mode', 'domain',
'user', 'weight')
list_editable = ('username', 'username_mode', 'domain', 'user', 'weight')
def get_urls(self):
from admin_sso.views import start, end
info = (self.model._meta.app_label, self.model._meta.model_name)
return [
url(r'^start/$', start,
name='%s_%s_start' % info),
url(r'^end/$', end,
name='%s_%s_end' % info),
] + super(AssignmentAdmin, self).get_urls()
admin.site.register(Assignment, AssignmentAdmin)
if settings.DJANGO_ADMIN_SSO_ADD_LOGIN_BUTTON:
admin.site.login_template = 'admin_sso/login.html'
Add user to raw_id_fields, drastically improves UX on sites with many usersfrom django.conf.urls import url
from django.contrib import admin
from admin_sso import settings
from admin_sso.models import Assignment
class AssignmentAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'username', 'username_mode', 'domain',
'user', 'weight')
list_editable = ('username', 'username_mode', 'domain', 'user', 'weight')
raw_id_fields = ('user',)
def get_urls(self):
from admin_sso.views import start, end
info = (self.model._meta.app_label, self.model._meta.model_name)
return [
url(r'^start/$', start,
name='%s_%s_start' % info),
url(r'^end/$', end,
name='%s_%s_end' % info),
] + super(AssignmentAdmin, self).get_urls()
admin.site.register(Assignment, AssignmentAdmin)
if settings.DJANGO_ADMIN_SSO_ADD_LOGIN_BUTTON:
admin.site.login_template = 'admin_sso/login.html'
|
<commit_before>from django.conf.urls import url
from django.contrib import admin
from admin_sso import settings
from admin_sso.models import Assignment
class AssignmentAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'username', 'username_mode', 'domain',
'user', 'weight')
list_editable = ('username', 'username_mode', 'domain', 'user', 'weight')
def get_urls(self):
from admin_sso.views import start, end
info = (self.model._meta.app_label, self.model._meta.model_name)
return [
url(r'^start/$', start,
name='%s_%s_start' % info),
url(r'^end/$', end,
name='%s_%s_end' % info),
] + super(AssignmentAdmin, self).get_urls()
admin.site.register(Assignment, AssignmentAdmin)
if settings.DJANGO_ADMIN_SSO_ADD_LOGIN_BUTTON:
admin.site.login_template = 'admin_sso/login.html'
<commit_msg>Add user to raw_id_fields, drastically improves UX on sites with many users<commit_after>from django.conf.urls import url
from django.contrib import admin
from admin_sso import settings
from admin_sso.models import Assignment
class AssignmentAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'username', 'username_mode', 'domain',
'user', 'weight')
list_editable = ('username', 'username_mode', 'domain', 'user', 'weight')
raw_id_fields = ('user',)
def get_urls(self):
from admin_sso.views import start, end
info = (self.model._meta.app_label, self.model._meta.model_name)
return [
url(r'^start/$', start,
name='%s_%s_start' % info),
url(r'^end/$', end,
name='%s_%s_end' % info),
] + super(AssignmentAdmin, self).get_urls()
admin.site.register(Assignment, AssignmentAdmin)
if settings.DJANGO_ADMIN_SSO_ADD_LOGIN_BUTTON:
admin.site.login_template = 'admin_sso/login.html'
|
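The general pattern, sketched with an illustrative model (Article and its author field are placeholders, not part of admin_sso): raw_id_fields swaps the ForeignKey <select>, which renders an <option> for every user, for a plain ID input plus a search popup, which is what keeps the change form fast on sites with many users:

from django.contrib import admin
from myapp.models import Article  # placeholder model with an 'author' FK to User

class ArticleAdmin(admin.ModelAdmin):
    raw_id_fields = ('author',)  # ID input + lookup popup instead of a full <select>

admin.site.register(Article, ArticleAdmin)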
4b18a9fe5454055131202c60f057ffa3372dadbb
|
parsec/commands/cmd_config_init.py
|
parsec/commands/cmd_config_init.py
|
import os
import click
from parsec.cli import pass_context
from parsec import options
from parsec import config
from parsec.io import warn, info
CONFIG_TEMPLATE = """## Parsec Global Configuration File.
# Each stanza should contain a single galaxy server to control.
local:
key: "<TODO>"
email: "<TODO>"
password: "<TODO>"
host: "127.0.0.1"
port: "8080"
"""
SUCCESS_MESSAGE = (
"Wrote configuration template to %s, "
"please open with editor and fill out."
)
@click.command("config_init")
@pass_context
def cli(ctx, path, **kwds):
"""Help initialize global configuration (in home directory)
"""
# TODO: prompt for values someday.
config_path = config.global_config_path()
if os.path.exists(config_path):
warn("File %s already exists, exiting." % config_path)
return -1
with open(config_path, "w") as f:
f.write(CONFIG_TEMPLATE)
info(SUCCESS_MESSAGE % config_path)
|
import os
import click
from parsec.cli import pass_context
from parsec import options
from parsec import config
from parsec.io import warn, info
CONFIG_TEMPLATE = """## Parsec Global Configuration File.
# Each stanza should contain a single galaxy server to control.
local:
key: "%(key)s"
email: "<TODO>"
password: "<TODO>"
url: "%(url)s"
admin: %(admin)s
"""
SUCCESS_MESSAGE = (
"Wrote configuration template to %s, "
"please open with editor and fill out."
)
@click.command("config_init")
@click.option(
'--url',
help="URL to galaxy server",
)
@click.option(
'--api_key',
help="API key for galaxy server",
)
@click.option(
'--admin',
is_flag=True,
help="This API key is an admin/master API key",
)
@pass_context
def cli(ctx, url=None, api_key=None, admin=False, **kwds):
"""Help initialize global configuration (in home directory)
"""
# TODO: prompt for values someday.
config_path = config.global_config_path()
if os.path.exists(config_path):
warn("File %s already exists, exiting." % config_path)
return -1
with open(config_path, "w") as f:
f.write(CONFIG_TEMPLATE % {'key': '<TODO>' if api_key is None else api_key,
'url': '<TODO>' if url is None else url,
'admin': admin
})
info(SUCCESS_MESSAGE % config_path)
|
Allow bootstrapping config with values
|
Allow bootstrapping config with values
|
Python
|
apache-2.0
|
galaxy-iuc/parsec
|
import os
import click
from parsec.cli import pass_context
from parsec import options
from parsec import config
from parsec.io import warn, info
CONFIG_TEMPLATE = """## Parsec Global Configuration File.
# Each stanza should contain a single galaxy server to control.
local:
key: "<TODO>"
email: "<TODO>"
password: "<TODO>"
host: "127.0.0.1"
port: "8080"
"""
SUCCESS_MESSAGE = (
"Wrote configuration template to %s, "
"please open with editor and fill out."
)
@click.command("config_init")
@pass_context
def cli(ctx, path, **kwds):
"""Help initialize global configuration (in home directory)
"""
# TODO: prompt for values someday.
config_path = config.global_config_path()
if os.path.exists(config_path):
warn("File %s already exists, exiting." % config_path)
return -1
with open(config_path, "w") as f:
f.write(CONFIG_TEMPLATE)
info(SUCCESS_MESSAGE % config_path)
Allow bootstrapping config with values
|
import os
import click
from parsec.cli import pass_context
from parsec import options
from parsec import config
from parsec.io import warn, info
CONFIG_TEMPLATE = """## Parsec Global Configuration File.
# Each stanza should contain a single galaxy server to control.
local:
key: "%(key)s"
email: "<TODO>"
password: "<TODO>"
url: "%(url)s"
admin: %(admin)s
"""
SUCCESS_MESSAGE = (
"Wrote configuration template to %s, "
"please open with editor and fill out."
)
@click.command("config_init")
@click.option(
'--url',
help="URL to galaxy server",
)
@click.option(
'--api_key',
help="API key for galaxy server",
)
@click.option(
'--admin',
is_flag=True,
help="This API key is an admin/master API key",
)
@pass_context
def cli(ctx, url=None, api_key=None, admin=False, **kwds):
"""Help initialize global configuration (in home directory)
"""
# TODO: prompt for values someday.
config_path = config.global_config_path()
if os.path.exists(config_path):
warn("File %s already exists, exiting." % config_path)
return -1
with open(config_path, "w") as f:
f.write(CONFIG_TEMPLATE % {'key': '<TODO>' if api_key is None else api_key,
'url': '<TODO>' if url is None else url,
'admin': admin
})
info(SUCCESS_MESSAGE % config_path)
|
<commit_before>import os
import click
from parsec.cli import pass_context
from parsec import options
from parsec import config
from parsec.io import warn, info
CONFIG_TEMPLATE = """## Parsec Global Configuration File.
# Each stanza should contain a single galaxy server to control.
local:
key: "<TODO>"
email: "<TODO>"
password: "<TODO>"
host: "127.0.0.1"
port: "8080"
"""
SUCCESS_MESSAGE = (
"Wrote configuration template to %s, "
"please open with editor and fill out."
)
@click.command("config_init")
@pass_context
def cli(ctx, path, **kwds):
"""Help initialize global configuration (in home directory)
"""
# TODO: prompt for values someday.
config_path = config.global_config_path()
if os.path.exists(config_path):
warn("File %s already exists, exiting." % config_path)
return -1
with open(config_path, "w") as f:
f.write(CONFIG_TEMPLATE)
info(SUCCESS_MESSAGE % config_path)
<commit_msg>Allow bootstrapping config with values<commit_after>
|
import os
import click
from parsec.cli import pass_context
from parsec import options
from parsec import config
from parsec.io import warn, info
CONFIG_TEMPLATE = """## Parsec Global Configuration File.
# Each stanza should contain a single galaxy server to control.
local:
key: "%(key)s"
email: "<TODO>"
password: "<TODO>"
url: "%(url)s"
admin: %(admin)s
"""
SUCCESS_MESSAGE = (
"Wrote configuration template to %s, "
"please open with editor and fill out."
)
@click.command("config_init")
@click.option(
'--url',
help="URL to galaxy server",
)
@click.option(
'--api_key',
help="API key for galaxy server",
)
@click.option(
'--admin',
is_flag=True,
help="This API key is an admin/master API key",
)
@pass_context
def cli(ctx, url=None, api_key=None, admin=False, **kwds):
"""Help initialize global configuration (in home directory)
"""
# TODO: prompt for values someday.
config_path = config.global_config_path()
if os.path.exists(config_path):
warn("File %s already exists, exiting." % config_path)
return -1
with open(config_path, "w") as f:
f.write(CONFIG_TEMPLATE % {'key': '<TODO>' if api_key is None else api_key,
'url': '<TODO>' if url is None else url,
'admin': admin
})
info(SUCCESS_MESSAGE % config_path)
|
import os
import click
from parsec.cli import pass_context
from parsec import options
from parsec import config
from parsec.io import warn, info
CONFIG_TEMPLATE = """## Parsec Global Configuration File.
# Each stanza should contian a single galaxy server to control.
local:
key: "<TODO>"
email: "<TODO>"
password: "<TODO>"
host: "127.0.0.1"
port: "8080"
"""
SUCCESS_MESSAGE = (
"Wrote configuration template to %s, "
"please open with editor and fill out."
)
@click.command("config_init")
@pass_context
def cli(ctx, path, **kwds):
"""Help initialize global configuration (in home directory)
"""
# TODO: prompt for values someday.
config_path = config.global_config_path()
if os.path.exists(config_path):
warn("File %s already exists, exiting." % config_path)
return -1
with open(config_path, "w") as f:
f.write(CONFIG_TEMPLATE)
info(SUCCESS_MESSAGE % config_path)
Allow bootstrapping config with valuesimport os
import click
from parsec.cli import pass_context
from parsec import options
from parsec import config
from parsec.io import warn, info
CONFIG_TEMPLATE = """## Parsec Global Configuration File.
# Each stanza should contian a single galaxy server to control.
local:
key: "%(key)s"
email: "<TODO>"
password: "<TODO>"
url: "%(url)s"
admin: %(admin)s
"""
SUCCESS_MESSAGE = (
"Wrote configuration template to %s, "
"please open with editor and fill out."
)
@click.command("config_init")
@click.option(
'--url',
help="URL to galaxy server",
)
@click.option(
'--api_key',
help="API key for galaxy server",
)
@click.option(
'--admin',
is_flag=True,
help="This API key is an admin/master API key",
)
@pass_context
def cli(ctx, url=None, api_key=None, admin=False, **kwds):
"""Help initialize global configuration (in home directory)
"""
# TODO: prompt for values someday.
config_path = config.global_config_path()
if os.path.exists(config_path):
warn("File %s already exists, exiting." % config_path)
return -1
with open(config_path, "w") as f:
f.write(CONFIG_TEMPLATE % {'key': '<TODO>' if api_key is None else api_key,
'url': '<TODO>' if url is None else url,
'admin': admin
})
info(SUCCESS_MESSAGE % config_path)
|
<commit_before>import os
import click
from parsec.cli import pass_context
from parsec import options
from parsec import config
from parsec.io import warn, info
CONFIG_TEMPLATE = """## Parsec Global Configuration File.
# Each stanza should contian a single galaxy server to control.
local:
key: "<TODO>"
email: "<TODO>"
password: "<TODO>"
host: "127.0.0.1"
port: "8080"
"""
SUCCESS_MESSAGE = (
"Wrote configuration template to %s, "
"please open with editor and fill out."
)
@click.command("config_init")
@pass_context
def cli(ctx, path, **kwds):
"""Help initialize global configuration (in home directory)
"""
# TODO: prompt for values someday.
config_path = config.global_config_path()
if os.path.exists(config_path):
warn("File %s already exists, exiting." % config_path)
return -1
with open(config_path, "w") as f:
f.write(CONFIG_TEMPLATE)
info(SUCCESS_MESSAGE % config_path)
<commit_msg>Allow bootstrapping config with values<commit_after>import os
import click
from parsec.cli import pass_context
from parsec import options
from parsec import config
from parsec.io import warn, info
CONFIG_TEMPLATE = """## Parsec Global Configuration File.
# Each stanza should contian a single galaxy server to control.
local:
key: "%(key)s"
email: "<TODO>"
password: "<TODO>"
url: "%(url)s"
admin: %(admin)s
"""
SUCCESS_MESSAGE = (
"Wrote configuration template to %s, "
"please open with editor and fill out."
)
@click.command("config_init")
@click.option(
'--url',
help="URL to galaxy server",
)
@click.option(
'--api_key',
help="API key for galaxy server",
)
@click.option(
'--admin',
is_flag=True,
help="This API key is an admin/master API key",
)
@pass_context
def cli(ctx, url=None, api_key=None, admin=False, **kwds):
"""Help initialize global configuration (in home directory)
"""
# TODO: prompt for values someday.
config_path = config.global_config_path()
if os.path.exists(config_path):
warn("File %s already exists, exiting." % config_path)
return -1
with open(config_path, "w") as f:
f.write(CONFIG_TEMPLATE % {'key': '<TODO>' if api_key is None else api_key,
'url': '<TODO>' if url is None else url,
'admin': admin
})
info(SUCCESS_MESSAGE % config_path)
|
c87b5a51bc0fd69ed3ec1eddeedd3111820d3252
|
biblio/__init__.py
|
biblio/__init__.py
|
"""
Connect to the UGent Biblio API
"""
from .biblio import search, BASE_URL, publications_by_person
__author__ = 'Stef Bastiaansen'
__email__ = 'stef.bastiaansen@ugent.be'
__copyright__ = 'Copyright (c) 2016 LT3 - UGent'
__license__ = 'Apache License 2.0'
__version__ = '0.1'
__url__ = 'https://github.com/megasnort/python-ugent-biblio'
__description__ = 'A Python wrapper around the UGent Biblio API'
|
"""
Connect to the UGent Biblio API
"""
from biblio import search, BASE_URL, publications_by_person
__author__ = 'Stef Bastiaansen'
__email__ = 'stef.bastiaansen@ugent.be'
__copyright__ = 'Copyright (c) 2016 LT3 - UGent'
__license__ = 'Apache License 2.0'
__version__ = '0.1'
__url__ = 'https://github.com/megasnort/python-ugent-biblio'
__description__ = 'A Python wrapper around the UGent Biblio API'
|
Fix import location of biblio package
|
Fix import location of biblio package
|
Python
|
apache-2.0
|
megasnort/python-ugent-biblio
|
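The fix swaps an explicit relative import for the plain form, which finds the sibling module only through Python 2's implicit relative imports. A comment sketch of the two resolutions, with the package layout assumed:

# Assumed layout:
#   biblio/__init__.py   <- the file above
#   biblio/biblio.py     <- defines search, BASE_URL, publications_by_person
from .biblio import search   # explicit relative: sibling module on Py2 and Py3
from biblio import search    # implicit relative on Py2 only; on Py3 this
                             # resolves the top-level package "biblio" itself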
"""
Connect to the UGent Biblio API
"""
from .biblio import search, BASE_URL, publications_by_person
__author__ = 'Stef Bastiaansen'
__email__ = 'stef.bastiaansen@ugent.be'
__copyright__ = 'Copyright (c) 2016 LT3 - UGent'
__license__ = 'Apache License 2.0'
__version__ = '0.1'
__url__ = 'https://github.com/megasnort/python-ugent-biblio'
__description__ = 'A Python wrapper around the UGent Biblio API'
Fix import location of biblio package
|
"""
Connect to the UGent Biblio API
"""
from biblio import search, BASE_URL, publications_by_person
__author__ = 'Stef Bastiaansen'
__email__ = 'stef.bastiaansen@ugent.be'
__copyright__ = 'Copyright (c) 2016 LT3 - UGent'
__license__ = 'Apache License 2.0'
__version__ = '0.1'
__url__ = 'https://github.com/megasnort/python-ugent-biblio'
__description__ = 'A Python wrapper around the UGent Biblio API'
|
<commit_before>"""
Connect to the UGent Biblio API
"""
from .biblio import search, BASE_URL, publications_by_person
__author__ = 'Stef Bastiaansen'
__email__ = 'stef.bastiaansen@ugent.be'
__copyright__ = 'Copyright (c) 2016 LT3 - UGent'
__license__ = 'Apache License 2.0'
__version__ = '0.1'
__url__ = 'https://github.com/megasnort/python-ugent-biblio'
__description__ = 'A Python wrapper around the UGent Biblio API'
<commit_msg>Fix import location of biblio package<commit_after>
|
"""
Connect to the UGent Biblio API
"""
from biblio import search, BASE_URL, publications_by_person
__author__ = 'Stef Bastiaansen'
__email__ = 'stef.bastiaansen@ugent.be'
__copyright__ = 'Copyright (c) 2016 LT3 - UGent'
__license__ = 'Apache License 2.0'
__version__ = '0.1'
__url__ = 'https://github.com/megasnort/python-ugent-biblio'
__description__ = 'A Python wrapper around the UGent Biblio API'
|
"""
Connect to the UGent Biblio API
"""
from .biblio import search, BASE_URL, publications_by_person
__author__ = 'Stef Bastiaansen'
__email__ = 'stef.bastiaansen@ugent.be'
__copyright__ = 'Copyright (c) 2016 LT3 - UGent'
__license__ = 'Apache License 2.0'
__version__ = '0.1'
__url__ = 'https://github.com/megasnort/python-ugent-biblio'
__description__ = 'A Python wrapper around the UGent Biblio API'
Fix import location of biblio package"""
Connect to the UGent Biblio API
"""
from biblio import search, BASE_URL, publications_by_person
__author__ = 'Stef Bastiaansen'
__email__ = 'stef.bastiaansen@ugent.be'
__copyright__ = 'Copyright (c) 2016 LT3 - UGent'
__license__ = 'Apache License 2.0'
__version__ = '0.1'
__url__ = 'https://github.com/megasnort/python-ugent-biblio'
__description__ = 'A Python wrapper around the UGent Biblio API'
|
<commit_before>"""
Connect to the UGent Biblio API
"""
from .biblio import search, BASE_URL, publications_by_person
__author__ = 'Stef Bastiaansen'
__email__ = 'stef.bastiaansen@ugent.be'
__copyright__ = 'Copyright (c) 2016 LT3 - UGent'
__license__ = 'Apache License 2.0'
__version__ = '0.1'
__url__ = 'https://github.com/megasnort/python-ugent-biblio'
__description__ = 'A Python wrapper around the UGent Biblio API'
<commit_msg>Fix import location of biblio package<commit_after>"""
Connect to the UGent Biblio API
"""
from biblio import search, BASE_URL, publications_by_person
__author__ = 'Stef Bastiaansen'
__email__ = 'stef.bastiaansen@ugent.be'
__copyright__ = 'Copyright (c) 2016 LT3 - UGent'
__license__ = 'Apache License 2.0'
__version__ = '0.1'
__url__ = 'https://github.com/megasnort/python-ugent-biblio'
__description__ = 'A Python wrapper around the UGent Biblio API'
|
e255b92589000c2d485d35f9008b78e0313b4374
|
pystache/template_spec.py
|
pystache/template_spec.py
|
# coding: utf-8
"""
Provides a class to customize template information on a per-view basis.
To customize template properties for a particular view, create that view
from a class that subclasses TemplateSpec. The "Spec" in TemplateSpec
stands for template information that is "special" or "specified".
"""
# TODO: finish the class docstring.
class TemplateSpec(object):
"""
A mixin or interface for specifying custom template information.
The "spec" in TemplateSpec can be taken to mean that the template
information is either "specified" or "special."
A view should subclass this class only if customized template loading
is needed. The following attributes allow one to customize/override
template information on a per view basis. A None value means to use
default behavior for that value and perform no customization. All
attributes are initialized to None.
Attributes:
template: the template as a string.
template_rel_path: the path to the template file, relative to the
directory containing the module defining the class.
template_rel_directory: the directory containing the template file, relative
to the directory containing the module defining the class.
template_extension: the template file extension. Defaults to "mustache".
Pass False for no extension (i.e. extensionless template files).
"""
template = None
template_rel_path = None
template_rel_directory = None
template_name = None
template_extension = None
template_encoding = None
|
# coding: utf-8
"""
Provides a class to customize template information on a per-view basis.
To customize template properties for a particular view, create that view
from a class that subclasses TemplateSpec. The "spec" in TemplateSpec
stands for "special" or "specified" template information.
"""
class TemplateSpec(object):
"""
A mixin or interface for specifying custom template information.
The "spec" in TemplateSpec can be taken to mean that the template
information is either "specified" or "special."
A view should subclass this class only if customized template loading
is needed. The following attributes allow one to customize/override
template information on a per view basis. A None value means to use
default behavior for that value and perform no customization. All
attributes are initialized to None.
Attributes:
template: the template as a string.
template_encoding: the encoding used by the template.
template_extension: the template file extension. Defaults to "mustache".
Pass False for no extension (i.e. extensionless template files).
template_name: the name of the template.
template_rel_directory: the directory containing the template file,
relative to the directory containing the module defining the class.
template_rel_path: the path to the template file, relative to the
directory containing the module defining the class.
"""
template = None
template_encoding = None
template_extension = None
template_name = None
template_rel_directory = None
template_rel_path = None
|
Reorder TemplateSpec attributes and add to docstring.
|
Reorder TemplateSpec attributes and add to docstring.
|
Python
|
mit
|
nitish116/pystache,rismalrv/pystache,charbeljc/pystache,rismalrv/pystache,harsh00008/pystache,arlenesr28/pystache,defunkt/pystache,beni55/pystache,nitish116/pystache,nitish116/pystache,rismalrv/pystache,jrnold/pystache,jrnold/pystache,harsh00008/pystache,harsh00008/pystache,charbeljc/pystache,arlenesr28/pystache,beni55/pystache,arlenesr28/pystache
|
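A hedged usage sketch of the attributes the reordered docstring lists, subclassing TemplateSpec with an inline template (the top-level import path is assumed):

import pystache
from pystache import TemplateSpec

class Greeting(TemplateSpec):
    template = "Hello, {{name}}!"   # template supplied as a string
    template_encoding = "utf-8"     # the remaining attributes stay None

# Renderer picks the template up from the spec instance.
print(pystache.Renderer().render(Greeting(), {'name': 'world'}))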
ecb3a296b379f4abdc03ce5de447b30ada5f124e
|
txircd/modules/rfc/cmode_l.py
|
txircd/modules/rfc/cmode_l.py
|
from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import IMode, IModuleData, Mode, ModuleData
from txircd.utils import ModeType
from zope.interface import implements
class LimitMode(ModuleData, Mode):
implements(IPlugin, IModuleData, IMode)
name = "LimitMode"
core = True
affectedActions = [ "joinpermission" ]
def hookIRCd(self, ircd):
self.ircd = ircd
def channelModes(self):
return [ ("l", ModeType.Param, self) ]
def actions(self):
return [ ("modeactioncheck-channel-l-joinpermission", 10, self.isModeSet) ]
def isModeSet(self, channel, alsoChannel, user):
if "l" in channel.modes:
return channel.modes["l"]
return None
def checkSet(self, channel, param):
if param.isdigit():
return param
return None
def apply(self, actionType, channel, param, alsoChannel, user):
try: # There may be cases when the parameter we're passed is in string form still (e.g. from modules other than this one)
param = int(param)
except ValueError:
return None
if len(channel.users) >= param:
user.sendMessage(irc.ERR_CHANNELISFULL, channel.name, ":Cannot join channel (Channel is full)")
return False
return None
limitMode = LimitMode()
|
from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import IMode, IModuleData, Mode, ModuleData
from txircd.utils import ModeType
from zope.interface import implements
class LimitMode(ModuleData, Mode):
implements(IPlugin, IModuleData, IMode)
name = "LimitMode"
core = True
affectedActions = [ "joinpermission" ]
def hookIRCd(self, ircd):
self.ircd = ircd
def channelModes(self):
return [ ("l", ModeType.Param, self) ]
def actions(self):
return [ ("modeactioncheck-channel-l-joinpermission", 10, self.isModeSet) ]
def isModeSet(self, channel, alsoChannel, user):
if "l" in channel.modes:
return channel.modes["l"]
return None
def checkSet(self, channel, param):
if param.isdigit():
return [param]
return None
def apply(self, actionType, channel, param, alsoChannel, user):
try: # There may be cases when the parameter we're passed is in string form still (e.g. from modules other than this one)
param = int(param)
except ValueError:
return None
if len(channel.users) >= param:
user.sendMessage(irc.ERR_CHANNELISFULL, channel.name, ":Cannot join channel (Channel is full)")
return False
return None
limitMode = LimitMode()
|
Return the parameter for channel mode +l as a list
|
Return the parameter for channel mode +l as a list
This fixes a bug where every digit was handled as a separate parameter, causing
"MODE #channel +l 10" to turn into "MODE #channel +ll 1 0"
|
Python
|
bsd-3-clause
|
Heufneutje/txircd,ElementalAlchemist/txircd
|
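The one-character fix works because a bare string is itself an iterable of characters: any caller that extends its parameter list with checkSet's return value splits "10" apart. A standalone sketch of that failure mode (the extend call stands in for the assumed mode-handling code):

params = []
params.extend("10")    # string return: iterated per character
print(params)          # ['1', '0']  -> rendered as "+ll 1 0"

params = []
params.extend(["10"])  # list return: a single parameter
print(params)          # ['10']      -> rendered as "+l 10"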
a3ca99ab519401df8f2418ce877065dc3aa63146
|
app/parsers/models.py
|
app/parsers/models.py
|
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from typing import List
from rpe.resources import Resource
from pydantic import BaseModel, Field
# Parser-supplied metadata is arbitrary, but some fields are required
# currently just `src`
class MessageMetadata(BaseModel):
src: str
class Config:
extra = 'allow'
class EnforcerControlData(BaseModel):
enforce: bool = True
delay_enforcement: bool = True
class Config:
extra = 'forbid'
class ParsedMessage(BaseModel):
metadata: MessageMetadata
resources: List[Resource]
control_data: EnforcerControlData = EnforcerControlData()
timestamp: int = Field(default_factory=time.time)
class Config:
arbitrary_types_allowed = True
extra = 'forbid'
|
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from typing import List
from rpe.resources import Resource
from pydantic import BaseModel, Field
# Parser-supplied metadata is arbitrary, but some fields are required
# currently just `src`
class MessageMetadata(BaseModel):
src: str
class Config:
extra = 'allow'
class EnforcerControlData(BaseModel):
enforce: bool = True
delay_enforcement: bool = True
class Config:
extra = 'forbid'
class ParsedMessage(BaseModel):
metadata: MessageMetadata
resources: List[Resource]
control_data: EnforcerControlData = EnforcerControlData()
timestamp: int = Field(default_factory=time.time)
class Config:
arbitrary_types_allowed = True
extra = 'forbid'
@property
def age(self):
return int(time.time()) - self.timestamp
|
Add message_age property to ParsedMessage
|
Add message_age property to ParsedMessage
|
Python
|
apache-2.0
|
forseti-security/real-time-enforcer
|
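The property derives the age on every access instead of storing it. A trimmed, runnable sketch of the same pattern on a standalone pydantic model (Resource and the other fields omitted for brevity):

import time
from pydantic import BaseModel, Field

class Msg(BaseModel):
    timestamp: int = Field(default_factory=time.time)

    @property
    def age(self):
        # recomputed at call time, so it keeps growing as the message sits
        return int(time.time()) - self.timestamp

m = Msg()
time.sleep(2)
print(m.age)  # roughly 2: seconds elapsed since the message was created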
9cf29c769e3902c44914d3e216ae9457aa7e5fef
|
api/api/config_settings/redis_settings.py
|
api/api/config_settings/redis_settings.py
|
import redis
from api.utils import config
class RedisPools(object):
EXPERIMENTS_STATUS = redis.ConnectionPool.from_url(
config.get_string('POLYAXON_REDIS_EXPERIMENTS_STATUS_URL'))
JOBS_STATUS = redis.ConnectionPool.from_url(
config.get_string('POLYAXON_REDIS_JOBS_STATUS_URL'))
JOB_CONTAINERS = redis.ConnectionPool.from_url(
config.get_string('POLYAXON_REDIS_JOB_CONTAINERS_URL'))
|
import redis
from api.utils import config
class RedisPools(object):
EXPERIMENTS_STATUS = redis.ConnectionPool.from_url(
config.get_string('POLYAXON_REDIS_EXPERIMENTS_STATUS_URL'))
JOBS_STATUS = redis.ConnectionPool.from_url(
config.get_string('POLYAXON_REDIS_JOBS_STATUS_URL'))
JOB_CONTAINERS = redis.ConnectionPool.from_url(
config.get_string('POLYAXON_REDIS_JOB_CONTAINERS_URL'))
TO_STREAM = redis.ConnectionPool.from_url(
config.get_string('POLYAXON_REDIS_TO_STREAM_URL'))
|
Add to stream redis db
|
Add to stream redis db
|
Python
|
apache-2.0
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
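Each pool is built once per logical database URL and then shared by clients. A hedged sketch of consuming such a pool (the URL is a placeholder for whatever POLYAXON_REDIS_TO_STREAM_URL resolves to):

import redis

pool = redis.ConnectionPool.from_url('redis://localhost:6379/4')
client = redis.Redis(connection_pool=pool)
client.ping()  # connections are borrowed from, and returned to, the pool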
66b49f913513545e5ae0484963412e965c8f9aa1
|
saleor/dashboard/category/forms.py
|
saleor/dashboard/category/forms.py
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from mptt.forms import TreeNodeChoiceField
from ...product.models import Category
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
exclude = []
def __init__(self, *args, **kwargs):
super(CategoryForm, self).__init__(*args, **kwargs)
self.fields['parent'] = TreeNodeChoiceField(queryset=Category.objects.all())
def clean_parent(self):
parent = self.cleaned_data['parent']
if parent == self.instance:
raise forms.ValidationError(_('A category may not be made a child of itself'))
return parent
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from mptt.forms import TreeNodeChoiceField
from ...product.models import Category
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
exclude = []
def __init__(self, *args, **kwargs):
super(CategoryForm, self).__init__(*args, **kwargs)
self.fields['parent'] = TreeNodeChoiceField(queryset=Category.objects.all())
def clean_parent(self):
parent = self.cleaned_data['parent']
if parent == self.instance:
raise forms.ValidationError(_('A category may not be made a child of itself'))
if self.instance in parent.get_ancestors():
raise forms.ValidationError(_('A category may not be made a child of any of its descendants.'))
return parent
|
Check if new parent is not a descendant of current category
|
Check if new parent is not a descendant of current category
|
Python
|
bsd-3-clause
|
rchav/vinerack,HyperManTT/ECommerceSaleor,rchav/vinerack,taedori81/saleor,jreigel/saleor,tfroehlich82/saleor,avorio/saleor,paweltin/saleor,laosunhust/saleor,UITools/saleor,spartonia/saleor,arth-co/saleor,spartonia/saleor,taedori81/saleor,arth-co/saleor,rchav/vinerack,tfroehlich82/saleor,avorio/saleor,josesanch/saleor,UITools/saleor,arth-co/saleor,jreigel/saleor,maferelo/saleor,UITools/saleor,tfroehlich82/saleor,car3oon/saleor,itbabu/saleor,Drekscott/Motlaesaleor,dashmug/saleor,car3oon/saleor,paweltin/saleor,KenMutemi/saleor,rodrigozn/CW-Shop,laosunhust/saleor,jreigel/saleor,paweltin/saleor,taedori81/saleor,mociepka/saleor,itbabu/saleor,avorio/saleor,Drekscott/Motlaesaleor,josesanch/saleor,HyperManTT/ECommerceSaleor,Drekscott/Motlaesaleor,mociepka/saleor,dashmug/saleor,UITools/saleor,HyperManTT/ECommerceSaleor,Drekscott/Motlaesaleor,UITools/saleor,arth-co/saleor,laosunhust/saleor,KenMutemi/saleor,itbabu/saleor,spartonia/saleor,paweltin/saleor,josesanch/saleor,mociepka/saleor,spartonia/saleor,rodrigozn/CW-Shop,dashmug/saleor,rodrigozn/CW-Shop,maferelo/saleor,taedori81/saleor,maferelo/saleor,KenMutemi/saleor,avorio/saleor,car3oon/saleor,laosunhust/saleor
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from mptt.forms import TreeNodeChoiceField
from ...product.models import Category
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
exclude = []
def __init__(self, *args, **kwargs):
super(CategoryForm, self).__init__(*args, **kwargs)
self.fields['parent'] = TreeNodeChoiceField(queryset=Category.objects.all())
def clean_parent(self):
parent = self.cleaned_data['parent']
if parent == self.instance:
raise forms.ValidationError(_('A category may not be made a child of itself'))
return parent
Check if new parent is not a descendant of current category
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from mptt.forms import TreeNodeChoiceField
from ...product.models import Category
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
exclude = []
def __init__(self, *args, **kwargs):
super(CategoryForm, self).__init__(*args, **kwargs)
self.fields['parent'] = TreeNodeChoiceField(queryset=Category.objects.all())
def clean_parent(self):
parent = self.cleaned_data['parent']
if parent == self.instance:
raise forms.ValidationError(_('A category may not be made a child of itself'))
if self.instance in parent.get_ancestors():
raise forms.ValidationError(_('A category may not be made a child of any of its descendants.'))
return parent
|
<commit_before>from django import forms
from django.utils.translation import ugettext_lazy as _
from mptt.forms import TreeNodeChoiceField
from ...product.models import Category
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
exclude = []
def __init__(self, *args, **kwargs):
super(CategoryForm, self).__init__(*args, **kwargs)
self.fields['parent'] = TreeNodeChoiceField(queryset=Category.objects.all())
def clean_parent(self):
parent = self.cleaned_data['parent']
if parent == self.instance:
raise forms.ValidationError(_('A category may not be made a child of itself'))
return parent
<commit_msg>Check if new parent is not a descendant of current category<commit_after>
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from mptt.forms import TreeNodeChoiceField
from ...product.models import Category
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
exclude = []
def __init__(self, *args, **kwargs):
super(CategoryForm, self).__init__(*args, **kwargs)
self.fields['parent'] = TreeNodeChoiceField(queryset=Category.objects.all())
def clean_parent(self):
parent = self.cleaned_data['parent']
if parent == self.instance:
raise forms.ValidationError(_('A category may not be made a child of itself'))
if self.instance in parent.get_ancestors():
raise forms.ValidationError(_('A category may not be made a child of any of its descendants.'))
return parent
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from mptt.forms import TreeNodeChoiceField
from ...product.models import Category
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
exclude = []
def __init__(self, *args, **kwargs):
super(CategoryForm, self).__init__(*args, **kwargs)
self.fields['parent'] = TreeNodeChoiceField(queryset=Category.objects.all())
def clean_parent(self):
parent = self.cleaned_data['parent']
if parent == self.instance:
raise forms.ValidationError(_('A category may not be made a child of itself'))
return parent
Check if new parent is not a descendant of current category
from django import forms
from django.utils.translation import ugettext_lazy as _
from mptt.forms import TreeNodeChoiceField
from ...product.models import Category
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
exclude = []
def __init__(self, *args, **kwargs):
super(CategoryForm, self).__init__(*args, **kwargs)
self.fields['parent'] = TreeNodeChoiceField(queryset=Category.objects.all())
def clean_parent(self):
parent = self.cleaned_data['parent']
if parent == self.instance:
raise forms.ValidationError(_('A category may not be made a child of itself'))
if self.instance in parent.get_ancestors():
raise forms.ValidationError(_('A category may not be made a child of any of its descendants.'))
return parent
|
<commit_before>from django import forms
from django.utils.translation import ugettext_lazy as _
from mptt.forms import TreeNodeChoiceField
from ...product.models import Category
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
exclude = []
def __init__(self, *args, **kwargs):
super(CategoryForm, self).__init__(*args, **kwargs)
self.fields['parent'] = TreeNodeChoiceField(queryset=Category.objects.all())
def clean_parent(self):
parent = self.cleaned_data['parent']
if parent == self.instance:
raise forms.ValidationError(_('A category may not be made a child of itself'))
return parent
<commit_msg>Check if new parent is not a descendant of current category<commit_after>from django import forms
from django.utils.translation import ugettext_lazy as _
from mptt.forms import TreeNodeChoiceField
from ...product.models import Category
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
exclude = []
def __init__(self, *args, **kwargs):
super(CategoryForm, self).__init__(*args, **kwargs)
self.fields['parent'] = TreeNodeChoiceField(queryset=Category.objects.all())
def clean_parent(self):
parent = self.cleaned_data['parent']
if parent == self.instance:
raise forms.ValidationError(_('A category may not be made a child of itself'))
if self.instance in parent.get_ancestors():
raise forms.ValidationError(_('A category may not be made a child of any of its descendants.'))
return parent
|
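The extra clean_parent check above rejects moves that would create a cycle in the category tree. A self-contained sketch of why checking parent.get_ancestors() is sufficient (the Node class below is a hypothetical stand-in for an MPTT node, not saleor code):

class Node(object):
    # Minimal tree node exposing the get_ancestors() API the form relies on.
    def __init__(self, name, parent=None):
        self.name = name
        self.parent = parent

    def get_ancestors(self):
        node, ancestors = self.parent, []
        while node is not None:
            ancestors.append(node)
            node = node.parent
        return ancestors

root = Node('root')
child = Node('child', parent=root)
grandchild = Node('grandchild', parent=child)

# Re-parenting 'root' under 'grandchild' must fail: root already sits on
# grandchild's path to the root, so the move would close a cycle.
assert root in grandchild.get_ancestors()

If the instance appears anywhere on the proposed parent's ancestor path, the proposed parent is one of the instance's descendants and the move is invalid.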
c7d9287b770a0033cb54f9c1f9ac5f8beb25d528
|
scripts/cronRefreshEdxQualtrics.py
|
scripts/cronRefreshEdxQualtrics.py
|
from surveyextractor import QualtricsExtractor
import getopt
import sys
### Script for scheduling regular EdxQualtrics updates
### Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
# Append directory for dependencies to PYTHONPATH
sys.path.append("/home/dataman/Code/qualtrics_etl/src/qualtrics_etl/")
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
|
from surveyextractor import QualtricsExtractor
import getopt, sys
# Script for scheduling regular EdxQualtrics updates
# Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
|
Revert "Added script for cron job to load surveys to database."
|
Revert "Added script for cron job to load surveys to database."
This reverts commit 34e5560437348e5cfeab589b783c9cc524aa2abf.
|
Python
|
bsd-3-clause
|
paepcke/json_to_relation,paepcke/json_to_relation,paepcke/json_to_relation,paepcke/json_to_relation
|
from surveyextractor import QualtricsExtractor
import getopt
import sys
### Script for scheduling regular EdxQualtrics updates
### Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
# Append directory for dependencies to PYTHONPATH
sys.path.append("/home/dataman/Code/qualtrics_etl/src/qualtrics_etl/")
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
Revert "Added script for cron job to load surveys to database."
This reverts commit 34e5560437348e5cfeab589b783c9cc524aa2abf.
|
from surveyextractor import QualtricsExtractor
import getopt, sys
# Script for scheduling regular EdxQualtrics updates
# Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
|
<commit_before>from surveyextractor import QualtricsExtractor
import getopt
import sys
### Script for scheduling regular EdxQualtrics updates
### Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
# Append directory for dependencies to PYTHONPATH
sys.path.append("/home/dataman/Code/qualtrics_etl/src/qualtrics_etl/")
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
<commit_msg>Revert "Added script for cron job to load surveys to database."
This reverts commit 34e5560437348e5cfeab589b783c9cc524aa2abf.<commit_after>
|
from surveyextractor import QualtricsExtractor
import getopt, sys
# Script for scheduling regular EdxQualtrics updates
# Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
|
from surveyextractor import QualtricsExtractor
import getopt
import sys
### Script for scheduling regular EdxQualtrics updates
### Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
# Append directory for dependencies to PYTHONPATH
sys.path.append("/home/dataman/Code/qualtrics_etl/src/qualtrics_etl/")
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
Revert "Added script for cron job to load surveys to database."
This reverts commit 34e5560437348e5cfeab589b783c9cc524aa2abf.
from surveyextractor import QualtricsExtractor
import getopt, sys
# Script for scheduling regular EdxQualtrics updates
# Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
|
<commit_before>from surveyextractor import QualtricsExtractor
import getopt
import sys
### Script for scheduling regular EdxQualtrics updates
### Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
# Append directory for dependencies to PYTHONPATH
sys.path.append("/home/dataman/Code/qualtrics_etl/src/qualtrics_etl/")
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
<commit_msg>Revert "Added script for cron job to load surveys to database."
This reverts commit 34e5560437348e5cfeab589b783c9cc524aa2abf.<commit_after>from surveyextractor import QualtricsExtractor
import getopt, sys
# Script for scheduling regular EdxQualtrics updates
# Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
|
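One detail worth noting in both versions of the script above: getopt.getopt expects long option names without their leading dashes, so the list ['--reset', ...] as written can only ever match the short flags. A sketch of the conventional form (same flag names, behaviour stubbed out; this is not the project's code):

import getopt
import sys

# Long options are declared without '--'; getopt adds it when matching argv.
opts, args = getopt.getopt(sys.argv[1:], 'amsr',
                           ['reset', 'loadmeta', 'loadsurveys', 'loadresponses'])
for opt, _arg in opts:
    if opt in ('-a', '--reset'):
        print('would reset metadata, surveys and responses')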
250458aeb6619587443d6896c46a49c9754951e4
|
byceps/services/tourney/models/tourney.py
|
byceps/services/tourney/models/tourney.py
|
"""
byceps.services.tourney.models.tourney
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import NewType
from uuid import UUID
from ....database import db, generate_uuid
from ....util.instances import ReprBuilder
from .tourney_category import TourneyCategory
TourneyID = NewType('TourneyID', UUID)
class Tourney(db.Model):
"""A tournament."""
__tablename__ = 'tourney_teams'
__table_args__ = (
db.UniqueConstraint('group_id', 'title'),
)
id = db.Column(db.Uuid, default=generate_uuid, primary_key=True)
group_id = db.Column(db.Uuid, db.ForeignKey('tourney_groups.id'), index=True, nullable=False)
group = db.relationship(TourneyCategory)
title = db.Column(db.Unicode(40), nullable=False)
def __init__(self, group: TourneyCategory, title: str) -> None:
self.group = group
self.title = title
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('group') \
.add_with_lookup('title') \
.build()
|
"""
byceps.services.tourney.models.tourney
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import NewType
from uuid import UUID
from ....database import db, generate_uuid
from ....util.instances import ReprBuilder
from .tourney_category import TourneyCategory
TourneyID = NewType('TourneyID', UUID)
class Tourney(db.Model):
"""A tournament."""
__tablename__ = 'tourneys'
__table_args__ = (
db.UniqueConstraint('group_id', 'title'),
)
id = db.Column(db.Uuid, default=generate_uuid, primary_key=True)
group_id = db.Column(db.Uuid, db.ForeignKey('tourney_groups.id'), index=True, nullable=False)
group = db.relationship(TourneyCategory)
title = db.Column(db.Unicode(40), nullable=False)
def __init__(self, group: TourneyCategory, title: str) -> None:
self.group = group
self.title = title
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('group') \
.add_with_lookup('title') \
.build()
|
Fix `Tourney` model's database table name
|
Fix `Tourney` model's database table name
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,m-ober/byceps
|
"""
byceps.services.tourney.models.tourney
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import NewType
from uuid import UUID
from ....database import db, generate_uuid
from ....util.instances import ReprBuilder
from .tourney_category import TourneyCategory
TourneyID = NewType('TourneyID', UUID)
class Tourney(db.Model):
"""A tournament."""
__tablename__ = 'tourney_teams'
__table_args__ = (
db.UniqueConstraint('group_id', 'title'),
)
id = db.Column(db.Uuid, default=generate_uuid, primary_key=True)
group_id = db.Column(db.Uuid, db.ForeignKey('tourney_groups.id'), index=True, nullable=False)
group = db.relationship(TourneyCategory)
title = db.Column(db.Unicode(40), nullable=False)
def __init__(self, group: TourneyCategory, title: str) -> None:
self.group = group
self.title = title
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('group') \
.add_with_lookup('title') \
.build()
Fix `Tourney` model's database table name
|
"""
byceps.services.tourney.models.tourney
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import NewType
from uuid import UUID
from ....database import db, generate_uuid
from ....util.instances import ReprBuilder
from .tourney_category import TourneyCategory
TourneyID = NewType('TourneyID', UUID)
class Tourney(db.Model):
"""A tournament."""
__tablename__ = 'tourneys'
__table_args__ = (
db.UniqueConstraint('group_id', 'title'),
)
id = db.Column(db.Uuid, default=generate_uuid, primary_key=True)
group_id = db.Column(db.Uuid, db.ForeignKey('tourney_groups.id'), index=True, nullable=False)
group = db.relationship(TourneyCategory)
title = db.Column(db.Unicode(40), nullable=False)
def __init__(self, group: TourneyCategory, title: str) -> None:
self.group = group
self.title = title
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('group') \
.add_with_lookup('title') \
.build()
|
<commit_before>"""
byceps.services.tourney.models.tourney
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import NewType
from uuid import UUID
from ....database import db, generate_uuid
from ....util.instances import ReprBuilder
from .tourney_category import TourneyCategory
TourneyID = NewType('TourneyID', UUID)
class Tourney(db.Model):
"""A tournament."""
__tablename__ = 'tourney_teams'
__table_args__ = (
db.UniqueConstraint('group_id', 'title'),
)
id = db.Column(db.Uuid, default=generate_uuid, primary_key=True)
group_id = db.Column(db.Uuid, db.ForeignKey('tourney_groups.id'), index=True, nullable=False)
group = db.relationship(TourneyCategory)
title = db.Column(db.Unicode(40), nullable=False)
def __init__(self, group: TourneyCategory, title: str) -> None:
self.group = group
self.title = title
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('group') \
.add_with_lookup('title') \
.build()
<commit_msg>Fix `Tourney` model's database table name<commit_after>
|
"""
byceps.services.tourney.models.tourney
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import NewType
from uuid import UUID
from ....database import db, generate_uuid
from ....util.instances import ReprBuilder
from .tourney_category import TourneyCategory
TourneyID = NewType('TourneyID', UUID)
class Tourney(db.Model):
"""A tournament."""
__tablename__ = 'tourneys'
__table_args__ = (
db.UniqueConstraint('group_id', 'title'),
)
id = db.Column(db.Uuid, default=generate_uuid, primary_key=True)
group_id = db.Column(db.Uuid, db.ForeignKey('tourney_groups.id'), index=True, nullable=False)
group = db.relationship(TourneyCategory)
title = db.Column(db.Unicode(40), nullable=False)
def __init__(self, group: TourneyCategory, title: str) -> None:
self.group = group
self.title = title
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('group') \
.add_with_lookup('title') \
.build()
|
"""
byceps.services.tourney.models.tourney
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import NewType
from uuid import UUID
from ....database import db, generate_uuid
from ....util.instances import ReprBuilder
from .tourney_category import TourneyCategory
TourneyID = NewType('TourneyID', UUID)
class Tourney(db.Model):
"""A tournament."""
__tablename__ = 'tourney_teams'
__table_args__ = (
db.UniqueConstraint('group_id', 'title'),
)
id = db.Column(db.Uuid, default=generate_uuid, primary_key=True)
group_id = db.Column(db.Uuid, db.ForeignKey('tourney_groups.id'), index=True, nullable=False)
group = db.relationship(TourneyCategory)
title = db.Column(db.Unicode(40), nullable=False)
def __init__(self, group: TourneyCategory, title: str) -> None:
self.group = group
self.title = title
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('group') \
.add_with_lookup('title') \
.build()
Fix `Tourney` model's database table name
"""
byceps.services.tourney.models.tourney
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import NewType
from uuid import UUID
from ....database import db, generate_uuid
from ....util.instances import ReprBuilder
from .tourney_category import TourneyCategory
TourneyID = NewType('TourneyID', UUID)
class Tourney(db.Model):
"""A tournament."""
__tablename__ = 'tourneys'
__table_args__ = (
db.UniqueConstraint('group_id', 'title'),
)
id = db.Column(db.Uuid, default=generate_uuid, primary_key=True)
group_id = db.Column(db.Uuid, db.ForeignKey('tourney_groups.id'), index=True, nullable=False)
group = db.relationship(TourneyCategory)
title = db.Column(db.Unicode(40), nullable=False)
def __init__(self, group: TourneyCategory, title: str) -> None:
self.group = group
self.title = title
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('group') \
.add_with_lookup('title') \
.build()
|
<commit_before>"""
byceps.services.tourney.models.tourney
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import NewType
from uuid import UUID
from ....database import db, generate_uuid
from ....util.instances import ReprBuilder
from .tourney_category import TourneyCategory
TourneyID = NewType('TourneyID', UUID)
class Tourney(db.Model):
"""A tournament."""
__tablename__ = 'tourney_teams'
__table_args__ = (
db.UniqueConstraint('group_id', 'title'),
)
id = db.Column(db.Uuid, default=generate_uuid, primary_key=True)
group_id = db.Column(db.Uuid, db.ForeignKey('tourney_groups.id'), index=True, nullable=False)
group = db.relationship(TourneyCategory)
title = db.Column(db.Unicode(40), nullable=False)
def __init__(self, group: TourneyCategory, title: str) -> None:
self.group = group
self.title = title
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('group') \
.add_with_lookup('title') \
.build()
<commit_msg>Fix `Tourney` model's database table name<commit_after>"""
byceps.services.tourney.models.tourney
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import NewType
from uuid import UUID
from ....database import db, generate_uuid
from ....util.instances import ReprBuilder
from .tourney_category import TourneyCategory
TourneyID = NewType('TourneyID', UUID)
class Tourney(db.Model):
"""A tournament."""
__tablename__ = 'tourneys'
__table_args__ = (
db.UniqueConstraint('group_id', 'title'),
)
id = db.Column(db.Uuid, default=generate_uuid, primary_key=True)
group_id = db.Column(db.Uuid, db.ForeignKey('tourney_groups.id'), index=True, nullable=False)
group = db.relationship(TourneyCategory)
title = db.Column(db.Unicode(40), nullable=False)
def __init__(self, group: TourneyCategory, title: str) -> None:
self.group = group
self.title = title
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('group') \
.add_with_lookup('title') \
.build()
|
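The one-line fix above matters because __tablename__ is the single source of truth the declarative ORM uses to locate the table. A stand-alone sketch with plain SQLAlchemy 1.4+ (byceps wraps it behind its own db object, and the columns are simplified here):

from sqlalchemy import Column, Integer
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Tourney(Base):
    # A stale copy-paste ('tourney_teams') would silently point every query
    # for this model at the wrong table; the name must match the schema.
    __tablename__ = 'tourneys'
    id = Column(Integer, primary_key=True)

assert Tourney.__table__.name == 'tourneys'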
d5049edc8567cebf936bb07847906c5400f9a6d9
|
ceph_deploy/tests/unit/hosts/test_suse.py
|
ceph_deploy/tests/unit/hosts/test_suse.py
|
from ceph_deploy.hosts import suse
class TestSuseInit(object):
def setup(self):
self.host = suse
def test_choose_init_default(self):
self.host.release = None
init_type = self.host.choose_init()
assert init_type == "sysvinit"
def test_choose_init_SLE_11(self):
self.host.release = '11'
init_type = self.host.choose_init()
assert init_type == "sysvinit"
def test_choose_init_SLE_12(self):
self.host.release = '12'
init_type = self.host.choose_init()
assert init_type == "systemd"
def test_choose_init_openSUSE_13_1(self):
self.host.release = '13.1'
init_type = self.host.choose_init()
assert init_type == "systemd"
|
from ceph_deploy.hosts import suse
from ceph_deploy.hosts.suse.install import map_components
class TestSuseInit(object):
def setup(self):
self.host = suse
def test_choose_init_default(self):
self.host.release = None
init_type = self.host.choose_init()
assert init_type == "sysvinit"
def test_choose_init_SLE_11(self):
self.host.release = '11'
init_type = self.host.choose_init()
assert init_type == "sysvinit"
def test_choose_init_SLE_12(self):
self.host.release = '12'
init_type = self.host.choose_init()
assert init_type == "systemd"
def test_choose_init_openSUSE_13_1(self):
self.host.release = '13.1'
init_type = self.host.choose_init()
assert init_type == "systemd"
class TestSuseMapComponents(object):
def test_valid(self):
pkgs = map_components(['ceph-osd', 'ceph-common', 'ceph-radosgw'])
assert 'ceph' in pkgs
assert 'ceph-common' in pkgs
assert 'ceph-radosgw' in pkgs
assert 'ceph-osd' not in pkgs
def test_invalid(self):
pkgs = map_components(['not-provided', 'ceph-mon'])
assert 'not-provided' not in pkgs
assert 'ceph' in pkgs
|
Add tests for component to SUSE package mapping
|
Add tests for component to SUSE package mapping
Signed-off-by: David Disseldorp <589a549dc9f982d9f46aeeb82a09ab6d87ccf1d8@suse.de>
|
Python
|
mit
|
zhouyuan/ceph-deploy,shenhequnying/ceph-deploy,ceph/ceph-deploy,ghxandsky/ceph-deploy,zhouyuan/ceph-deploy,imzhulei/ceph-deploy,SUSE/ceph-deploy,Vicente-Cheng/ceph-deploy,ceph/ceph-deploy,branto1/ceph-deploy,trhoden/ceph-deploy,trhoden/ceph-deploy,osynge/ceph-deploy,ghxandsky/ceph-deploy,SUSE/ceph-deploy,branto1/ceph-deploy,codenrhoden/ceph-deploy,isyippee/ceph-deploy,isyippee/ceph-deploy,Vicente-Cheng/ceph-deploy,shenhequnying/ceph-deploy,osynge/ceph-deploy,imzhulei/ceph-deploy,codenrhoden/ceph-deploy
|
from ceph_deploy.hosts import suse
class TestSuseInit(object):
def setup(self):
self.host = suse
def test_choose_init_default(self):
self.host.release = None
init_type = self.host.choose_init()
assert init_type == "sysvinit"
def test_choose_init_SLE_11(self):
self.host.release = '11'
init_type = self.host.choose_init()
assert init_type == "sysvinit"
def test_choose_init_SLE_12(self):
self.host.release = '12'
init_type = self.host.choose_init()
assert init_type == "systemd"
def test_choose_init_openSUSE_13_1(self):
self.host.release = '13.1'
init_type = self.host.choose_init()
assert init_type == "systemd"
Add tests for component to SUSE package mapping
Signed-off-by: David Disseldorp <589a549dc9f982d9f46aeeb82a09ab6d87ccf1d8@suse.de>
|
from ceph_deploy.hosts import suse
from ceph_deploy.hosts.suse.install import map_components
class TestSuseInit(object):
def setup(self):
self.host = suse
def test_choose_init_default(self):
self.host.release = None
init_type = self.host.choose_init()
assert init_type == "sysvinit"
def test_choose_init_SLE_11(self):
self.host.release = '11'
init_type = self.host.choose_init()
assert init_type == "sysvinit"
def test_choose_init_SLE_12(self):
self.host.release = '12'
init_type = self.host.choose_init()
assert init_type == "systemd"
def test_choose_init_openSUSE_13_1(self):
self.host.release = '13.1'
init_type = self.host.choose_init()
assert init_type == "systemd"
class TestSuseMapComponents(object):
def test_valid(self):
pkgs = map_components(['ceph-osd', 'ceph-common', 'ceph-radosgw'])
assert 'ceph' in pkgs
assert 'ceph-common' in pkgs
assert 'ceph-radosgw' in pkgs
assert 'ceph-osd' not in pkgs
def test_invalid(self):
pkgs = map_components(['not-provided', 'ceph-mon'])
assert 'not-provided' not in pkgs
assert 'ceph' in pkgs
|
<commit_before>from ceph_deploy.hosts import suse
class TestSuseInit(object):
def setup(self):
self.host = suse
def test_choose_init_default(self):
self.host.release = None
init_type = self.host.choose_init()
assert init_type == "sysvinit"
def test_choose_init_SLE_11(self):
self.host.release = '11'
init_type = self.host.choose_init()
assert init_type == "sysvinit"
def test_choose_init_SLE_12(self):
self.host.release = '12'
init_type = self.host.choose_init()
assert init_type == "systemd"
def test_choose_init_openSUSE_13_1(self):
self.host.release = '13.1'
init_type = self.host.choose_init()
assert init_type == "systemd"
<commit_msg>Add tests for component to SUSE package mapping
Signed-off-by: David Disseldorp <589a549dc9f982d9f46aeeb82a09ab6d87ccf1d8@suse.de><commit_after>
|
from ceph_deploy.hosts import suse
from ceph_deploy.hosts.suse.install import map_components
class TestSuseInit(object):
def setup(self):
self.host = suse
def test_choose_init_default(self):
self.host.release = None
init_type = self.host.choose_init()
assert init_type == "sysvinit"
def test_choose_init_SLE_11(self):
self.host.release = '11'
init_type = self.host.choose_init()
assert init_type == "sysvinit"
def test_choose_init_SLE_12(self):
self.host.release = '12'
init_type = self.host.choose_init()
assert init_type == "systemd"
def test_choose_init_openSUSE_13_1(self):
self.host.release = '13.1'
init_type = self.host.choose_init()
assert init_type == "systemd"
class TestSuseMapComponents(object):
def test_valid(self):
pkgs = map_components(['ceph-osd', 'ceph-common', 'ceph-radosgw'])
assert 'ceph' in pkgs
assert 'ceph-common' in pkgs
assert 'ceph-radosgw' in pkgs
assert 'ceph-osd' not in pkgs
def test_invalid(self):
pkgs = map_components(['not-provided', 'ceph-mon'])
assert 'not-provided' not in pkgs
assert 'ceph' in pkgs
|
from ceph_deploy.hosts import suse
class TestSuseInit(object):
def setup(self):
self.host = suse
def test_choose_init_default(self):
self.host.release = None
init_type = self.host.choose_init()
assert init_type == "sysvinit"
def test_choose_init_SLE_11(self):
self.host.release = '11'
init_type = self.host.choose_init()
assert init_type == "sysvinit"
def test_choose_init_SLE_12(self):
self.host.release = '12'
init_type = self.host.choose_init()
assert init_type == "systemd"
def test_choose_init_openSUSE_13_1(self):
self.host.release = '13.1'
init_type = self.host.choose_init()
assert init_type == "systemd"
Add tests for component to SUSE package mapping
Signed-off-by: David Disseldorp <589a549dc9f982d9f46aeeb82a09ab6d87ccf1d8@suse.de>
from ceph_deploy.hosts import suse
from ceph_deploy.hosts.suse.install import map_components
class TestSuseInit(object):
def setup(self):
self.host = suse
def test_choose_init_default(self):
self.host.release = None
init_type = self.host.choose_init()
assert init_type == "sysvinit"
def test_choose_init_SLE_11(self):
self.host.release = '11'
init_type = self.host.choose_init()
assert init_type == "sysvinit"
def test_choose_init_SLE_12(self):
self.host.release = '12'
init_type = self.host.choose_init()
assert init_type == "systemd"
def test_choose_init_openSUSE_13_1(self):
self.host.release = '13.1'
init_type = self.host.choose_init()
assert init_type == "systemd"
class TestSuseMapComponents(object):
def test_valid(self):
pkgs = map_components(['ceph-osd', 'ceph-common', 'ceph-radosgw'])
assert 'ceph' in pkgs
assert 'ceph-common' in pkgs
assert 'ceph-radosgw' in pkgs
assert 'ceph-osd' not in pkgs
def test_invalid(self):
pkgs = map_components(['not-provided', 'ceph-mon'])
assert 'not-provided' not in pkgs
assert 'ceph' in pkgs
|
<commit_before>from ceph_deploy.hosts import suse
class TestSuseInit(object):
def setup(self):
self.host = suse
def test_choose_init_default(self):
self.host.release = None
init_type = self.host.choose_init()
assert init_type == "sysvinit"
def test_choose_init_SLE_11(self):
self.host.release = '11'
init_type = self.host.choose_init()
assert init_type == "sysvinit"
def test_choose_init_SLE_12(self):
self.host.release = '12'
init_type = self.host.choose_init()
assert init_type == "systemd"
def test_choose_init_openSUSE_13_1(self):
self.host.release = '13.1'
init_type = self.host.choose_init()
assert init_type == "systemd"
<commit_msg>Add tests for component to SUSE package mapping
Signed-off-by: David Disseldorp <589a549dc9f982d9f46aeeb82a09ab6d87ccf1d8@suse.de><commit_after>from ceph_deploy.hosts import suse
from ceph_deploy.hosts.suse.install import map_components
class TestSuseInit(object):
def setup(self):
self.host = suse
def test_choose_init_default(self):
self.host.release = None
init_type = self.host.choose_init()
assert init_type == "sysvinit"
def test_choose_init_SLE_11(self):
self.host.release = '11'
init_type = self.host.choose_init()
assert init_type == "sysvinit"
def test_choose_init_SLE_12(self):
self.host.release = '12'
init_type = self.host.choose_init()
assert init_type == "systemd"
def test_choose_init_openSUSE_13_1(self):
self.host.release = '13.1'
init_type = self.host.choose_init()
assert init_type == "systemd"
class TestSuseMapComponents(object):
def test_valid(self):
pkgs = map_components(['ceph-osd', 'ceph-common', 'ceph-radosgw'])
assert 'ceph' in pkgs
assert 'ceph-common' in pkgs
assert 'ceph-radosgw' in pkgs
assert 'ceph-osd' not in pkgs
def test_invalid(self):
pkgs = map_components(['not-provided', 'ceph-mon'])
assert 'not-provided' not in pkgs
assert 'ceph' in pkgs
|
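The new tests pin down the contract of map_components without showing its body here. A hypothetical implementation consistent with both tests (the mapping and whitelist contents are assumptions; the real code lives in ceph_deploy.hosts.suse.install):

# Assumed data: daemon-level components fold into the 'ceph' package on SUSE.
_COMPONENT_TO_PKG = {'ceph-osd': 'ceph', 'ceph-mon': 'ceph'}
_KNOWN_PKGS = {'ceph', 'ceph-common', 'ceph-radosgw'}

def map_components(components):
    # Translate each requested component, then drop anything SUSE
    # does not actually package.
    pkgs = {_COMPONENT_TO_PKG.get(c, c) for c in components}
    return sorted(p for p in pkgs if p in _KNOWN_PKGS)

assert map_components(['not-provided', 'ceph-mon']) == ['ceph']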
d8f33c46b6462788ef6e38dc5aefcdda2144eb66
|
camoco/__init__.py
|
camoco/__init__.py
|
"""
Camoco Library - CoAnalysis of Molecular Components
CacheMoneyCorn
"""
__license__ = """
Creative Commons Non-Commercial 4.0 Generic
http://creativecommons.org/licenses/by-nc/4.0/
"""
import pyximport; pyximport.install()
from camoco.Camoco import Camoco
from camoco.Expr import Expr
from camoco.COB import COB
from camoco.RefGen import RefGen
from camoco.Ontology import Ontology,Term
from camoco.HapMap import HapMap
from camoco.Locus import Locus
from camoco.Tools import available_datasets,del_dataset,\
mv_dataset,redescribe_dataset
from camoco.Config import cf
from camoco.GEO import Family
|
"""
Camoco Library - CoAnalysis of Molecular Components
CacheMoneyCorn
"""
__license__ = """
Creative Commons Non-Commercial 4.0 Generic
http://creativecommons.org/licenses/by-nc/4.0/
"""
import pyximport; pyximport.install()
from camoco.Camoco import Camoco
Camoco.create()
from camoco.Expr import Expr
from camoco.COB import COB
from camoco.RefGen import RefGen
from camoco.Ontology import Ontology,Term
from camoco.HapMap import HapMap
from camoco.Locus import Locus
from camoco.Tools import available_datasets,del_dataset,\
mv_dataset,redescribe_dataset
from camoco.Config import cf
from camoco.GEO import Family
|
Fix initial create for camoco class
|
Fix initial create for camoco class
|
Python
|
mit
|
schae234/Camoco,schae234/Camoco
|
"""
Camoco Library - CoAnalysis of Molecular Components
CacheMoneyCorn
"""
__license__ = """
Creative Commons Non-Commercial 4.0 Generic
http://creativecommons.org/licenses/by-nc/4.0/
"""
import pyximport; pyximport.install()
from camoco.Camoco import Camoco
from camoco.Expr import Expr
from camoco.COB import COB
from camoco.RefGen import RefGen
from camoco.Ontology import Ontology,Term
from camoco.HapMap import HapMap
from camoco.Locus import Locus
from camoco.Tools import available_datasets,del_dataset,\
mv_dataset,redescribe_dataset
from camoco.Config import cf
from camoco.GEO import Family
Fix initial create for camoco class
|
"""
Camoco Library - CoAnalysis of Molecular Components
CacheMoneyCorn
"""
__license__ = """
Creative Commons Non-Commercial 4.0 Generic
http://creativecommons.org/licenses/by-nc/4.0/
"""
import pyximport; pyximport.install()
from camoco.Camoco import Camoco
Camoco.create()
from camoco.Expr import Expr
from camoco.COB import COB
from camoco.RefGen import RefGen
from camoco.Ontology import Ontology,Term
from camoco.HapMap import HapMap
from camoco.Locus import Locus
from camoco.Tools import available_datasets,del_dataset,\
mv_dataset,redescribe_dataset
from camoco.Config import cf
from camoco.GEO import Family
|
<commit_before>"""
Camoco Library - CoAnalysis of Molecular Components
CacheMoneyCorn
"""
__license__ = """
Creative Commons Non-Commercial 4.0 Generic
http://creativecommons.org/licenses/by-nc/4.0/
"""
import pyximport; pyximport.install()
from camoco.Camoco import Camoco
from camoco.Expr import Expr
from camoco.COB import COB
from camoco.RefGen import RefGen
from camoco.Ontology import Ontology,Term
from camoco.HapMap import HapMap
from camoco.Locus import Locus
from camoco.Tools import available_datasets,del_dataset,\
mv_dataset,redescribe_dataset
from camoco.Config import cf
from camoco.GEO import Family
<commit_msg>Fix initial create for camoco class<commit_after>
|
"""
Camoco Library - CoAnalysis of Molecular Components
CacheMoneyCorn
"""
__license__ = """
Creative Commons Non-Commercial 4.0 Generic
http://creativecommons.org/licenses/by-nc/4.0/
"""
import pyximport; pyximport.install()
from camoco.Camoco import Camoco
Camoco.create()
from camoco.Expr import Expr
from camoco.COB import COB
from camoco.RefGen import RefGen
from camoco.Ontology import Ontology,Term
from camoco.HapMap import HapMap
from camoco.Locus import Locus
from camoco.Tools import available_datasets,del_dataset,\
mv_dataset,redescribe_dataset
from camoco.Config import cf
from camoco.GEO import Family
|
"""
Camoco Library - CoAnalysis of Molecular Components
CacheMoneyCorn
"""
__license__ = """
Creative Commons Non-Commercial 4.0 Generic
http://creativecommons.org/licenses/by-nc/4.0/
"""
import pyximport; pyximport.install()
from camoco.Camoco import Camoco
from camoco.Expr import Expr
from camoco.COB import COB
from camoco.RefGen import RefGen
from camoco.Ontology import Ontology,Term
from camoco.HapMap import HapMap
from camoco.Locus import Locus
from camoco.Tools import available_datasets,del_dataset,\
mv_dataset,redescribe_dataset
from camoco.Config import cf
from camoco.GEO import Family
Fix initial create for camoco class
"""
Camoco Library - CoAnalysis of Molecular Components
CacheMoneyCorn
"""
__license__ = """
Creative Commons Non-Commercial 4.0 Generic
http://creativecommons.org/licenses/by-nc/4.0/
"""
import pyximport; pyximport.install()
from camoco.Camoco import Camoco
Camoco.create()
from camoco.Expr import Expr
from camoco.COB import COB
from camoco.RefGen import RefGen
from camoco.Ontology import Ontology,Term
from camoco.HapMap import HapMap
from camoco.Locus import Locus
from camoco.Tools import available_datasets,del_dataset,\
mv_dataset,redescribe_dataset
from camoco.Config import cf
from camoco.GEO import Family
|
<commit_before>"""
Camoco Library - CoAnalysis of Molecular Components
CacheMoneyCorn
"""
__license__ = """
Creative Commons Non-Commercial 4.0 Generic
http://creativecommons.org/licenses/by-nc/4.0/
"""
import pyximport; pyximport.install()
from camoco.Camoco import Camoco
from camoco.Expr import Expr
from camoco.COB import COB
from camoco.RefGen import RefGen
from camoco.Ontology import Ontology,Term
from camoco.HapMap import HapMap
from camoco.Locus import Locus
from camoco.Tools import available_datasets,del_dataset,\
mv_dataset,redescribe_dataset
from camoco.Config import cf
from camoco.GEO import Family
<commit_msg>Fix initial create for camoco class<commit_after>"""
Camoco Library - CoAnalysis of Molecular Components
CacheMoneyCorn
"""
__license__ = """
Creative Commons Non-Commercial 4.0 Generic
http://creativecommons.org/licenses/by-nc/4.0/
"""
import pyximport; pyximport.install()
from camoco.Camoco import Camoco
Camoco.create()
from camoco.Expr import Expr
from camoco.COB import COB
from camoco.RefGen import RefGen
from camoco.Ontology import Ontology,Term
from camoco.HapMap import HapMap
from camoco.Locus import Locus
from camoco.Tools import available_datasets,del_dataset,\
mv_dataset,redescribe_dataset
from camoco.Config import cf
from camoco.GEO import Family
|
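Calling Camoco.create() at import time only helps if the method is idempotent, since the package may be imported many times. A sketch of such a hook (assumed behaviour; the real method lives on the Camoco base class and its storage layout is not shown in this record):

import os
import sqlite3

def create(basedir='~/.camoco'):
    # Safe to run on every import: both calls below are no-ops when the
    # directory and table already exist.
    path = os.path.expanduser(basedir)
    os.makedirs(path, exist_ok=True)
    con = sqlite3.connect(os.path.join(path, 'camoco.db'))
    con.execute('CREATE TABLE IF NOT EXISTS datasets (name TEXT PRIMARY KEY)')
    con.commit()
    con.close()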
a1bcb99691f5a0238f6a34a5579df3e89e8d6823
|
child_sync_gp/model/project_compassion.py
|
child_sync_gp/model/project_compassion.py
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from . import gp_connector
class project_compassion(orm.Model):
_inherit = 'compassion.project'
def write(self, cr, uid, ids, vals, context=None):
"""Update Project in GP."""
res = super(project_compassion, self).write(cr, uid, ids, vals,
context)
gp_connect = gp_connector.GPConnect()
for project in self.browse(cr, uid, ids, context):
gp_connect.upsert_project(uid, project)
return res
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from . import gp_connector
class project_compassion(orm.Model):
_inherit = 'compassion.project'
def write(self, cr, uid, ids, vals, context=None):
"""Update Project in GP."""
res = super(project_compassion, self).write(cr, uid, ids, vals,
context)
if not isinstance(ids, list):
ids = [ids]
gp_connect = gp_connector.GPConnect()
for project in self.browse(cr, uid, ids, context):
gp_connect.upsert_project(uid, project)
return res
|
Fix bug in write project.
|
Fix bug in write project.
|
Python
|
agpl-3.0
|
CompassionCH/compassion-switzerland,ndtran/compassion-switzerland,MickSandoz/compassion-switzerland,eicher31/compassion-switzerland,Secheron/compassion-switzerland,CompassionCH/compassion-switzerland,Secheron/compassion-switzerland,CompassionCH/compassion-switzerland,MickSandoz/compassion-switzerland,ecino/compassion-switzerland,ndtran/compassion-switzerland,eicher31/compassion-switzerland,ecino/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from . import gp_connector
class project_compassion(orm.Model):
_inherit = 'compassion.project'
def write(self, cr, uid, ids, vals, context=None):
"""Update Project in GP."""
res = super(project_compassion, self).write(cr, uid, ids, vals,
context)
gp_connect = gp_connector.GPConnect()
for project in self.browse(cr, uid, ids, context):
gp_connect.upsert_project(uid, project)
return res
Fix bug in write project.
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from . import gp_connector
class project_compassion(orm.Model):
_inherit = 'compassion.project'
def write(self, cr, uid, ids, vals, context=None):
"""Update Project in GP."""
res = super(project_compassion, self).write(cr, uid, ids, vals,
context)
if not isinstance(ids, list):
ids = [ids]
gp_connect = gp_connector.GPConnect()
for project in self.browse(cr, uid, ids, context):
gp_connect.upsert_project(uid, project)
return res
|
<commit_before># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from . import gp_connector
class project_compassion(orm.Model):
_inherit = 'compassion.project'
def write(self, cr, uid, ids, vals, context=None):
"""Update Project in GP."""
res = super(project_compassion, self).write(cr, uid, ids, vals,
context)
gp_connect = gp_connector.GPConnect()
for project in self.browse(cr, uid, ids, context):
gp_connect.upsert_project(uid, project)
return res
<commit_msg>Fix bug in write project.<commit_after>
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from . import gp_connector
class project_compassion(orm.Model):
_inherit = 'compassion.project'
def write(self, cr, uid, ids, vals, context=None):
"""Update Project in GP."""
res = super(project_compassion, self).write(cr, uid, ids, vals,
context)
if not isinstance(ids, list):
ids = [ids]
gp_connect = gp_connector.GPConnect()
for project in self.browse(cr, uid, ids, context):
gp_connect.upsert_project(uid, project)
return res
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from . import gp_connector
class project_compassion(orm.Model):
_inherit = 'compassion.project'
def write(self, cr, uid, ids, vals, context=None):
"""Update Project in GP."""
res = super(project_compassion, self).write(cr, uid, ids, vals,
context)
gp_connect = gp_connector.GPConnect()
for project in self.browse(cr, uid, ids, context):
gp_connect.upsert_project(uid, project)
return res
Fix bug in write project.
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from . import gp_connector
class project_compassion(orm.Model):
_inherit = 'compassion.project'
def write(self, cr, uid, ids, vals, context=None):
"""Update Project in GP."""
res = super(project_compassion, self).write(cr, uid, ids, vals,
context)
if not isinstance(ids, list):
ids = [ids]
gp_connect = gp_connector.GPConnect()
for project in self.browse(cr, uid, ids, context):
gp_connect.upsert_project(uid, project)
return res
|
<commit_before># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from . import gp_connector
class project_compassion(orm.Model):
_inherit = 'compassion.project'
def write(self, cr, uid, ids, vals, context=None):
"""Update Project in GP."""
res = super(project_compassion, self).write(cr, uid, ids, vals,
context)
gp_connect = gp_connector.GPConnect()
for project in self.browse(cr, uid, ids, context):
gp_connect.upsert_project(uid, project)
return res
<commit_msg>Fix bug in write project.<commit_after># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from . import gp_connector
class project_compassion(orm.Model):
_inherit = 'compassion.project'
def write(self, cr, uid, ids, vals, context=None):
"""Update Project in GP."""
res = super(project_compassion, self).write(cr, uid, ids, vals,
context)
if not isinstance(ids, list):
ids = [ids]
gp_connect = gp_connector.GPConnect()
for project in self.browse(cr, uid, ids, context):
gp_connect.upsert_project(uid, project)
return res
|
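The fix above guards against the OpenERP 7 convention where write() may receive either a single id or a list of ids; browse() then iterates over it, so a bare integer breaks. The normalisation in isolation (generic names, not project code):

def normalize_ids(ids):
    # Accept both write(cr, uid, 42, ...) and write(cr, uid, [42, 43], ...).
    return ids if isinstance(ids, list) else [ids]

assert normalize_ids(42) == [42]
assert normalize_ids([42, 43]) == [42, 43]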
4ad92bfcbfd2145b008cd18e934ebd6dc3be53e9
|
pytest/test_prefork.py
|
pytest/test_prefork.py
|
from tectonic import prefork
def test_WorkerMetadata():
"""
This is a simple test, as WorkerMetadata only holds data
"""
pid = 'pid'
health_check_read = 100
last_seen = 'now'
metadata = prefork.WorkerMetadata(pid=pid,
health_check_read=health_check_read,
last_seen=last_seen)
assert metadata.pid == pid
assert metadata.health_check_read == health_check_read
assert metadata.last_seen == last_seen
|
import os
import shutil
import os.path
import tempfile
from tectonic import prefork
def test_WorkerMetadata():
"""
This is a simple test, as WorkerMetadata only holds data
"""
pid = 'pid'
health_check_read = 100
last_seen = 'now'
metadata = prefork.WorkerMetadata(pid=pid,
health_check_read=health_check_read,
last_seen=last_seen)
assert metadata.pid == pid
assert metadata.health_check_read == health_check_read
assert metadata.last_seen == last_seen
def test_WriteAndFlushFile():
"""
Make sure we can write to and read from a file.
"""
try:
# Create a directory. Make sure to remove it at the end.
dirname = tempfile.mkdtemp()
filename = 'filename.txt'
text1 = 'The quick brown fox\n'
text2 = 'The lazy dog'
full_path = os.path.join(dirname, filename)
# Open a file and write using both changed methods
f = prefork.WriteAndFlushFile(full_path, 'w')
f.write(text1)
f.writelines(text2)
f.close()
# Read everything back
f = open(full_path, 'r')
data = f.readlines()
f.close()
assert data[0] == text1
assert data[1] == text2
finally:
# Always remove it
shutil.rmtree(dirname)
|
Add a test for the file object
|
Add a test for the file object
|
Python
|
bsd-3-clause
|
markrwilliams/tectonic
|
from tectonic import prefork
def test_WorkerMetadata():
"""
This is a simple test, as WorkerMetadata only holds data
"""
pid = 'pid'
health_check_read = 100
last_seen = 'now'
metadata = prefork.WorkerMetadata(pid=pid,
health_check_read=health_check_read,
last_seen=last_seen)
assert metadata.pid == pid
assert metadata.health_check_read == health_check_read
assert metadata.last_seen == last_seen
Add a test for the file object
|
import os
import shutil
import os.path
import tempfile
from tectonic import prefork
def test_WorkerMetadata():
"""
This is a simple test, as WorkerMetadata only holds data
"""
pid = 'pid'
health_check_read = 100
last_seen = 'now'
metadata = prefork.WorkerMetadata(pid=pid,
health_check_read=health_check_read,
last_seen=last_seen)
assert metadata.pid == pid
assert metadata.health_check_read == health_check_read
assert metadata.last_seen == last_seen
def test_WriteAndFlushFile():
"""
Make sure we can write to and read from a file.
"""
try:
# Create a directory. Make sure to remove it at the end.
dirname = tempfile.mkdtemp()
filename = 'filename.txt'
text1 = 'The quick brown fox\n'
text2 = 'The lazy dog'
full_path = os.path.join(dirname, filename)
# Open a file and write using both changed methods
f = prefork.WriteAndFlushFile(full_path, 'w')
f.write(text1)
f.writelines(text2)
f.close()
# Read everything back
f = open(full_path, 'r')
data = f.readlines()
f.close()
assert data[0] == text1
assert data[1] == text2
finally:
# Always remove it
shutil.rmtree(dirname)
|
<commit_before>from tectonic import prefork
def test_WorkerMetadata():
"""
This is a simple test, as WorkerMetadata only holds data
"""
pid = 'pid'
health_check_read = 100
last_seen = 'now'
metadata = prefork.WorkerMetadata(pid=pid,
health_check_read=health_check_read,
last_seen=last_seen)
assert metadata.pid == pid
assert metadata.health_check_read == health_check_read
assert metadata.last_seen == last_seen
<commit_msg>Add a test for the file object<commit_after>
|
import os
import shutil
import os.path
import tempfile
from tectonic import prefork
def test_WorkerMetadata():
"""
This is a simple test, as WorkerMetadata only holds data
"""
pid = 'pid'
health_check_read = 100
last_seen = 'now'
metadata = prefork.WorkerMetadata(pid=pid,
health_check_read=health_check_read,
last_seen=last_seen)
assert metadata.pid == pid
assert metadata.health_check_read == health_check_read
assert metadata.last_seen == last_seen
def test_WriteAndFlushFile():
"""
Make sure we can write to and read from a file.
"""
try:
# Create a directory. Make sure to remove it at the end.
dirname = tempfile.mkdtemp()
filename = 'filename.txt'
text1 = 'The quick brown fox\n'
text2 = 'The lazy dog'
full_path = os.path.join(dirname, filename)
# Open a file and write using both changed methods
f = prefork.WriteAndFlushFile(full_path, 'w')
f.write(text1)
f.writelines(text2)
f.close()
# Read everything back
f = open(full_path, 'r')
data = f.readlines()
f.close()
assert data[0] == text1
assert data[1] == text2
finally:
# Always remove it
shutil.rmtree(dirname)
|
from tectonic import prefork
def test_WorkerMetadata():
"""
This is a simple test, as WorkerMetadata only holds data
"""
pid = 'pid'
health_check_read = 100
last_seen = 'now'
metadata = prefork.WorkerMetadata(pid=pid,
health_check_read=health_check_read,
last_seen=last_seen)
assert metadata.pid == pid
assert metadata.health_check_read == health_check_read
assert metadata.last_seen == last_seen
Add a test for the file object
import os
import shutil
import os.path
import tempfile
from tectonic import prefork
def test_WorkerMetadata():
"""
This is a simple test, as WorkerMetadata only holds data
"""
pid = 'pid'
health_check_read = 100
last_seen = 'now'
metadata = prefork.WorkerMetadata(pid=pid,
health_check_read=health_check_read,
last_seen=last_seen)
assert metadata.pid == pid
assert metadata.health_check_read == health_check_read
assert metadata.last_seen == last_seen
def test_WriteAndFlushFile():
"""
Make sure we can write to and read from a file.
"""
try:
# Create a directory. Make sure to remove it at the end.
dirname = tempfile.mkdtemp()
filename = 'filename.txt'
text1 = 'The quick brown fox\n'
text2 = 'The lazy dog'
full_path = os.path.join(dirname, filename)
# Open a file and write using both changed methods
f = prefork.WriteAndFlushFile(full_path, 'w')
f.write(text1)
f.writelines(text2)
f.close()
# Read everything back
f = open(full_path, 'r')
data = f.readlines()
f.close()
assert data[0] == text1
assert data[1] == text2
finally:
# Always remove it
shutil.rmtree(dirname)
|
<commit_before>from tectonic import prefork
def test_WorkerMetadata():
"""
This is a simple test, as WorkerMetadata only holds data
"""
pid = 'pid'
health_check_read = 100
last_seen = 'now'
metadata = prefork.WorkerMetadata(pid=pid,
health_check_read=health_check_read,
last_seen=last_seen)
assert metadata.pid == pid
assert metadata.health_check_read == health_check_read
assert metadata.last_seen == last_seen
<commit_msg>Add a test for the file object<commit_after>import os
import shutil
import os.path
import tempfile
from tectonic import prefork
def test_WorkerMetadata():
"""
This is a simple test, as WorkerMetadata only holds data
"""
pid = 'pid'
health_check_read = 100
last_seen = 'now'
metadata = prefork.WorkerMetadata(pid=pid,
health_check_read=health_check_read,
last_seen=last_seen)
assert metadata.pid == pid
assert metadata.health_check_read == health_check_read
assert metadata.last_seen == last_seen
def test_WriteAndFlushFile():
"""
Make sure we can write to and read from a file.
"""
try:
# Create a directory. Make sure to remove it at the end.
dirname = tempfile.mkdtemp()
filename = 'filename.txt'
text1 = 'The quick brown fox\n'
text2 = 'The lazy dog'
full_path = os.path.join(dirname, filename)
# Open a file and write using both changed methods
f = prefork.WriteAndFlushFile(full_path, 'w')
f.write(text1)
f.writelines(text2)
f.close()
# Read everything back
f = open(full_path, 'r')
data = f.readlines()
f.close()
assert data[0] == text1
assert data[1] == text2
finally:
# Always remove it
shutil.rmtree(dirname)
|
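Note: the record above tests prefork.WriteAndFlushFile without ever showing its definition. A minimal sketch of what such a wrapper might look like, assuming it simply flushes after every write (the real tectonic class may differ):

class WriteAndFlushFile(object):
    # Hypothetical sketch; tectonic's actual implementation is not in this record.
    def __init__(self, path, mode):
        self._f = open(path, mode)

    def write(self, data):
        self._f.write(data)
        self._f.flush()  # flush immediately so readers never see buffered lag

    def writelines(self, lines):
        self._f.writelines(lines)
        self._f.flush()

    def close(self):
        self._f.close()

Used exactly as in the test: f = WriteAndFlushFile(full_path, 'w'); f.write(...); f.close().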
cb8bf92ab2f71767de8b471992d79131e4dde9a1
|
quicksort/quicksort.py
|
quicksort/quicksort.py
|
def sort(arr):
return arr;
if __name__ == '__main__':
unsorted = list(reversed(range(1000)));
print sort(unsorted);
|
def sort(arr, length):
if length == 1:
return
return (arr, length)
if __name__ == '__main__':
unsorted = list(reversed(range(1000)))
initial_len = len(unsorted)
print sort(unsorted, initial_len)
|
Add length parameter to sort and remove semicolons
|
Add length parameter to sort and remove semicolons
The sort function requires a length parameter, so the function
declaration and the initial call were modified to reflect that.
A length of just 1 element represents the base case of the
recursion, so the function simply returns in this case.
Also I forgot how python works for a minute there and included a
bunch of unnecessary semicolons. Those are gone now
|
Python
|
mit
|
timpel/stanford-algs,timpel/stanford-algs
|
def sort(arr):
return arr;
if __name__ == '__main__':
unsorted = list(reversed(range(1000)));
print sort(unsorted);
Add length parameter to sort and remove semicolons
The sort function requires a length parameter, so the function
declaration and the initial call were modified to reflect that.
A length of just 1 element represents the base case of the
recursion, so the function simply returns in this case.
Also I forgot how python works for a minute there and included a
bunch of unnecessary semicolons. Those are gone now
|
def sort(arr, length):
if length == 1:
return
return (arr, length)
if __name__ == '__main__':
unsorted = list(reversed(range(1000)))
initial_len = len(unsorted)
print sort(unsorted, initial_len)
|
<commit_before>def sort(arr):
return arr;
if __name__ == '__main__':
unsorted = list(reversed(range(1000)));
print sort(unsorted);<commit_msg>Add length parameter to sort and remove semicolons
The sort function requires a length parameter, so the function
declaration and the initial call were modified to reflect that.
A length of just 1 element represents the base case of the
recursion, so the function simply returns in this case.
Also I forgot how python works for a minute there and included a
bunch of unnecessary semicolons. Those are gone now<commit_after>
|
def sort(arr, length):
if length == 1:
return
return (arr, length)
if __name__ == '__main__':
unsorted = list(reversed(range(1000)))
initial_len = len(unsorted)
print sort(unsorted, initial_len)
|
def sort(arr):
return arr;
if __name__ == '__main__':
unsorted = list(reversed(range(1000)));
print sort(unsorted);
Add length parameter to sort and remove semicolons
The sort function requires a length parameter, so the function
declaration and the initial call were modified to reflect that.
A length of just 1 element represents the base case of the
recursion, so the function simply returns in this case.
Also I forgot how python works for a minute there and included a
bunch of unnecessary semicolons. Those are gone now
def sort(arr, length):
if length == 1:
return
return (arr, length)
if __name__ == '__main__':
unsorted = list(reversed(range(1000)))
initial_len = len(unsorted)
print sort(unsorted, initial_len)
|
<commit_before>def sort(arr):
return arr;
if __name__ == '__main__':
unsorted = list(reversed(range(1000)));
print sort(unsorted);<commit_msg>Add length parameter to sort and remove semicolons
The sort function requires a length parameter, so the function
declaration and the initial call were modified to reflect that.
A length of just 1 element represents the base case of the
recursion, so the function simply returns in this case.
Also I forgot how python works for a minute there and included a
bunch of unecessary semicolons. Those are gone now<commit_after>def sort(arr, length):
if length == 1:
return
return (arr, length)
if __name__ == '__main__':
unsorted = list(reversed(range(1000)))
initial_len = len(unsorted)
print sort(unsorted, initial_len)
|
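Note: the commit above only introduces the length parameter and the length == 1 base case. For clarity, a sketch (not part of the record) of the in-place quicksort that such a refactor is typically building toward:

def quicksort(arr, lo=0, hi=None):
    # Sort arr[lo..hi] in place; a slice of one element is the base case.
    if hi is None:
        hi = len(arr) - 1
    if hi - lo + 1 <= 1:
        return arr
    pivot = arr[hi]
    i = lo
    for j in range(lo, hi):
        # Lomuto partition: elements smaller than the pivot move left of i.
        if arr[j] < pivot:
            arr[i], arr[j] = arr[j], arr[i]
            i += 1
    arr[i], arr[hi] = arr[hi], arr[i]  # place the pivot at its final index
    quicksort(arr, lo, i - 1)
    quicksort(arr, i + 1, hi)
    return arr

print(quicksort([3, 1, 4, 1, 5, 9, 2, 6]))  # [1, 1, 2, 3, 4, 5, 6, 9]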
9256844b08edaff1b9755a6ffc25acc0df76934d
|
MoodJournal/entries/serializers.py
|
MoodJournal/entries/serializers.py
|
from django.contrib.auth.models import User
from rest_framework import serializers
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(
view_name='category-detail'
)
# TODO Should I just either not include it, or go with the user name?
user = serializers.PrimaryKeyRelatedField(read_only=True)
class Meta:
model = UserDefinedCategory
fields = ('url', 'user', 'category', 'pk',)
class EntryInstanceSerializer(serializers.ModelSerializer):
user = serializers.ReadOnlyField(source='user.username')
class Meta:
model = EntryInstance
fields = ('user', 'category', 'date', 'entry', 'quality_rating', 'pk',)
|
from rest_framework import serializers
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.ModelSerializer):
class Meta:
model = UserDefinedCategory
fields = ('user', 'category', 'pk',)
class EntryInstanceSerializer(serializers.ModelSerializer):
user = serializers.ReadOnlyField(source='user.username')
class Meta:
model = EntryInstance
fields = ('user', 'category', 'date', 'entry', 'quality_rating', 'pk',)
|
Revert "beginning hyperlink model serialization"
|
Revert "beginning hyperlink model serialization"
This reverts commit 6d41c54397512da69604f7e730757f4aff96374f.
|
Python
|
mit
|
swpease/MoodJournal,swpease/MoodJournal,swpease/MoodJournal
|
from django.contrib.auth.models import User
from rest_framework import serializers
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(
view_name='category-detail'
)
# TODO Should I just either not include it, or go with the user name?
user = serializers.PrimaryKeyRelatedField(read_only=True)
class Meta:
model = UserDefinedCategory
fields = ('url', 'user', 'category', 'pk',)
class EntryInstanceSerializer(serializers.ModelSerializer):
user = serializers.ReadOnlyField(source='user.username')
class Meta:
model = EntryInstance
fields = ('user', 'category', 'date', 'entry', 'quality_rating', 'pk',)
Revert "beginning hyperlink model serialization"
This reverts commit 6d41c54397512da69604f7e730757f4aff96374f.
|
from rest_framework import serializers
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.ModelSerializer):
class Meta:
model = UserDefinedCategory
fields = ('user', 'category', 'pk',)
class EntryInstanceSerializer(serializers.ModelSerializer):
user = serializers.ReadOnlyField(source='user.username')
class Meta:
model = EntryInstance
fields = ('user', 'category', 'date', 'entry', 'quality_rating', 'pk',)
|
<commit_before>from django.contrib.auth.models import User
from rest_framework import serializers
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(
view_name='category-detail'
)
# TODO Should I just either not include it, or go with the user name?
user = serializers.PrimaryKeyRelatedField(read_only=True)
class Meta:
model = UserDefinedCategory
fields = ('url', 'user', 'category', 'pk',)
class EntryInstanceSerializer(serializers.ModelSerializer):
user = serializers.ReadOnlyField(source='user.username')
class Meta:
model = EntryInstance
fields = ('user', 'category', 'date', 'entry', 'quality_rating', 'pk',)
<commit_msg>Revert "beginning hyperlink model serialization"
This reverts commit 6d41c54397512da69604f7e730757f4aff96374f.<commit_after>
|
from rest_framework import serializers
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.ModelSerializer):
class Meta:
model = UserDefinedCategory
fields = ('user', 'category', 'pk',)
class EntryInstanceSerializer(serializers.ModelSerializer):
user = serializers.ReadOnlyField(source='user.username')
class Meta:
model = EntryInstance
fields = ('user', 'category', 'date', 'entry', 'quality_rating', 'pk',)
|
from django.contrib.auth.models import User
from rest_framework import serializers
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(
view_name='category-detail'
)
# TODO Should I just either not include it, or go with the user name?
user = serializers.PrimaryKeyRelatedField(read_only=True)
class Meta:
model = UserDefinedCategory
fields = ('url', 'user', 'category', 'pk',)
class EntryInstanceSerializer(serializers.ModelSerializer):
user = serializers.ReadOnlyField(source='user.username')
class Meta:
model = EntryInstance
fields = ('user', 'category', 'date', 'entry', 'quality_rating', 'pk',)
Revert "beginning hyperlink model serialization"
This reverts commit 6d41c54397512da69604f7e730757f4aff96374f.
from rest_framework import serializers
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.ModelSerializer):
class Meta:
model = UserDefinedCategory
fields = ('user', 'category', 'pk',)
class EntryInstanceSerializer(serializers.ModelSerializer):
user = serializers.ReadOnlyField(source='user.username')
class Meta:
model = EntryInstance
fields = ('user', 'category', 'date', 'entry', 'quality_rating', 'pk',)
|
<commit_before>from django.contrib.auth.models import User
from rest_framework import serializers
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(
view_name='category-detail'
)
# TODO Should I just either not include it, or go with the user name?
user = serializers.PrimaryKeyRelatedField(read_only=True)
class Meta:
model = UserDefinedCategory
fields = ('url', 'user', 'category', 'pk',)
class EntryInstanceSerializer(serializers.ModelSerializer):
user = serializers.ReadOnlyField(source='user.username')
class Meta:
model = EntryInstance
fields = ('user', 'category', 'date', 'entry', 'quality_rating', 'pk',)
<commit_msg>Revert "beginning hyperlink model serialization"
This reverts commit 6d41c54397512da69604f7e730757f4aff96374f.<commit_after>from rest_framework import serializers
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.ModelSerializer):
class Meta:
model = UserDefinedCategory
fields = ('user', 'category', 'pk',)
class EntryInstanceSerializer(serializers.ModelSerializer):
user = serializers.ReadOnlyField(source='user.username')
class Meta:
model = EntryInstance
fields = ('user', 'category', 'date', 'entry', 'quality_rating', 'pk',)
|
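Note on the reverted approach above: a HyperlinkedIdentityField(view_name='category-detail') only renders if a URL pattern registered under that exact name exists and the serializer receives the request in its context. A sketch of the route such a setup assumes (CategoryDetail is a hypothetical view name, not taken from this record):

from django.urls import path
from . import views

urlpatterns = [
    # The name here must match the serializer field's view_name exactly.
    path('categories/<int:pk>/', views.CategoryDetail.as_view(),
         name='category-detail'),
]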
5a12f027079d109228456c6f3e4912317721246a
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='cyrtranslit',
packages=['cyrtranslit'],
version='0.4',
description='Bi-directional Cyrillic transliteration. Transliterate Cyrillic script text to Roman alphabet text and vice versa.',
author='Open Data Kosovo',
author_email='dev@opendatakosovo.org',
url='https://github.com/opendatakosovo/cyrillic-transliteration',
download_url='https://github.com/opendatakosovo/cyrillic-transliteration/archive/v0.4.tar.gz',
license='MIT',
long_description='Transliteration is the conversion of a text from one script to another. Current version supports transliteration for Serbian, Macedonian, Montenegrin, and Russian.',
keywords=['cyrillic', 'latin', 'transliteration', 'transliterate', 'cyrtranslit', 'serbian', 'macedonian', 'montenegrin', 'russian'],
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'],
)
|
from distutils.core import setup
setup(
name='cyrtranslit',
packages=['cyrtranslit'],
version='0.4',
description='Bi-directional Cyrillic transliteration. Transliterate Cyrillic script text to Roman alphabet text and vice versa.',
author='Open Data Kosovo',
author_email='dev@opendatakosovo.org',
url='https://github.com/opendatakosovo/cyrillic-transliteration',
download_url='https://github.com/opendatakosovo/cyrillic-transliteration/archive/v0.4.tar.gz',
license='MIT',
long_description='Transliteration is the conversion of a text from one script to another. Current version supports transliteration for Serbian, Macedonian, Montenegrin, and Russian.',
keywords=['cyrillic', 'latin', 'transliteration', 'transliterate', 'cyrtranslit', 'serbian', 'macedonian', 'montenegrin', 'russian'],
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7'],
)
|
Declare that cyrtranslit supports Python 3.7
|
Declare that cyrtranslit supports Python 3.7
|
Python
|
mit
|
opendatakosovo/cyrillic-transliteration
|
from distutils.core import setup
setup(
name='cyrtranslit',
packages=['cyrtranslit'],
version='0.4',
description='Bi-directional Cyrillic transliteration. Transliterate Cyrillic script text to Roman alphabet text and vice versa.',
author='Open Data Kosovo',
author_email='dev@opendatakosovo.org',
url='https://github.com/opendatakosovo/cyrillic-transliteration',
download_url='https://github.com/opendatakosovo/cyrillic-transliteration/archive/v0.4.tar.gz',
license='MIT',
long_description='Transliteration is the conversion of a text from one script to another. Current version supports transliteration for Serbian, Macedonian, Montenegrin, and Russian.',
keywords=['cyrillic', 'latin', 'transliteration', 'transliterate', 'cyrtranslit', 'serbian', 'macedonian', 'montenegrin', 'russian'],
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'],
)
Declare that cyrtranslit supports Python 3.7
|
from distutils.core import setup
setup(
name='cyrtranslit',
packages=['cyrtranslit'],
version='0.4',
description='Bi-directional Cyrillic transliteration. Transliterate Cyrillic script text to Roman alphabet text and vice versa.',
author='Open Data Kosovo',
author_email='dev@opendatakosovo.org',
url='https://github.com/opendatakosovo/cyrillic-transliteration',
download_url='https://github.com/opendatakosovo/cyrillic-transliteration/archive/v0.4.tar.gz',
license='MIT',
long_description='Transliteration is the conversion of a text from one script to another. Current version supports transliteration for Serbian, Macedonian, Montenegrin, and Russian.',
keywords=['cyrillic', 'latin', 'transliteration', 'transliterate', 'cyrtranslit', 'serbian', 'macedonian', 'montenegrin', 'russian'],
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7'],
)
|
<commit_before>from distutils.core import setup
setup(
name='cyrtranslit',
packages=['cyrtranslit'],
version='0.4',
description='Bi-directional Cyrillic transliteration. Transliterate Cyrillic script text to Roman alphabet text and vice versa.',
author='Open Data Kosovo',
author_email='dev@opendatakosovo.org',
url='https://github.com/opendatakosovo/cyrillic-transliteration',
download_url='https://github.com/opendatakosovo/cyrillic-transliteration/archive/v0.4.tar.gz',
license='MIT',
long_description='Transliteration is the conversion of a text from one script to another. Current version supports transliteration for Serbian, Macedonian, Montenegrin, and Russian.',
keywords=['cyrillic', 'latin', 'transliteration', 'transliterate', 'cyrtranslit', 'serbian', 'macedonian', 'montenegrin', 'russian'],
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'],
)
<commit_msg>Declare that cyrtranslit supports Python 3.7<commit_after>
|
from distutils.core import setup
setup(
name='cyrtranslit',
packages=['cyrtranslit'],
version='0.4',
description='Bi-directional Cyrillic transliteration. Transliterate Cyrillic script text to Roman alphabet text and vice versa.',
author='Open Data Kosovo',
author_email='dev@opendatakosovo.org',
url='https://github.com/opendatakosovo/cyrillic-transliteration',
download_url='https://github.com/opendatakosovo/cyrillic-transliteration/archive/v0.4.tar.gz',
license='MIT',
long_description='Transliteration is the conversion of a text from one script to another. Current version supports transliteration for Serbian, Macedonian, Montenegrin, and Russian.',
keywords=['cyrillic', 'latin', 'transliteration', 'transliterate', 'cyrtranslit', 'serbian', 'macedonian', 'montenegrin', 'russian'],
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7'],
)
|
from distutils.core import setup
setup(
name='cyrtranslit',
packages=['cyrtranslit'],
version='0.4',
description='Bi-directional Cyrillic transliteration. Transliterate Cyrillic script text to Roman alphabet text and vice versa.',
author='Open Data Kosovo',
author_email='dev@opendatakosovo.org',
url='https://github.com/opendatakosovo/cyrillic-transliteration',
download_url='https://github.com/opendatakosovo/cyrillic-transliteration/archive/v0.4.tar.gz',
license='MIT',
long_description='Transliteration is the conversion of a text from one script to another. Current version supports transliteration for Serbian, Macedonian, Montenegrin, and Russian.',
keywords=['cyrillic', 'latin', 'transliteration', 'transliterate', 'cyrtranslit', 'serbian', 'macedonian', 'montenegrin', 'russian'],
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'],
)
Declare that cyrtranslit supports Python 3.7
from distutils.core import setup
setup(
name='cyrtranslit',
packages=['cyrtranslit'],
version='0.4',
description='Bi-directional Cyrillic transliteration. Transliterate Cyrillic script text to Roman alphabet text and vice versa.',
author='Open Data Kosovo',
author_email='dev@opendatakosovo.org',
url='https://github.com/opendatakosovo/cyrillic-transliteration',
download_url='https://github.com/opendatakosovo/cyrillic-transliteration/archive/v0.4.tar.gz',
license='MIT',
long_description='Transliteration is the conversion of a text from one script to another. Current version supports transliteration for Serbian, Macedonian, Montenegrin, and Russian.',
keywords=['cyrillic', 'latin', 'transliteration', 'transliterate', 'cyrtranslit', 'serbian', 'macedonian', 'montenegrin', 'russian'],
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7'],
)
|
<commit_before>from distutils.core import setup
setup(
name='cyrtranslit',
packages=['cyrtranslit'],
version='0.4',
description='Bi-directional Cyrillic transliteration. Transliterate Cyrillic script text to Roman alphabet text and vice versa.',
author='Open Data Kosovo',
author_email='dev@opendatakosovo.org',
url='https://github.com/opendatakosovo/cyrillic-transliteration',
download_url='https://github.com/opendatakosovo/cyrillic-transliteration/archive/v0.4.tar.gz',
license='MIT',
long_description='Transliteration is the conversion of a text from one script to another. Current version supports transliteration for Serbian, Macedonian, Montenegrin, and Russian.',
keywords=['cyrillic', 'latin', 'transliteration', 'transliterate', 'cyrtranslit', 'serbian', 'macedonian', 'montenegrin', 'russian'],
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'],
)
<commit_msg>Declare that cyrtranslit supports Python 3.7<commit_after>from distutils.core import setup
setup(
name='cyrtranslit',
packages=['cyrtranslit'],
version='0.4',
description='Bi-directional Cyrillic transliteration. Transliterate Cyrillic script text to Roman alphabet text and vice versa.',
author='Open Data Kosovo',
author_email='dev@opendatakosovo.org',
url='https://github.com/opendatakosovo/cyrillic-transliteration',
download_url='https://github.com/opendatakosovo/cyrillic-transliteration/archive/v0.4.tar.gz',
license='MIT',
long_description='Transliteration is the conversion of a text from one script to another. Current version supports transliteration for Serbian, Macedonian, Montenegrin, and Russian.',
keywords=['cyrillic', 'latin', 'transliteration', 'transliterate', 'cyrtranslit', 'serbian', 'macedonian', 'montenegrin', 'russian'],
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7'],
)
|
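Note: trove classifiers such as the 'Programming Language :: Python :: 3.7' line added above are advisory metadata for PyPI browsing; pip does not enforce them. A sketch of also enforcing the supported versions, assuming a switch from distutils to setuptools (not something this commit does):

from setuptools import setup  # python_requires needs setuptools, not distutils

setup(
    name='cyrtranslit',
    # ... same metadata as above ...
    python_requires='>=2.7, !=3.0.*, !=3.1.*',  # pip refuses unsupported interpreters
)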
26730c073e183249a8eb7c0d0333fdd32e307e4a
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
deps = [
'ijson==2.2',
'mozci==0.15.1',
'MozillaPulse==1.2.1',
'requests==2.7.0', # Maximum version taskcluster will work with
'taskcluster==0.0.27',
'treeherder-client==1.7.0',
]
setup(name='pulse-actions',
version='0.2.0',
description='A pulse listener that acts upon messages with mozci.',
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
author='Alice Scarpa',
author_email='alicescarpa@gmail.com',
license='MPL 2.0',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=deps,
url='https://github.com/adusca/pulse_actions',
entry_points={
'console_scripts': [
'run-pulse-actions = pulse_actions.worker:main'
],
})
|
from setuptools import setup, find_packages
deps = [
'ijson==2.2',
'mozci==0.15.1',
'MozillaPulse==1.2.2',
'requests==2.7.0', # Maximum version taskcluster will work with
'taskcluster==0.0.27',
'treeherder-client==1.7.0',
]
setup(name='pulse-actions',
version='0.2.0',
description='A pulse listener that acts upon messages with mozci.',
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
author='Alice Scarpa',
author_email='alicescarpa@gmail.com',
license='MPL 2.0',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=deps,
url='https://github.com/adusca/pulse_actions',
entry_points={
'console_scripts': [
'run-pulse-actions = pulse_actions.worker:main'
],
})
|
Upgrade to working MozillaPulse version
|
Upgrade to working MozillaPulse version
|
Python
|
mpl-2.0
|
armenzg/pulse_actions,adusca/pulse_actions,mozilla/pulse_actions
|
from setuptools import setup, find_packages
deps = [
'ijson==2.2',
'mozci==0.15.1',
'MozillaPulse==1.2.1',
'requests==2.7.0', # Maximum version taskcluster will work with
'taskcluster==0.0.27',
'treeherder-client==1.7.0',
]
setup(name='pulse-actions',
version='0.2.0',
description='A pulse listener that acts upon messages with mozci.',
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
author='Alice Scarpa',
author_email='alicescarpa@gmail.com',
license='MPL 2.0',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=deps,
url='https://github.com/adusca/pulse_actions',
entry_points={
'console_scripts': [
'run-pulse-actions = pulse_actions.worker:main'
],
})
Upgrade to working MozillaPulse version
|
from setuptools import setup, find_packages
deps = [
'ijson==2.2',
'mozci==0.15.1',
'MozillaPulse==1.2.2',
'requests==2.7.0', # Maximum version taskcluster will work with
'taskcluster==0.0.27',
'treeherder-client==1.7.0',
]
setup(name='pulse-actions',
version='0.2.0',
description='A pulse listener that acts upon messages with mozci.',
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
author='Alice Scarpa',
author_email='alicescarpa@gmail.com',
license='MPL 2.0',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=deps,
url='https://github.com/adusca/pulse_actions',
entry_points={
'console_scripts': [
'run-pulse-actions = pulse_actions.worker:main'
],
})
|
<commit_before>from setuptools import setup, find_packages
deps = [
'ijson==2.2',
'mozci==0.15.1',
'MozillaPulse==1.2.1',
'requests==2.7.0', # Maximum version taskcluster will work with
'taskcluster==0.0.27',
'treeherder-client==1.7.0',
]
setup(name='pulse-actions',
version='0.2.0',
description='A pulse listener that acts upon messages with mozci.',
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
author='Alice Scarpa',
author_email='alicescarpa@gmail.com',
license='MPL 2.0',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=deps,
url='https://github.com/adusca/pulse_actions',
entry_points={
'console_scripts': [
'run-pulse-actions = pulse_actions.worker:main'
],
})
<commit_msg>Upgrade to working MozillaPulse version<commit_after>
|
from setuptools import setup, find_packages
deps = [
'ijson==2.2',
'mozci==0.15.1',
'MozillaPulse==1.2.2',
'requests==2.7.0', # Maximum version taskcluster will work with
'taskcluster==0.0.27',
'treeherder-client==1.7.0',
]
setup(name='pulse-actions',
version='0.2.0',
description='A pulse listener that acts upon messages with mozci.',
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
author='Alice Scarpa',
author_email='alicescarpa@gmail.com',
license='MPL 2.0',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=deps,
url='https://github.com/adusca/pulse_actions',
entry_points={
'console_scripts': [
'run-pulse-actions = pulse_actions.worker:main'
],
})
|
from setuptools import setup, find_packages
deps = [
'ijson==2.2',
'mozci==0.15.1',
'MozillaPulse==1.2.1',
'requests==2.7.0', # Maximum version taskcluster will work with
'taskcluster==0.0.27',
'treeherder-client==1.7.0',
]
setup(name='pulse-actions',
version='0.2.0',
description='A pulse listener that acts upon messages with mozci.',
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
author='Alice Scarpa',
author_email='alicescarpa@gmail.com',
license='MPL 2.0',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=deps,
url='https://github.com/adusca/pulse_actions',
entry_points={
'console_scripts': [
'run-pulse-actions = pulse_actions.worker:main'
],
})
Upgrade to working MozillaPulse version
from setuptools import setup, find_packages
deps = [
'ijson==2.2',
'mozci==0.15.1',
'MozillaPulse==1.2.2',
'requests==2.7.0', # Maximum version taskcluster will work with
'taskcluster==0.0.27',
'treeherder-client==1.7.0',
]
setup(name='pulse-actions',
version='0.2.0',
description='A pulse listener that acts upon messages with mozci.',
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
author='Alice Scarpa',
author_email='alicescarpa@gmail.com',
license='MPL 2.0',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=deps,
url='https://github.com/adusca/pulse_actions',
entry_points={
'console_scripts': [
'run-pulse-actions = pulse_actions.worker:main'
],
})
|
<commit_before>from setuptools import setup, find_packages
deps = [
'ijson==2.2',
'mozci==0.15.1',
'MozillaPulse==1.2.1',
'requests==2.7.0', # Maximum version taskcluster will work with
'taskcluster==0.0.27',
'treeherder-client==1.7.0',
]
setup(name='pulse-actions',
version='0.2.0',
description='A pulse listener that acts upon messages with mozci.',
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
author='Alice Scarpa',
author_email='alicescarpa@gmail.com',
license='MPL 2.0',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=deps,
url='https://github.com/adusca/pulse_actions',
entry_points={
'console_scripts': [
'run-pulse-actions = pulse_actions.worker:main'
],
})
<commit_msg>Upgrade to working MozillaPulse version<commit_after>from setuptools import setup, find_packages
deps = [
'ijson==2.2',
'mozci==0.15.1',
'MozillaPulse==1.2.2',
'requests==2.7.0', # Maximum version taskcluster will work with
'taskcluster==0.0.27',
'treeherder-client==1.7.0',
]
setup(name='pulse-actions',
version='0.2.0',
description='A pulse listener that acts upon messages with mozci.',
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
author='Alice Scarpa',
author_email='alicescarpa@gmail.com',
license='MPL 2.0',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=deps,
url='https://github.com/adusca/pulse_actions',
entry_points={
'console_scripts': [
'run-pulse-actions = pulse_actions.worker:main'
],
})
|
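Side note on the pin style above: exact '==' pins freeze each dependency, and the compatibility ceiling for requests lives only in a comment. A sketch of encoding that intent as a machine-readable range instead (a hypothetical alternative, not the project's actual choice):

deps = [
    'requests>=2.0,<=2.7.0',  # ceiling taskcluster is known to work with
]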
45369df80923fdd42f7d5c079f6131c0ace43130
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read()
requirements = [
# Package requirements here
"Jinja2==2.9.5"
]
test_requirements = [
# Package test requirements here
]
setup(
name='HtmlTestRunner',
version='1',
description="A Test Runner in python, for Human Readable HTML Reports",
long_description=readme + '\n\n' + history,
author="Ordanis Sanchez Suero",
author_email='ordanisanchez@gmail.com',
url='https://github.com/oldani/HtmlTestRunner',
packages=[
'HtmlTestRunner',
],
package_dir={'HtmlTestRunner':
'HtmlTestRunner'},
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=False,
keywords='HtmlTestRunner, TestRunner, Html Reports',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read()
requirements = [
# Package requirements here
"Jinja2==2.9.5"
]
test_requirements = [
# Package test requirements here
]
setup(
name='HtmlTestRunner',
version='1',
description="A Test Runner in python, for Human Readable HTML Reports",
long_description=readme + '\n\n' + history,
author="Ordanis Sanchez Suero",
author_email='ordanisanchez@gmail.com',
url='https://github.com/oldani/HtmlTestRunner',
packages=[
'HtmlTestRunner',
],
package_dir={'HtmlTestRunner':
'HtmlTestRunner'},
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=False,
keywords='HtmlTestRunner TestRunner Html Reports',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
|
Update development status and keywords
|
Update development status and keywords
|
Python
|
mit
|
oldani/HtmlTestRunner,oldani/HtmlTestRunner
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read()
requirements = [
# Package requirements here
"Jinja2==2.9.5"
]
test_requirements = [
# Package test requirements here
]
setup(
name='HtmlTestRunner',
version='1',
description="A Test Runner in python, for Human Readable HTML Reports",
long_description=readme + '\n\n' + history,
author="Ordanis Sanchez Suero",
author_email='ordanisanchez@gmail.com',
url='https://github.com/oldani/HtmlTestRunner',
packages=[
'HtmlTestRunner',
],
package_dir={'HtmlTestRunner':
'HtmlTestRunner'},
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=False,
keywords='HtmlTestRunner, TestRunner, Html Reports',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
Update development status and keywords
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read()
requirements = [
# Package requirements here
"Jinja2==2.9.5"
]
test_requirements = [
# Package test requirements here
]
setup(
name='HtmlTestRunner',
version='1',
description="A Test Runner in python, for Human Readable HTML Reports",
long_description=readme + '\n\n' + history,
author="Ordanis Sanchez Suero",
author_email='ordanisanchez@gmail.com',
url='https://github.com/oldani/HtmlTestRunner',
packages=[
'HtmlTestRunner',
],
package_dir={'HtmlTestRunner':
'HtmlTestRunner'},
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=False,
keywords='HtmlTestRunner TestRunner Html Reports',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read()
requirements = [
# Package requirements here
"Jinja2==2.9.5"
]
test_requirements = [
# Package test requirements here
]
setup(
name='HtmlTestRunner',
version='1',
description="A Test Runner in python, for Human Readable HTML Reports",
long_description=readme + '\n\n' + history,
author="Ordanis Sanchez Suero",
author_email='ordanisanchez@gmail.com',
url='https://github.com/oldani/HtmlTestRunner',
packages=[
'HtmlTestRunner',
],
package_dir={'HtmlTestRunner':
'HtmlTestRunner'},
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=False,
keywords='HtmlTestRunner, TestRunner, Html Reports',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
<commit_msg>Update development status and keywords<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read()
requirements = [
# Package requirements here
"Jinja2==2.9.5"
]
test_requirements = [
# Package test requirements here
]
setup(
name='HtmlTestRunner',
version='1',
description="A Test Runner in python, for Human Readable HTML Reports",
long_description=readme + '\n\n' + history,
author="Ordanis Sanchez Suero",
author_email='ordanisanchez@gmail.com',
url='https://github.com/oldani/HtmlTestRunner',
packages=[
'HtmlTestRunner',
],
package_dir={'HtmlTestRunner':
'HtmlTestRunner'},
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=False,
keywords='HtmlTestRunner TestRunner Html Reports',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read()
requirements = [
# Package requirements here
"Jinja2==2.9.5"
]
test_requirements = [
# Package test requirements here
]
setup(
name='HtmlTestRunner',
version='1',
description="A Test Runner in python, for Human Readable HTML Reports",
long_description=readme + '\n\n' + history,
author="Ordanis Sanchez Suero",
author_email='ordanisanchez@gmail.com',
url='https://github.com/oldani/HtmlTestRunner',
packages=[
'HtmlTestRunner',
],
package_dir={'HtmlTestRunner':
'HtmlTestRunner'},
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=False,
keywords='HtmlTestRunner, TestRunner, Html Reports',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
Update development status and keywords
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read()
requirements = [
# Package requirements here
"Jinja2==2.9.5"
]
test_requirements = [
# Package test requirements here
]
setup(
name='HtmlTestRunner',
version='1',
description="A Test Runner in python, for Human Readable HTML Reports",
long_description=readme + '\n\n' + history,
author="Ordanis Sanchez Suero",
author_email='ordanisanchez@gmail.com',
url='https://github.com/oldani/HtmlTestRunner',
packages=[
'HtmlTestRunner',
],
package_dir={'HtmlTestRunner':
'HtmlTestRunner'},
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=False,
keywords='HtmlTestRunner TestRunner Html Reports',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read()
requirements = [
# Package requirements here
"Jinja2==2.9.5"
]
test_requirements = [
# Package test requirements here
]
setup(
name='HtmlTestRunner',
version='1',
description="A Test Runner in python, for Human Readable HTML Reports",
long_description=readme + '\n\n' + history,
author="Ordanis Sanchez Suero",
author_email='ordanisanchez@gmail.com',
url='https://github.com/oldani/HtmlTestRunner',
packages=[
'HtmlTestRunner',
],
package_dir={'HtmlTestRunner':
'HtmlTestRunner'},
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=False,
keywords='HtmlTestRunner, TestRunner, Html Reports',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
<commit_msg>Update development status and keywords<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read()
requirements = [
# Package requirements here
"Jinja2==2.9.5"
]
test_requirements = [
# Package test requirements here
]
setup(
name='HtmlTestRunner',
version='1',
description="A Test Runner in python, for Human Readable HTML Reports",
long_description=readme + '\n\n' + history,
author="Ordanis Sanchez Suero",
author_email='ordanisanchez@gmail.com',
url='https://github.com/oldani/HtmlTestRunner',
packages=[
'HtmlTestRunner',
],
package_dir={'HtmlTestRunner':
'HtmlTestRunner'},
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=False,
keywords='HtmlTestRunner TestRunner Html Reports',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
|
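Note on the keywords change above: setuptools accepts keywords either as a whitespace- or comma-separated string or as a list of strings; the list form makes the individual terms unambiguous. An equivalent sketch:

from setuptools import setup

setup(
    name='HtmlTestRunner',
    # same four terms as the space-separated string used in the commit
    keywords=['HtmlTestRunner', 'TestRunner', 'Html', 'Reports'],
)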
5fb92c2cf19fc7990db9945c89db31ca32930696
|
setup.py
|
setup.py
|
import os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), encoding='utf-8') as readme:
README = readme.read().split('h1>', 2)[1]
setup(
name='django-postgres-extra',
version='1.21a3',
packages=find_packages(),
include_package_data=True,
license='MIT License',
description='Bringing all of PostgreSQL\'s awesomeness to Django.',
long_description=README,
url='https://github.com/SectorLabs/django-postgres-extra',
author='Sector Labs',
author_email='open-source@sectorlabs.ro',
keywords=['django', 'postgres', 'extra', 'hstore', 'ltree'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
]
)
|
import os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), encoding='utf-8') as readme:
README = readme.read().split('h1>\n\n', 2)[1]
setup(
name='django-postgres-extra',
version='1.21a4',
packages=find_packages(),
include_package_data=True,
license='MIT License',
description='Bringing all of PostgreSQL\'s awesomeness to Django.',
long_description=README,
url='https://github.com/SectorLabs/django-postgres-extra',
author='Sector Labs',
author_email='open-source@sectorlabs.ro',
keywords=['django', 'postgres', 'extra', 'hstore', 'ltree'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
]
)
|
Cut out blank lines at the start of PyPi README
|
Cut out blank lines at the start of PyPi README
|
Python
|
mit
|
SectorLabs/django-postgres-extra
|
import os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), encoding='utf-8') as readme:
README = readme.read().split('h1>', 2)[1]
setup(
name='django-postgres-extra',
version='1.21a3',
packages=find_packages(),
include_package_data=True,
license='MIT License',
description='Bringing all of PostgreSQL\'s awesomeness to Django.',
long_description=README,
url='https://github.com/SectorLabs/django-postgres-extra',
author='Sector Labs',
author_email='open-source@sectorlabs.ro',
keywords=['django', 'postgres', 'extra', 'hstore', 'ltree'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
]
)
Cut out blank lines at the start of PyPi README
|
import os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), encoding='utf-8') as readme:
README = readme.read().split('h1>\n\n', 2)[1]
setup(
name='django-postgres-extra',
version='1.21a4',
packages=find_packages(),
include_package_data=True,
license='MIT License',
description='Bringing all of PostgreSQL\'s awesomeness to Django.',
long_description=README,
url='https://github.com/SectorLabs/django-postgres-extra',
author='Sector Labs',
author_email='open-source@sectorlabs.ro',
keywords=['django', 'postgres', 'extra', 'hstore', 'ltree'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
]
)
|
<commit_before>import os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), encoding='utf-8') as readme:
README = readme.read().split('h1>', 2)[1]
setup(
name='django-postgres-extra',
version='1.21a3',
packages=find_packages(),
include_package_data=True,
license='MIT License',
description='Bringing all of PostgreSQL\'s awesomeness to Django.',
long_description=README,
url='https://github.com/SectorLabs/django-postgres-extra',
author='Sector Labs',
author_email='open-source@sectorlabs.ro',
keywords=['django', 'postgres', 'extra', 'hstore', 'ltree'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
]
)
<commit_msg>Cut out blank lines at the start of PyPi README<commit_after>
|
import os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), encoding='utf-8') as readme:
README = readme.read().split('h1>\n\n', 2)[1]
setup(
name='django-postgres-extra',
version='1.21a4',
packages=find_packages(),
include_package_data=True,
license='MIT License',
description='Bringing all of PostgreSQL\'s awesomeness to Django.',
long_description=README,
url='https://github.com/SectorLabs/django-postgres-extra',
author='Sector Labs',
author_email='open-source@sectorlabs.ro',
keywords=['django', 'postgres', 'extra', 'hstore', 'ltree'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
]
)
|
import os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), encoding='utf-8') as readme:
README = readme.read().split('h1>', 2)[1]
setup(
name='django-postgres-extra',
version='1.21a3',
packages=find_packages(),
include_package_data=True,
license='MIT License',
description='Bringing all of PostgreSQL\'s awesomeness to Django.',
long_description=README,
url='https://github.com/SectorLabs/django-postgres-extra',
author='Sector Labs',
author_email='open-source@sectorlabs.ro',
keywords=['django', 'postgres', 'extra', 'hstore', 'ltree'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
]
)
Cut out blank lines at the start of PyPi README
import os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), encoding='utf-8') as readme:
README = readme.read().split('h1>\n\n', 2)[1]
setup(
name='django-postgres-extra',
version='1.21a4',
packages=find_packages(),
include_package_data=True,
license='MIT License',
description='Bringing all of PostgreSQL\'s awesomeness to Django.',
long_description=README,
url='https://github.com/SectorLabs/django-postgres-extra',
author='Sector Labs',
author_email='open-source@sectorlabs.ro',
keywords=['django', 'postgres', 'extra', 'hstore', 'ltree'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
]
)
|
<commit_before>import os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), encoding='utf-8') as readme:
README = readme.read().split('h1>', 2)[1]
setup(
name='django-postgres-extra',
version='1.21a3',
packages=find_packages(),
include_package_data=True,
license='MIT License',
description='Bringing all of PostgreSQL\'s awesomeness to Django.',
long_description=README,
url='https://github.com/SectorLabs/django-postgres-extra',
author='Sector Labs',
author_email='open-source@sectorlabs.ro',
keywords=['django', 'postgres', 'extra', 'hstore', 'ltree'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
]
)
<commit_msg>Cut out blank lines at the start of PyPi README<commit_after>import os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), encoding='utf-8') as readme:
README = readme.read().split('h1>\n\n', 2)[1]
setup(
name='django-postgres-extra',
version='1.21a4',
packages=find_packages(),
include_package_data=True,
license='MIT License',
description='Bringing all of PostgreSQL\'s awesomeness to Django.',
long_description=README,
url='https://github.com/SectorLabs/django-postgres-extra',
author='Sector Labs',
author_email='open-source@sectorlabs.ro',
keywords=['django', 'postgres', 'extra', 'hstore', 'ltree'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
]
)
|
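Editor's note on the record above: the fix swaps split('h1>', 2)[1] for split('h1>\n\n', 2)[1] so the text handed to PyPI no longer begins with blank lines. A minimal sketch of the same idea that does not depend on the exact separator follows; read_long_description is a hypothetical helper for illustration, not code from the repository.

# Hedged sketch, assuming an RST README whose body follows an 'h1>' marker;
# lstrip('\n') drops however many leading blank lines remain.
import os

def read_long_description(path):
    with open(path, encoding='utf-8') as readme:
        text = readme.read()
    _, _, body = text.partition('h1>')  # keep everything after the marker
    return body.lstrip('\n')            # trim leading blank lines for PyPI

print(read_long_description(os.path.join(os.path.dirname(__file__), 'README.rst')))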
19a47d8390ed1db3f91568375bb2726ee56d24f3
|
setup.py
|
setup.py
|
from setuptools import setup
PACKAGE_VERSION = '1.0'
deps = []
setup(name='wptserve',
version=PACKAGE_VERSION,
description="Python webserver intended for in web browser testing",
long_description=open("README.md").read(),
# Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=["Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: BSD License",
"Topic :: Internet :: WWW/HTTP :: HTTP Servers"],
keywords='',
author='James Graham',
author_email='james@hoppipolla.co.uk',
url='http://wptserve.readthedocs.org/',
license='BSD',
packages=['wptserve'],
include_package_data=True,
zip_safe=False,
install_requires=deps
)
|
from setuptools import setup
PACKAGE_VERSION = '1.0.1'
deps = []
setup(name='wptserve',
version=PACKAGE_VERSION,
description="Python webserver intended for in web browser testing",
long_description=open("README.md").read(),
# Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=["Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: BSD License",
"Topic :: Internet :: WWW/HTTP :: HTTP Servers"],
keywords='',
author='James Graham',
author_email='james@hoppipolla.co.uk',
url='http://wptserve.readthedocs.org/',
license='BSD',
packages=['wptserve'],
include_package_data=True,
zip_safe=False,
install_requires=deps
)
|
Update the version for manifest update
|
Update the version for manifest update
|
Python
|
bsd-3-clause
|
youennf/wptserve
|
from setuptools import setup
PACKAGE_VERSION = '1.0'
deps = []
setup(name='wptserve',
version=PACKAGE_VERSION,
description="Python webserver intended for in web browser testing",
long_description=open("README.md").read(),
# Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=["Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: BSD License",
"Topic :: Internet :: WWW/HTTP :: HTTP Servers"],
keywords='',
author='James Graham',
author_email='james@hoppipolla.co.uk',
url='http://wptserve.readthedocs.org/',
license='BSD',
packages=['wptserve'],
include_package_data=True,
zip_safe=False,
install_requires=deps
)
Update the version for manifest update
|
from setuptools import setup
PACKAGE_VERSION = '1.0.1'
deps = []
setup(name='wptserve',
version=PACKAGE_VERSION,
description="Python webserver intended for in web browser testing",
long_description=open("README.md").read(),
# Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=["Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: BSD License",
"Topic :: Internet :: WWW/HTTP :: HTTP Servers"],
keywords='',
author='James Graham',
author_email='james@hoppipolla.co.uk',
url='http://wptserve.readthedocs.org/',
license='BSD',
packages=['wptserve'],
include_package_data=True,
zip_safe=False,
install_requires=deps
)
|
<commit_before>from setuptools import setup
PACKAGE_VERSION = '1.0'
deps = []
setup(name='wptserve',
version=PACKAGE_VERSION,
description="Python webserver intended for in web browser testing",
long_description=open("README.md").read(),
# Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=["Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: BSD License",
"Topic :: Internet :: WWW/HTTP :: HTTP Servers"],
keywords='',
author='James Graham',
author_email='james@hoppipolla.co.uk',
url='http://wptserve.readthedocs.org/',
license='BSD',
packages=['wptserve'],
include_package_data=True,
zip_safe=False,
install_requires=deps
)
<commit_msg>Update the version for manifest update<commit_after>
|
from setuptools import setup
PACKAGE_VERSION = '1.0.1'
deps = []
setup(name='wptserve',
version=PACKAGE_VERSION,
description="Python webserver intended for in web browser testing",
long_description=open("README.md").read(),
# Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=["Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: BSD License",
"Topic :: Internet :: WWW/HTTP :: HTTP Servers"],
keywords='',
author='James Graham',
author_email='james@hoppipolla.co.uk',
url='http://wptserve.readthedocs.org/',
license='BSD',
packages=['wptserve'],
include_package_data=True,
zip_safe=False,
install_requires=deps
)
|
from setuptools import setup
PACKAGE_VERSION = '1.0'
deps = []
setup(name='wptserve',
version=PACKAGE_VERSION,
description="Python webserver intended for in web browser testing",
long_description=open("README.md").read(),
# Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=["Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: BSD License",
"Topic :: Internet :: WWW/HTTP :: HTTP Servers"],
keywords='',
author='James Graham',
author_email='james@hoppipolla.co.uk',
url='http://wptserve.readthedocs.org/',
license='BSD',
packages=['wptserve'],
include_package_data=True,
zip_safe=False,
install_requires=deps
)
Update the version for manifest update
from setuptools import setup
PACKAGE_VERSION = '1.0.1'
deps = []
setup(name='wptserve',
version=PACKAGE_VERSION,
description="Python webserver intended for in web browser testing",
long_description=open("README.md").read(),
# Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=["Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: BSD License",
"Topic :: Internet :: WWW/HTTP :: HTTP Servers"],
keywords='',
author='James Graham',
author_email='james@hoppipolla.co.uk',
url='http://wptserve.readthedocs.org/',
license='BSD',
packages=['wptserve'],
include_package_data=True,
zip_safe=False,
install_requires=deps
)
|
<commit_before>from setuptools import setup
PACKAGE_VERSION = '1.0'
deps = []
setup(name='wptserve',
version=PACKAGE_VERSION,
description="Python webserver intended for in web browser testing",
long_description=open("README.md").read(),
# Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=["Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: BSD License",
"Topic :: Internet :: WWW/HTTP :: HTTP Servers"],
keywords='',
author='James Graham',
author_email='james@hoppipolla.co.uk',
url='http://wptserve.readthedocs.org/',
license='BSD',
packages=['wptserve'],
include_package_data=True,
zip_safe=False,
install_requires=deps
)
<commit_msg>Update the version for manifest update<commit_after>from setuptools import setup
PACKAGE_VERSION = '1.0.1'
deps = []
setup(name='wptserve',
version=PACKAGE_VERSION,
description="Python webserver intended for in web browser testing",
long_description=open("README.md").read(),
# Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=["Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: BSD License",
"Topic :: Internet :: WWW/HTTP :: HTTP Servers"],
keywords='',
author='James Graham',
author_email='james@hoppipolla.co.uk',
url='http://wptserve.readthedocs.org/',
license='BSD',
packages=['wptserve'],
include_package_data=True,
zip_safe=False,
install_requires=deps
)
|
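Editor's note: the only change in this record is bumping PACKAGE_VERSION by hand. A hedged sketch of single-sourcing the version instead is below; the wptserve/__init__.py path and the __version__ convention are assumptions of this note, not something the record shows.

# Hedged sketch: parse __version__ from the package rather than repeating the
# number in setup.py, so the two cannot drift apart.
import re

def read_version(init_path='wptserve/__init__.py'):  # path is an assumption
    with open(init_path) as f:
        match = re.search(r"__version__\s*=\s*['\"]([^'\"]+)['\"]", f.read())
    if match is None:
        raise RuntimeError('no __version__ found in ' + init_path)
    return match.group(1)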
363dbc3dac71b9ce2d5ab7d9178253fc9b5bf483
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
import os
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README')) as f:
README = f.read()
requires = []
setup(name='python-bitcoinlib',
version='0.2.0',
description='This python library provides an easy interface to the bitcoin data structures and protocol.',
long_description=README,
classifiers=[
"Programming Language :: Python",
],
url='https://github.com/petertodd/python-bitcoinlib',
keywords='bitcoin',
packages=find_packages(),
zip_safe=False,
install_requires=requires,
test_suite="bitcoin.tests"
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
import os
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README')) as f:
README = f.read()
requires = []
setup(name='python-bitcoinlib',
version='0.2.1-SNAPSHOT',
description='This python library provides an easy interface to the bitcoin data structures and protocol.',
long_description=README,
classifiers=[
"Programming Language :: Python",
],
url='https://github.com/petertodd/python-bitcoinlib',
keywords='bitcoin',
packages=find_packages(),
zip_safe=False,
install_requires=requires,
test_suite="bitcoin.tests"
)
|
Reset version for future development
|
Reset version for future development
|
Python
|
mit
|
petertodd/dust-b-gone
|
#!/usr/bin/env python
from setuptools import setup, find_packages
import os
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README')) as f:
README = f.read()
requires = []
setup(name='python-bitcoinlib',
version='0.2.0',
description='This python library provides an easy interface to the bitcoin data structures and protocol.',
long_description=README,
classifiers=[
"Programming Language :: Python",
],
url='https://github.com/petertodd/python-bitcoinlib',
keywords='bitcoin',
packages=find_packages(),
zip_safe=False,
install_requires=requires,
test_suite="bitcoin.tests"
)
Reset version for future development
|
#!/usr/bin/env python
from setuptools import setup, find_packages
import os
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README')) as f:
README = f.read()
requires = []
setup(name='python-bitcoinlib',
version='0.2.1-SNAPSHOT',
description='This python library provides an easy interface to the bitcoin data structures and protocol.',
long_description=README,
classifiers=[
"Programming Language :: Python",
],
url='https://github.com/petertodd/python-bitcoinlib',
keywords='bitcoin',
packages=find_packages(),
zip_safe=False,
install_requires=requires,
test_suite="bitcoin.tests"
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
import os
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README')) as f:
README = f.read()
requires = []
setup(name='python-bitcoinlib',
version='0.2.0',
description='This python library provides an easy interface to the bitcoin data structures and protocol.',
long_description=README,
classifiers=[
"Programming Language :: Python",
],
url='https://github.com/petertodd/python-bitcoinlib',
keywords='bitcoin',
packages=find_packages(),
zip_safe=False,
install_requires=requires,
test_suite="bitcoin.tests"
)
<commit_msg>Reset version for future development<commit_after>
|
#!/usr/bin/env python
from setuptools import setup, find_packages
import os
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README')) as f:
README = f.read()
requires = []
setup(name='python-bitcoinlib',
version='0.2.1-SNAPSHOT',
description='This python library provides an easy interface to the bitcoin data structures and protocol.',
long_description=README,
classifiers=[
"Programming Language :: Python",
],
url='https://github.com/petertodd/python-bitcoinlib',
keywords='bitcoin',
packages=find_packages(),
zip_safe=False,
install_requires=requires,
test_suite="bitcoin.tests"
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
import os
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README')) as f:
README = f.read()
requires = []
setup(name='python-bitcoinlib',
version='0.2.0',
description='This python library provides an easy interface to the bitcoin data structures and protocol.',
long_description=README,
classifiers=[
"Programming Language :: Python",
],
url='https://github.com/petertodd/python-bitcoinlib',
keywords='bitcoin',
packages=find_packages(),
zip_safe=False,
install_requires=requires,
test_suite="bitcoin.tests"
)
Reset version for future development
#!/usr/bin/env python
from setuptools import setup, find_packages
import os
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README')) as f:
README = f.read()
requires = []
setup(name='python-bitcoinlib',
version='0.2.1-SNAPSHOT',
description='This python library provides an easy interface to the bitcoin data structures and protocol.',
long_description=README,
classifiers=[
"Programming Language :: Python",
],
url='https://github.com/petertodd/python-bitcoinlib',
keywords='bitcoin',
packages=find_packages(),
zip_safe=False,
install_requires=requires,
test_suite="bitcoin.tests"
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
import os
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README')) as f:
README = f.read()
requires = []
setup(name='python-bitcoinlib',
version='0.2.0',
description='This python library provides an easy interface to the bitcoin data structures and protocol.',
long_description=README,
classifiers=[
"Programming Language :: Python",
],
url='https://github.com/petertodd/python-bitcoinlib',
keywords='bitcoin',
packages=find_packages(),
zip_safe=False,
install_requires=requires,
test_suite="bitcoin.tests"
)
<commit_msg>Reset version for future development<commit_after>#!/usr/bin/env python
from setuptools import setup, find_packages
import os
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README')) as f:
README = f.read()
requires = []
setup(name='python-bitcoinlib',
version='0.2.1-SNAPSHOT',
description='This python library provides an easy interface to the bitcoin data structures and protocol.',
long_description=README,
classifiers=[
"Programming Language :: Python",
],
url='https://github.com/petertodd/python-bitcoinlib',
keywords='bitcoin',
packages=find_packages(),
zip_safe=False,
install_requires=requires,
test_suite="bitcoin.tests"
)
|
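Editor's note: '0.2.1-SNAPSHOT' borrows Maven's development-version marker; PEP 440, which pip enforces today although it postdates this commit, spells the same intent as '0.2.1.dev0'. A small check using the packaging library follows; reaching for that library is this note's assumption, not the repository's.

# Hedged sketch: PEP 440 accepts .devN suffixes but not '-SNAPSHOT'.
from packaging.version import Version, InvalidVersion

for candidate in ('0.2.1.dev0', '0.2.1-SNAPSHOT'):
    try:
        print(candidate, 'parses as', Version(candidate))
    except InvalidVersion:
        print(candidate, 'is not a valid PEP 440 version')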
1e117182e6169645940a7d7acc3eba9181e5715e
|
setup.py
|
setup.py
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name="pysolr",
version="3.0.3",
description="Lightweight python wrapper for Apache Solr.",
author='Daniel Lindsley',
author_email='daniel@toastdriven.com',
long_description=open('README.rst', 'r').read(),
py_modules=[
'pysolr'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Indexing/Search',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
url='http://github.com/toastdriven/pysolr/',
license='BSD',
install_requires=[
'requests>=1.1.0'
],
extra_requires={
'tomcat': [
'lxml',
'cssselect',
],
}
)
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name="pysolr",
version="3.0.4",
description="Lightweight python wrapper for Apache Solr.",
author='Daniel Lindsley',
author_email='daniel@toastdriven.com',
long_description=open('README.rst', 'r').read(),
py_modules=[
'pysolr'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Indexing/Search',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
url='http://github.com/toastdriven/pysolr/',
license='BSD',
install_requires=[
'requests>=1.1.0'
],
extra_requires={
'tomcat': [
'lxml',
'cssselect',
],
}
)
|
Tag version 3.0.4 for PyPI
|
Tag version 3.0.4 for PyPI
3.x had a minor bug (see SHA:74b0a36) that broke logging for Solr
errors, which seems worth an easily deployed fix
|
Python
|
bsd-3-clause
|
rokaka/pysolr,toastdriven/pysolr,mbeacom/pysolr,CANTUS-Project/pysolr-tornado,mylanium/pysolr,swistakm/pysolr,toastdriven/pysolr,mbeacom/pysolr,django-searchstack/skisolr,10clouds/pysolr,django-haystack/pysolr,mylanium/pysolr,CANTUS-Project/pysolr-tornado,django-searchstack/skisolr,swistakm/pysolr,upayavira/pysolr,shasha79/pysolr,10clouds/pysolr,django-haystack/pysolr,shasha79/pysolr,rokaka/pysolr,upayavira/pysolr
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name="pysolr",
version="3.0.3",
description="Lightweight python wrapper for Apache Solr.",
author='Daniel Lindsley',
author_email='daniel@toastdriven.com',
long_description=open('README.rst', 'r').read(),
py_modules=[
'pysolr'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Indexing/Search',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
url='http://github.com/toastdriven/pysolr/',
license='BSD',
install_requires=[
'requests>=1.1.0'
],
extra_requires={
'tomcat': [
'lxml',
'cssselect',
],
}
)
Tag version 3.0.4 for PyPI
3.x had a minor bug (see SHA:74b0a36) that broke logging for Solr
errors, which seems worth an easily deployed fix
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name="pysolr",
version="3.0.4",
description="Lightweight python wrapper for Apache Solr.",
author='Daniel Lindsley',
author_email='daniel@toastdriven.com',
long_description=open('README.rst', 'r').read(),
py_modules=[
'pysolr'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Indexing/Search',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
url='http://github.com/toastdriven/pysolr/',
license='BSD',
install_requires=[
'requests>=1.1.0'
],
extra_requires={
'tomcat': [
'lxml',
'cssselect',
],
}
)
|
<commit_before>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name="pysolr",
version="3.0.3",
description="Lightweight python wrapper for Apache Solr.",
author='Daniel Lindsley',
author_email='daniel@toastdriven.com',
long_description=open('README.rst', 'r').read(),
py_modules=[
'pysolr'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Indexing/Search',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
url='http://github.com/toastdriven/pysolr/',
license='BSD',
install_requires=[
'requests>=1.1.0'
],
extra_requires={
'tomcat': [
'lxml',
'cssselect',
],
}
)
<commit_msg>Tag version 3.0.4 for PyPI
3.x had a minor bug (see SHA:74b0a36) that broke logging for Solr
errors, which seems worth an easily deployed fix<commit_after>
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name="pysolr",
version="3.0.4",
description="Lightweight python wrapper for Apache Solr.",
author='Daniel Lindsley',
author_email='daniel@toastdriven.com',
long_description=open('README.rst', 'r').read(),
py_modules=[
'pysolr'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Indexing/Search',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
url='http://github.com/toastdriven/pysolr/',
license='BSD',
install_requires=[
'requests>=1.1.0'
],
extra_requires={
'tomcat': [
'lxml',
'cssselect',
],
}
)
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name="pysolr",
version="3.0.3",
description="Lightweight python wrapper for Apache Solr.",
author='Daniel Lindsley',
author_email='daniel@toastdriven.com',
long_description=open('README.rst', 'r').read(),
py_modules=[
'pysolr'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Indexing/Search',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
url='http://github.com/toastdriven/pysolr/',
license='BSD',
install_requires=[
'requests>=1.1.0'
],
extra_requires={
'tomcat': [
'lxml',
'cssselect',
],
}
)
Tag version 3.0.4 for PyPI
3.x had a minor bug (see SHA:74b0a36) that broke logging for Solr
errors, which seems worth an easily deployed fix
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name="pysolr",
version="3.0.4",
description="Lightweight python wrapper for Apache Solr.",
author='Daniel Lindsley',
author_email='daniel@toastdriven.com',
long_description=open('README.rst', 'r').read(),
py_modules=[
'pysolr'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Indexing/Search',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
url='http://github.com/toastdriven/pysolr/',
license='BSD',
install_requires=[
'requests>=1.1.0'
],
extra_requires={
'tomcat': [
'lxml',
'cssselect',
],
}
)
|
<commit_before>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name="pysolr",
version="3.0.3",
description="Lightweight python wrapper for Apache Solr.",
author='Daniel Lindsley',
author_email='daniel@toastdriven.com',
long_description=open('README.rst', 'r').read(),
py_modules=[
'pysolr'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Indexing/Search',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
url='http://github.com/toastdriven/pysolr/',
license='BSD',
install_requires=[
'requests>=1.1.0'
],
extra_requires={
'tomcat': [
'lxml',
'cssselect',
],
}
)
<commit_msg>Tag version 3.0.4 for PyPI
3.x had a minor bug (see SHA:74b0a36) that broke logging for Solr
errors, which seems worth an easily deployed fix<commit_after>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name="pysolr",
version="3.0.4",
description="Lightweight python wrapper for Apache Solr.",
author='Daniel Lindsley',
author_email='daniel@toastdriven.com',
long_description=open('README.rst', 'r').read(),
py_modules=[
'pysolr'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Indexing/Search',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
url='http://github.com/toastdriven/pysolr/',
license='BSD',
install_requires=[
'requests>=1.1.0'
],
extra_requires={
'tomcat': [
'lxml',
'cssselect',
],
}
)
|
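Editor's note: both sides of this record pass extra_requires to setup(); the keyword setuptools actually reads is extras_require, so the 'tomcat' extra is silently ignored in either version. A sketch of the intended spelling follows; the commented setup() call shows usage and nothing here is taken from the repository.

# Hedged sketch: with the correct keyword, 'pip install pysolr[tomcat]'
# would pull in the optional HTML-scraping dependencies.
extras_require = {
    'tomcat': [
        'lxml',
        'cssselect',
    ],
}
# setup(..., extras_require=extras_require)  # note the spelling and word order
print(sorted(extras_require['tomcat']))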
ed8b46542c831b5e3692368c15619a46fbe338e1
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name="PyRundeck",
version="0.3.4",
description="A thin, pure Python wrapper for the Rundeck API",
author="Panagiotis Koutsourakis",
author_email="kutsurak@ekt.gr",
license='BSD',
url='https://github.com/EKT/pyrundeck',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Internet :: REST API client',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
keywords='rest api client rundeck',
packages=find_packages(exclude=['tests', '*_virtualenv', 'doc']),
install_requires=[
'lxml>=3.4.4',
'requests>=2.7.0',
'pyopenssl>=0.15.1',
'ndg-httpsclient>=0.4.0',
'pyasn1>=0.1.8'
]
)
|
from setuptools import setup, find_packages
setup(
name="PyRundeck",
version="0.3.4",
description="A thin, pure Python wrapper for the Rundeck API",
author="Panagiotis Koutsourakis",
author_email="kutsurak@ekt.gr",
license='BSD',
url='https://github.com/EKT/pyrundeck',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Internet :: REST API client',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
keywords='rest api client rundeck',
packages=find_packages(exclude=['tests', '*_virtualenv', 'doc']),
install_requires=[
'lxml>=3.4.4',
'requests>=2.7.0',
'pyopenssl>=0.15.1',
'ndg-httpsclient>=0.4.0',
'pyasn1>=0.1.8',
'pyyaml>=3.11'
]
)
|
Add PyYAML as a dependency
|
Add PyYAML as a dependency
|
Python
|
bsd-3-clause
|
EKT/pyrundeck
|
from setuptools import setup, find_packages
setup(
name="PyRundeck",
version="0.3.4",
description="A thin, pure Python wrapper for the Rundeck API",
author="Panagiotis Koutsourakis",
author_email="kutsurak@ekt.gr",
license='BSD',
url='https://github.com/EKT/pyrundeck',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Internet :: REST API client',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
keywords='rest api client rundeck',
packages=find_packages(exclude=['tests', '*_virtualenv', 'doc']),
install_requires=[
'lxml>=3.4.4',
'requests>=2.7.0',
'pyopenssl>=0.15.1',
'ndg-httpsclient>=0.4.0',
'pyasn1>=0.1.8'
]
)
Add PyYAML as a dependency
|
from setuptools import setup, find_packages
setup(
name="PyRundeck",
version="0.3.4",
description="A thin, pure Python wrapper for the Rundeck API",
author="Panagiotis Koutsourakis",
author_email="kutsurak@ekt.gr",
license='BSD',
url='https://github.com/EKT/pyrundeck',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Internet :: REST API client',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
keywords='rest api client rundeck',
packages=find_packages(exclude=['tests', '*_virtualenv', 'doc']),
install_requires=[
'lxml>=3.4.4',
'requests>=2.7.0',
'pyopenssl>=0.15.1',
'ndg-httpsclient>=0.4.0',
'pyasn1>=0.1.8',
'pyyaml>=3.11'
]
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name="PyRundeck",
version="0.3.4",
description="A thin, pure Python wrapper for the Rundeck API",
author="Panagiotis Koutsourakis",
author_email="kutsurak@ekt.gr",
license='BSD',
url='https://github.com/EKT/pyrundeck',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Internet :: REST API client',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
keywords='rest api client rundeck',
packages=find_packages(exclude=['tests', '*_virtualenv', 'doc']),
install_requires=[
'lxml>=3.4.4',
'requests>=2.7.0',
'pyopenssl>=0.15.1',
'ndg-httpsclient>=0.4.0',
'pyasn1>=0.1.8'
]
)
<commit_msg>Add PyYAML as a dependency<commit_after>
|
from setuptools import setup, find_packages
setup(
name="PyRundeck",
version="0.3.4",
description="A thin, pure Python wrapper for the Rundeck API",
author="Panagiotis Koutsourakis",
author_email="kutsurak@ekt.gr",
license='BSD',
url='https://github.com/EKT/pyrundeck',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Internet :: REST API client',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
keywords='rest api client rundeck',
packages=find_packages(exclude=['tests', '*_virtualenv', 'doc']),
install_requires=[
'lxml>=3.4.4',
'requests>=2.7.0',
'pyopenssl>=0.15.1',
'ndg-httpsclient>=0.4.0',
'pyasn1>=0.1.8',
'pyyaml>=3.11'
]
)
|
from setuptools import setup, find_packages
setup(
name="PyRundeck",
version="0.3.4",
description="A thin, pure Python wrapper for the Rundeck API",
author="Panagiotis Koutsourakis",
author_email="kutsurak@ekt.gr",
license='BSD',
url='https://github.com/EKT/pyrundeck',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Internet :: REST API client',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
keywords='rest api client rundeck',
packages=find_packages(exclude=['tests', '*_virtualenv', 'doc']),
install_requires=[
'lxml>=3.4.4',
'requests>=2.7.0',
'pyopenssl>=0.15.1',
'ndg-httpsclient>=0.4.0',
'pyasn1>=0.1.8'
]
)
Add PyYAML as a dependency
from setuptools import setup, find_packages
setup(
name="PyRundeck",
version="0.3.4",
description="A thin, pure Python wrapper for the Rundeck API",
author="Panagiotis Koutsourakis",
author_email="kutsurak@ekt.gr",
license='BSD',
url='https://github.com/EKT/pyrundeck',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Internet :: REST API client',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
keywords='rest api client rundeck',
packages=find_packages(exclude=['tests', '*_virtualenv', 'doc']),
install_requires=[
'lxml>=3.4.4',
'requests>=2.7.0',
'pyopenssl>=0.15.1',
'ndg-httpsclient>=0.4.0',
'pyasn1>=0.1.8',
'pyyaml>=3.11'
]
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name="PyRundeck",
version="0.3.4",
description="A thin, pure Python wrapper for the Rundeck API",
author="Panagiotis Koutsourakis",
author_email="kutsurak@ekt.gr",
license='BSD',
url='https://github.com/EKT/pyrundeck',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Internet :: REST API client',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
keywords='rest api client rundeck',
packages=find_packages(exclude=['tests', '*_virtualenv', 'doc']),
install_requires=[
'lxml>=3.4.4',
'requests>=2.7.0',
'pyopenssl>=0.15.1',
'ndg-httpsclient>=0.4.0',
'pyasn1>=0.1.8'
]
)
<commit_msg>Add PyYAML as a dependency<commit_after>from setuptools import setup, find_packages
setup(
name="PyRundeck",
version="0.3.4",
description="A thin, pure Python wrapper for the Rundeck API",
author="Panagiotis Koutsourakis",
author_email="kutsurak@ekt.gr",
license='BSD',
url='https://github.com/EKT/pyrundeck',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Internet :: REST API client',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
keywords='rest api client rundeck',
packages=find_packages(exclude=['tests', '*_virtualenv', 'doc']),
install_requires=[
'lxml>=3.4.4',
'requests>=2.7.0',
'pyopenssl>=0.15.1',
'ndg-httpsclient>=0.4.0',
'pyasn1>=0.1.8',
'pyyaml>=3.11'
]
)
|
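Editor's note: the distribution added in this record is published on PyPI as 'pyyaml' but imported as 'yaml'. A quick smoke test after installation, purely illustrative:

# Hedged sketch: confirm the new dependency resolves and parses YAML.
import yaml  # provided by the 'pyyaml' distribution

print(yaml.safe_load('retries: 3'))  # -> {'retries': 3}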
dcf4d88b6562cafb7a365e14d66a3a1967365210
|
setup.py
|
setup.py
|
from setuptools import find_packages, setup
setup(
name="redshift-etl",
version="0.27b.1",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases (or S3 files) to Redshift cluster",
license="MIT",
keywords="redshift postgresql etl extract transform load",
url="https://github.com/harrystech/harrys-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*"
]
},
scripts=[
"python/scripts/submit_arthur.sh",
"python/scripts/re_run_partial_pipeline.py"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
|
from setuptools import find_packages, setup
setup(
name="redshift-etl",
version="0.27.0",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases (or S3 files) to Redshift cluster",
license="MIT",
keywords="redshift postgresql etl extract transform load",
url="https://github.com/harrystech/harrys-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*"
]
},
scripts=[
"python/scripts/submit_arthur.sh",
"python/scripts/re_run_partial_pipeline.py"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
|
Change version to match RC (v0.27.0)
|
Change version to match RC (v0.27.0)
|
Python
|
mit
|
harrystech/arthur-redshift-etl,harrystech/arthur-redshift-etl,harrystech/arthur-redshift-etl,harrystech/arthur-redshift-etl,harrystech/arthur-redshift-etl
|
from setuptools import find_packages, setup
setup(
name="redshift-etl",
version="0.27b.1",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases (or S3 files) to Redshift cluster",
license="MIT",
keywords="redshift postgresql etl extract transform load",
url="https://github.com/harrystech/harrys-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*"
]
},
scripts=[
"python/scripts/submit_arthur.sh",
"python/scripts/re_run_partial_pipeline.py"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
Change version to match RC (v0.27.0)
|
from setuptools import find_packages, setup
setup(
name="redshift-etl",
version="0.27.0",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases (or S3 files) to Redshift cluster",
license="MIT",
keywords="redshift postgresql etl extract transform load",
url="https://github.com/harrystech/harrys-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*"
]
},
scripts=[
"python/scripts/submit_arthur.sh",
"python/scripts/re_run_partial_pipeline.py"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
|
<commit_before>from setuptools import find_packages, setup
setup(
name="redshift-etl",
version="0.27b.1",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases (or S3 files) to Redshift cluster",
license="MIT",
keywords="redshift postgresql etl extract transform load",
url="https://github.com/harrystech/harrys-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*"
]
},
scripts=[
"python/scripts/submit_arthur.sh",
"python/scripts/re_run_partial_pipeline.py"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
<commit_msg>Change version to match RC (v0.27.0)<commit_after>
|
from setuptools import find_packages, setup
setup(
name="redshift-etl",
version="0.27.0",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases (or S3 files) to Redshift cluster",
license="MIT",
keywords="redshift postgresql etl extract transform load",
url="https://github.com/harrystech/harrys-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*"
]
},
scripts=[
"python/scripts/submit_arthur.sh",
"python/scripts/re_run_partial_pipeline.py"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
|
from setuptools import find_packages, setup
setup(
name="redshift-etl",
version="0.27b.1",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases (or S3 files) to Redshift cluster",
license="MIT",
keywords="redshift postgresql etl extract transform load",
url="https://github.com/harrystech/harrys-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*"
]
},
scripts=[
"python/scripts/submit_arthur.sh",
"python/scripts/re_run_partial_pipeline.py"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
Change version to match RC (v0.27.0)
from setuptools import find_packages, setup
setup(
name="redshift-etl",
version="0.27.0",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases (or S3 files) to Redshift cluster",
license="MIT",
keywords="redshift postgresql etl extract transform load",
url="https://github.com/harrystech/harrys-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*"
]
},
scripts=[
"python/scripts/submit_arthur.sh",
"python/scripts/re_run_partial_pipeline.py"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
|
<commit_before>from setuptools import find_packages, setup
setup(
name="redshift-etl",
version="0.27b.1",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases (or S3 files) to Redshift cluster",
license="MIT",
keywords="redshift postgresql etl extract transform load",
url="https://github.com/harrystech/harrys-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*"
]
},
scripts=[
"python/scripts/submit_arthur.sh",
"python/scripts/re_run_partial_pipeline.py"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
<commit_msg>Change version to match RC (v0.27.0)<commit_after>from setuptools import find_packages, setup
setup(
name="redshift-etl",
version="0.27.0",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases (or S3 files) to Redshift cluster",
license="MIT",
keywords="redshift postgresql etl extract transform load",
url="https://github.com/harrystech/harrys-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*"
]
},
scripts=[
"python/scripts/submit_arthur.sh",
"python/scripts/re_run_partial_pipeline.py"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
|
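Editor's note: the NB comment in this record explains the unusual 'arthur.py' entry-point name. On the code side, a console_scripts target is just an importable module plus a zero-argument callable; the module below is a hypothetical stand-in for etl.commands, not the project's code.

# Hedged sketch of what 'arthur.py = etl.commands:run_arg_as_command' expects.
import sys

def run_arg_as_command():
    # setuptools generates an 'arthur.py' launcher that imports this module
    # and calls the function with no arguments; sys.argv is untouched.
    print('arthur invoked with:', sys.argv[1:])

if __name__ == '__main__':
    run_arg_as_command()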
c0a8ead2092f6479621bed030bac5f3ee1c5872a
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(name="hal-json",
version="0.1",
description="Parse and encode links according to RFC 5988 or HAL specs",
author="Michael Burrows, Carlos Martín",
author_email="inean.es@gmail.com",
url="https://inean@github.com/inean/LinkHeader.git",
packages=find_packages(),
license="BSD",
keywords="RFC5988, HAL, json",
zip_safe=True,
long_description="""
A simple module to allow developers to format
or encode links according to RFC 5988 or trying to follow HAL
specifications (http://stateless.co/hal_specification.html)
""",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Software Development :: Libraries :: Python Modules"
]
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(name="hal-json",
version="0.1",
description="Parse and encode links according to RFC 5988 or HAL specs",
author="Michael Burrows, Carlos Martín",
author_email="inean.es@gmail.com",
url="https://github.com/inean/LinkHeader.git",
packages=find_packages(),
license="BSD",
keywords="RFC5988, HAL, json",
zip_safe=True,
long_description="""
A simple module to allow developers to format
or encode links according to RFC 5988 or trying to follow HAL
specifications (http://stateless.co/hal_specification.html)
""",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Software Development :: Libraries :: Python Modules"
]
)
|
Use public github URL for package homepage
|
Use public github URL for package homepage
|
Python
|
mit
|
inean/LinkHeader
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(name="hal-json",
version="0.1",
description="Parse and encode links according to RFC 5988 or HAL specs",
author="Michael Burrows, Carlos Martín",
author_email="inean.es@gmail.com",
url="https://inean@github.com/inean/LinkHeader.git",
packages=find_packages(),
license="BSD",
keywords="RFC5988, HAL, json",
zip_safe=True,
long_description="""
A simple module to allow developers to format
or encode links according to RFC 5988 or trying to follow HAL
specifications (http://stateless.co/hal_specification.html)
""",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Software Development :: Libraries :: Python Modules"
]
)
Use public github URL for package homepage
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(name="hal-json",
version="0.1",
description="Parse and encode links according to RFC 5988 or HAL specs",
author="Michael Burrows, Carlos Martín",
author_email="inean.es@gmail.com",
url="https://github.com/inean/LinkHeader.git",
packages=find_packages(),
license="BSD",
keywords="RFC5988, HAL, json",
zip_safe=True,
long_description="""
A simple module to allow developers to format
or encode links according to RFC 5988 or trying to follow HAL
specifications (http://stateless.co/hal_specification.html)
""",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Software Development :: Libraries :: Python Modules"
]
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(name="hal-json",
version="0.1",
description="Parse and encode links according to RFC 5988 or HAL specs",
author="Michael Burrows, Carlos Martín",
author_email="inean.es@gmail.com",
url="https://inean@github.com/inean/LinkHeader.git",
packages=find_packages(),
license="BSD",
keywords="RFC5988, HAL, json",
zip_safe=True,
long_description="""
A simple module to allow developers to format
or encode links according to RFC 5988 or trying to follow HAL
specifications (http://stateless.co/hal_specification.html)
""",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Software Development :: Libraries :: Python Modules"
]
)
<commit_msg>Use public github URL for package homepage<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(name="hal-json",
version="0.1",
description="Parse and encode links according to RFC 5988 or HAL specs",
author="Michael Burrows, Carlos Martín",
author_email="inean.es@gmail.com",
url="https://github.com/inean/LinkHeader.git",
packages=find_packages(),
license="BSD",
keywords="RFC5988, HAL, json",
zip_safe=True,
long_description="""
A simple module to allow developers to format
or encode links according to RFC 5988 or trying to follow HAL
specifications (http://stateless.co/hal_specification.html)
""",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Software Development :: Libraries :: Python Modules"
]
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(name="hal-json",
version="0.1",
description="Parse and encode links according to RFC 5988 or HAL specs",
author="Michael Burrows, Carlos Martín",
author_email="inean.es@gmail.com",
url="https://inean@github.com/inean/LinkHeader.git",
packages=find_packages(),
license="BSD",
keywords="RFC5988, HAL, json",
zip_safe=True,
long_description="""
A simple module to allow developers to format
or encode links according to RFC 5988 or trying to follow HAL
specifications (http://stateless.co/hal_specification.html)
""",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Software Development :: Libraries :: Python Modules"
]
)
Use public github URL for package homepage
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(name="hal-json",
version="0.1",
description="Parse and encode links according to RFC 5988 or HAL specs",
author="Michael Burrows, Carlos Martín",
author_email="inean.es@gmail.com",
url="https://github.com/inean/LinkHeader.git",
packages=find_packages(),
license="BSD",
keywords="RFC5988, HAL, json",
zip_safe=True,
long_description="""
A simple module to allow developers to format
or encode links according to RFC 5988 or trying to follow HAL
specifications (http://stateless.co/hal_specification.html)
""",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Software Development :: Libraries :: Python Modules"
]
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(name="hal-json",
version="0.1",
description="Parse and encode links according to RFC 5988 or HAL specs",
author="Michael Burrows, Carlos Martín",
author_email="inean.es@gmail.com",
url="https://inean@github.com/inean/LinkHeader.git",
packages=find_packages(),
license="BSD",
keywords="RFC5988, HAL, json",
zip_safe=True,
long_description="""
A simple module to allow developers to format
or encode links according to RFC 5988 or trying to follow HAL
specifications (http://stateless.co/hal_specification.html)
""",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Software Development :: Libraries :: Python Modules"
]
)
<commit_msg>Use public github URL for package homepage<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(name="hal-json",
version="0.1",
description="Parse and encode links according to RFC 5988 or HAL specs",
author="Michael Burrows, Carlos Martín",
author_email="inean.es@gmail.com",
url="https://github.com/inean/LinkHeader.git",
packages=find_packages(),
license="BSD",
keywords="RFC5988, HAL, json",
zip_safe=True,
long_description="""
A simple module to allow developers to format
or encode links according to RFC 5988 or trying to follow HAL
specifications (http://stateless.co/hal_specification.html)
""",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Software Development :: Libraries :: Python Modules"
]
)
|
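Editor's note: the fix drops the credentialed 'inean@' form from the homepage URL. Newer setuptools can also carry several links via project_urls; the mapping below is illustrative, not from the record.

# Hedged sketch: separate homepage and source links on the PyPI page.
metadata = {
    'url': 'https://github.com/inean/LinkHeader',
    'project_urls': {
        'Source': 'https://github.com/inean/LinkHeader.git',
        'Specification': 'http://stateless.co/hal_specification.html',
    },
}
# setup(..., url=metadata['url'], project_urls=metadata['project_urls'])
print(metadata['project_urls']['Source'])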
17d2bde78fb195536bd57b5b92aa8ba557d4314b
|
setup.py
|
setup.py
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name = "clrsvsim",
version = "0.0.2",
description = "Color Genomics Structural Variant Simulator",
author = "Color Genomics",
author_email = "dev@color.com",
url = "https://github.com/ColorGenomics/clrsvsim",
packages = ["clrsvsim"],
install_requires=[
'cigar==0.1.3',
'numpy==1.10.1',
'preconditions==0.1',
'pyfasta==0.5.2',
'pysam==0.10.0',
],
tests_require=[
'mock==2.0.0',
'nose==1.3.7',
],
license = "Apache-2.0",
)
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name = "clrsvsim",
version = "0.1.0",
description = "Color Genomics Structural Variant Simulator",
author = "Color Genomics",
author_email = "dev@color.com",
url = "https://github.com/ColorGenomics/clrsvsim",
packages = ["clrsvsim"],
install_requires=[
'cigar>=0.1.3',
'numpy>=1.10.1',
'preconditions>=0.1',
'pyfasta>=0.5.2',
'pysam>=0.10.0',
],
tests_require=[
# NOTE: `mock` is not actually needed in Python 3.
# `unittest.mock` can be used instead.
'mock>=2.0.0',
'nose>=1.3.7',
],
license = "Apache-2.0",
)
|
Stop pinning to specific patch versions
|
Stop pinning to specific patch versions
This package should not require specific patch versions. Rather, it
should express the broadest range of possible versions that it supports.
In the current `setup.py`, we are very particular about which versions
must be used. Because of the specificity, installing `clrsvsim` makes
updating any other dependent libraries (e.g. `numpy`) quite difficult.
Modern dependency managers like `poetry` or `pipenv` will loudly
complain about dependency conflicts, making `clrsvsim` a problematic
dependency.
Instead, we should just specify each of these as the *minimum* version
required. If there's some reason that the package does not support the
latest versions, then we can sort that out at a later time.
|
Python
|
apache-2.0
|
color/clrsvsim
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name = "clrsvsim",
version = "0.0.2",
description = "Color Genomics Structural Variant Simulator",
author = "Color Genomics",
author_email = "dev@color.com",
url = "https://github.com/ColorGenomics/clrsvsim",
packages = ["clrsvsim"],
install_requires=[
'cigar==0.1.3',
'numpy==1.10.1',
'preconditions==0.1',
'pyfasta==0.5.2',
'pysam==0.10.0',
],
tests_require=[
'mock==2.0.0',
'nose==1.3.7',
],
license = "Apache-2.0",
)
Stop pinning to specific patch versions
This package should not require specific patch versions. Rather, it
should express the broadest range of possible versions that it supports.
In the current `setup.py`, we are very particular about which versions
must be used. Because of the specificity, installing `clrsvsim` makes
updating any other dependent libraries (e.g. `numpy`) quite difficult.
Modern dependency managers like `poetry` or `pipenv` will loudly
complain about dependency conflicts, making `clrsvsim` a problematic
dependency.
Instead, we should just specify each of these as the *minimum* version
required. If there's some reason that the package does not support the
latest versions, then we can sort that out at a later time.
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name = "clrsvsim",
version = "0.1.0",
description = "Color Genomics Structural Variant Simulator",
author = "Color Genomics",
author_email = "dev@color.com",
url = "https://github.com/ColorGenomics/clrsvsim",
packages = ["clrsvsim"],
install_requires=[
'cigar>=0.1.3',
'numpy>=1.10.1',
'preconditions>=0.1',
'pyfasta>=0.5.2',
'pysam>=0.10.0',
],
tests_require=[
# NOTE: `mock` is not actually needed in Python 3.
# `unittest.mock` can be used instead.
'mock>=2.0.0',
'nose>=1.3.7',
],
license = "Apache-2.0",
)
|
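Editor's note: the commit message above argues that a library should declare minimum versions rather than exact pins. A middle ground, sketched with purely illustrative bounds, is a compatible range that still guards against a known-breaking major release.

# Hedged sketch: '==' pins, open '>=' minimums, and an upper-bounded range.
install_requires = [
    'numpy>=1.10.1,<2',   # minimum plus a guard against a breaking major
    'pysam>=0.10.0',      # open minimum, as the commit message recommends
]
for spec in install_requires:
    print(spec)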
<commit_before>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name = "clrsvsim",
version = "0.0.2",
description = "Color Genomics Structural Variant Simulator",
author = "Color Genomics",
author_email = "dev@color.com",
url = "https://github.com/ColorGenomics/clrsvsim",
packages = ["clrsvsim"],
install_requires=[
'cigar==0.1.3',
'numpy==1.10.1',
'preconditions==0.1',
'pyfasta==0.5.2',
'pysam==0.10.0',
],
tests_require=[
'mock==2.0.0',
'nose==1.3.7',
],
license = "Apache-2.0",
)
<commit_msg>Stop pinning to specific patch versions
This package should not require specific patch versions. Rather, it
should express the broadest range of possible versions that it supports.
In the current `setup.py`, we are very particular about which versions
must be used. Because of the specificity, installing `clrsvsim` makes
updating any other dependent libraries (e.g. `numpy`) quite difficult.
Modern dependency managers like `poetry` or `pipenv` will loudly
complain about dependency conflicts, making `clrsvsim` a problematic
dependency.
Instead, we should just specify each of these as the *minimum* version
required. If there's some reason that the package does not support the
latest versions, then we can sort that out at a later time.<commit_after>
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name = "clrsvsim",
version = "0.1.0",
description = "Color Genomics Structural Variant Simulator",
author = "Color Genomics",
author_email = "dev@color.com",
url = "https://github.com/ColorGenomics/clrsvsim",
packages = ["clrsvsim"],
install_requires=[
'cigar>=0.1.3',
'numpy>=1.10.1',
'preconditions>=0.1',
'pyfasta>=0.5.2',
'pysam>=0.10.0',
],
tests_require=[
# NOTE: `mock` is not actually needed in Python 3.
# `unittest.mock` can be used instead.
'mock>=2.0.0',
'nose>=1.3.7',
],
license = "Apache-2.0",
)
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name = "clrsvsim",
version = "0.0.2",
description = "Color Genomics Structural Variant Simulator",
author = "Color Genomics",
author_email = "dev@color.com",
url = "https://github.com/ColorGenomics/clrsvsim",
packages = ["clrsvsim"],
install_requires=[
'cigar==0.1.3',
'numpy==1.10.1',
'preconditions==0.1',
'pyfasta==0.5.2',
'pysam==0.10.0',
],
tests_require=[
'mock==2.0.0',
'nose==1.3.7',
],
license = "Apache-2.0",
)
Stop pinning to specific patch versions
This package should not require specific patch versions. Rather, it
should express the broadest range of possible versions that it supports.
In the current `setup.py`, we are very particular about which versions
must be used. Because of the specificity, installing `clrsvsim` makes
updating any other dependent libraries (e.g. `numpy`) quite difficult.
Modern dependency managers like `poetry` or `pipenv` will loudly
complain about dependency conflicts, making `clrsvsim` a problematic
dependency.
Instead, we should just specify each of these as the *minimum* version
required. If there's some reason that the package does not support the
latest versions, then we can sort that out at a later time.try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name = "clrsvsim",
version = "0.1.0",
description = "Color Genomics Structural Variant Simulator",
author = "Color Genomics",
author_email = "dev@color.com",
url = "https://github.com/ColorGenomics/clrsvsim",
packages = ["clrsvsim"],
install_requires=[
'cigar>=0.1.3',
'numpy>=1.10.1',
'preconditions>=0.1',
'pyfasta>=0.5.2',
'pysam>=0.10.0',
],
tests_require=[
# NOTE: `mock` is not actually needed in Python 3.
# `unittest.mock` can be used instead.
'mock>=2.0.0',
'nose>=1.3.7',
],
license = "Apache-2.0",
)
|
<commit_before>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name = "clrsvsim",
version = "0.0.2",
description = "Color Genomics Structural Variant Simulator",
author = "Color Genomics",
author_email = "dev@color.com",
url = "https://github.com/ColorGenomics/clrsvsim",
packages = ["clrsvsim"],
install_requires=[
'cigar==0.1.3',
'numpy==1.10.1',
'preconditions==0.1',
'pyfasta==0.5.2',
'pysam==0.10.0',
],
tests_require=[
'mock==2.0.0',
'nose==1.3.7',
],
license = "Apache-2.0",
)
<commit_msg>Stop pinning to specific patch versions
This package should not require specific patch versions. Rather, it
should express the broadest range of possible versions that it supports.
In the current `setup.py`, we are very particular about which versions
must be used. Because of the specificity, installing `clrsvsim` makes
updating any other dependent libraries (e.g. `numpy`) quite difficult.
Modern dependency managers like `poetry` or `pipenv` will loudly
complain about dependency conflicts, making `clrsvsim` a problematic
dependency.
Instead, we should just specify each of these as the *minimum* version
required. If there's some reason that the package does not support the
latest versions, then we can sort that out at a later time.<commit_after>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name = "clrsvsim",
version = "0.1.0",
description = "Color Genomics Structural Variant Simulator",
author = "Color Genomics",
author_email = "dev@color.com",
url = "https://github.com/ColorGenomics/clrsvsim",
packages = ["clrsvsim"],
install_requires=[
'cigar>=0.1.3',
'numpy>=1.10.1',
'preconditions>=0.1',
'pyfasta>=0.5.2',
'pysam>=0.10.0',
],
tests_require=[
# NOTE: `mock` is not actually needed in Python 3.
# `unittest.mock` can be used instead.
'mock>=2.0.0',
'nose>=1.3.7',
],
license = "Apache-2.0",
)
|
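For illustration (an addition, not part of the dataset record above): a minimal runnable sketch of how the `==` pins and `>=` bounds compared in this commit behave under PEP 440, using the `packaging` library. The version numbers are taken from the record; everything else is assumed for the example.

from packaging.specifiers import SpecifierSet
from packaging.version import Version

# The old, exact pin and the relaxed, minimum-version bound from the diff.
pinned = SpecifierSet("==1.10.1")
minimum = SpecifierSet(">=1.10.1")

for candidate in ["1.9.0", "1.10.1", "1.16.0"]:
    v = Version(candidate)
    # SpecifierSet supports containment checks against Version objects.
    print(candidate, v in pinned, v in minimum)

# 1.9.0   -> False False  (older than the minimum; rejected by both)
# 1.10.1  -> True  True   (the only version the exact pin accepts)
# 1.16.0  -> False True   (newer releases satisfy only the >= bound)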