commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
dc6d56b7997a9c15419fb66cd724ae3fc1a011a0
|
rdrf/rdrf/initial_data/iprestrict_permissive.py
|
rdrf/rdrf/initial_data/iprestrict_permissive.py
|
"""Disable iprestriction completely."""
from iprestrict.models import IPGroup, IPRange, Rule
def load_data(**kwargs):
allow_all()
def allow_all():
all_group = get_or_create_all_group()
Rule.objects.all().delete()
Rule.objects.create(
ip_group=all_group,
action='A',
url_pattern='ALL',
rank=65536)
def get_or_create_all_group():
all_group, created = IPGroup.objects.get_or_create(name='ALL', description='Matches ALL IP Addresses')
if created:
IPRange.objects.create(
ip_group=all_group,
first_ip='0.0.0.0',
last_ip='255.255.255.255')
IPRange.objects.create(
ip_group=all_group,
first_ip='0:0:0:0:0:0:0:0',
last_ip='ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff')
return all_group
|
"""Disable iprestriction completely."""
from iprestrict.models import RangeBasedIPGroup, IPRange, Rule
def load_data(**kwargs):
allow_all()
def allow_all():
all_group = get_or_create_all_group()
Rule.objects.all().delete()
Rule.objects.create(
ip_group=all_group,
action='A',
url_pattern='ALL',
rank=65536)
def get_or_create_all_group():
all_group, created = RangeBasedIPGroup.objects.get_or_create(name='ALL', description='Matches ALL IP Addresses')
if created:
IPRange.objects.create(
ip_group=all_group,
first_ip='0.0.0.0',
last_ip='255.255.255.255')
IPRange.objects.create(
ip_group=all_group,
first_ip='0:0:0:0:0:0:0:0',
last_ip='ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff')
return all_group
|
Update iprestrict models in initial data
|
Update iprestrict models in initial data
|
Python
|
agpl-3.0
|
muccg/rdrf,muccg/rdrf,muccg/rdrf,muccg/rdrf,muccg/rdrf
|
"""Disable iprestriction completely."""
from iprestrict.models import IPGroup, IPRange, Rule
def load_data(**kwargs):
allow_all()
def allow_all():
all_group = get_or_create_all_group()
Rule.objects.all().delete()
Rule.objects.create(
ip_group=all_group,
action='A',
url_pattern='ALL',
rank=65536)
def get_or_create_all_group():
all_group, created = IPGroup.objects.get_or_create(name='ALL', description='Matches ALL IP Addresses')
if created:
IPRange.objects.create(
ip_group=all_group,
first_ip='0.0.0.0',
last_ip='255.255.255.255')
IPRange.objects.create(
ip_group=all_group,
first_ip='0:0:0:0:0:0:0:0',
last_ip='ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff')
return all_group
Update iprestrict models in initial data
|
"""Disable iprestriction completely."""
from iprestrict.models import RangeBasedIPGroup, IPRange, Rule
def load_data(**kwargs):
allow_all()
def allow_all():
all_group = get_or_create_all_group()
Rule.objects.all().delete()
Rule.objects.create(
ip_group=all_group,
action='A',
url_pattern='ALL',
rank=65536)
def get_or_create_all_group():
all_group, created = RangeBasedIPGroup.objects.get_or_create(name='ALL', description='Matches ALL IP Addresses')
if created:
IPRange.objects.create(
ip_group=all_group,
first_ip='0.0.0.0',
last_ip='255.255.255.255')
IPRange.objects.create(
ip_group=all_group,
first_ip='0:0:0:0:0:0:0:0',
last_ip='ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff')
return all_group
|
<commit_before>"""Disable iprestriction completely."""
from iprestrict.models import IPGroup, IPRange, Rule
def load_data(**kwargs):
allow_all()
def allow_all():
all_group = get_or_create_all_group()
Rule.objects.all().delete()
Rule.objects.create(
ip_group=all_group,
action='A',
url_pattern='ALL',
rank=65536)
def get_or_create_all_group():
all_group, created = IPGroup.objects.get_or_create(name='ALL', description='Matches ALL IP Addresses')
if created:
IPRange.objects.create(
ip_group=all_group,
first_ip='0.0.0.0',
last_ip='255.255.255.255')
IPRange.objects.create(
ip_group=all_group,
first_ip='0:0:0:0:0:0:0:0',
last_ip='ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff')
return all_group
<commit_msg>Update iprestrict models in initial data<commit_after>
|
"""Disable iprestriction completely."""
from iprestrict.models import RangeBasedIPGroup, IPRange, Rule
def load_data(**kwargs):
allow_all()
def allow_all():
all_group = get_or_create_all_group()
Rule.objects.all().delete()
Rule.objects.create(
ip_group=all_group,
action='A',
url_pattern='ALL',
rank=65536)
def get_or_create_all_group():
all_group, created = RangeBasedIPGroup.objects.get_or_create(name='ALL', description='Matches ALL IP Addresses')
if created:
IPRange.objects.create(
ip_group=all_group,
first_ip='0.0.0.0',
last_ip='255.255.255.255')
IPRange.objects.create(
ip_group=all_group,
first_ip='0:0:0:0:0:0:0:0',
last_ip='ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff')
return all_group
|
"""Disable iprestriction completely."""
from iprestrict.models import IPGroup, IPRange, Rule
def load_data(**kwargs):
allow_all()
def allow_all():
all_group = get_or_create_all_group()
Rule.objects.all().delete()
Rule.objects.create(
ip_group=all_group,
action='A',
url_pattern='ALL',
rank=65536)
def get_or_create_all_group():
all_group, created = IPGroup.objects.get_or_create(name='ALL', description='Matches ALL IP Addresses')
if created:
IPRange.objects.create(
ip_group=all_group,
first_ip='0.0.0.0',
last_ip='255.255.255.255')
IPRange.objects.create(
ip_group=all_group,
first_ip='0:0:0:0:0:0:0:0',
last_ip='ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff')
return all_group
Update iprestrict models in initial data"""Disable iprestriction completely."""
from iprestrict.models import RangeBasedIPGroup, IPRange, Rule
def load_data(**kwargs):
allow_all()
def allow_all():
all_group = get_or_create_all_group()
Rule.objects.all().delete()
Rule.objects.create(
ip_group=all_group,
action='A',
url_pattern='ALL',
rank=65536)
def get_or_create_all_group():
all_group, created = RangeBasedIPGroup.objects.get_or_create(name='ALL', description='Matches ALL IP Addresses')
if created:
IPRange.objects.create(
ip_group=all_group,
first_ip='0.0.0.0',
last_ip='255.255.255.255')
IPRange.objects.create(
ip_group=all_group,
first_ip='0:0:0:0:0:0:0:0',
last_ip='ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff')
return all_group
|
<commit_before>"""Disable iprestriction completely."""
from iprestrict.models import IPGroup, IPRange, Rule
def load_data(**kwargs):
allow_all()
def allow_all():
all_group = get_or_create_all_group()
Rule.objects.all().delete()
Rule.objects.create(
ip_group=all_group,
action='A',
url_pattern='ALL',
rank=65536)
def get_or_create_all_group():
all_group, created = IPGroup.objects.get_or_create(name='ALL', description='Matches ALL IP Addresses')
if created:
IPRange.objects.create(
ip_group=all_group,
first_ip='0.0.0.0',
last_ip='255.255.255.255')
IPRange.objects.create(
ip_group=all_group,
first_ip='0:0:0:0:0:0:0:0',
last_ip='ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff')
return all_group
<commit_msg>Update iprestrict models in initial data<commit_after>"""Disable iprestriction completely."""
from iprestrict.models import RangeBasedIPGroup, IPRange, Rule
def load_data(**kwargs):
allow_all()
def allow_all():
all_group = get_or_create_all_group()
Rule.objects.all().delete()
Rule.objects.create(
ip_group=all_group,
action='A',
url_pattern='ALL',
rank=65536)
def get_or_create_all_group():
all_group, created = RangeBasedIPGroup.objects.get_or_create(name='ALL', description='Matches ALL IP Addresses')
if created:
IPRange.objects.create(
ip_group=all_group,
first_ip='0.0.0.0',
last_ip='255.255.255.255')
IPRange.objects.create(
ip_group=all_group,
first_ip='0:0:0:0:0:0:0:0',
last_ip='ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff')
return all_group
|
a20a63415bf1343ab826d1155c1004e84b14077e
|
massa/validation.py
|
massa/validation.py
|
# -*- coding: utf-8 -*-
from schematics.exceptions import ConversionError, ValidationError
def validate(schema, data):
try:
schema.import_data(data)
schema.validate()
except (ConversionError, ValidationError) as e:
raise InvalidInputError(details=e.messages)
def weight_validator(value):
if abs(value.as_tuple().exponent) > 1:
raise ValidationError('Only one decimal point is allowed.')
return value
|
# -*- coding: utf-8 -*-
from schematics.exceptions import ConversionError, ValidationError
from .errors import InvalidInputError
def validate(schema, data):
try:
schema.import_data(data)
schema.validate()
except (ConversionError, ValidationError) as e:
raise InvalidInputError(details=e.messages)
def weight_validator(value):
if abs(value.as_tuple().exponent) > 1:
raise ValidationError('Only one decimal point is allowed.')
return value
|
Fix bug, InvalidInputError not defined.
|
Fix bug, InvalidInputError not defined.
|
Python
|
mit
|
jaapverloop/massa
|
# -*- coding: utf-8 -*-
from schematics.exceptions import ConversionError, ValidationError
def validate(schema, data):
try:
schema.import_data(data)
schema.validate()
except (ConversionError, ValidationError) as e:
raise InvalidInputError(details=e.messages)
def weight_validator(value):
if abs(value.as_tuple().exponent) > 1:
raise ValidationError('Only one decimal point is allowed.')
return value
Fix bug, InvalidInputError not defined.
|
# -*- coding: utf-8 -*-
from schematics.exceptions import ConversionError, ValidationError
from .errors import InvalidInputError
def validate(schema, data):
try:
schema.import_data(data)
schema.validate()
except (ConversionError, ValidationError) as e:
raise InvalidInputError(details=e.messages)
def weight_validator(value):
if abs(value.as_tuple().exponent) > 1:
raise ValidationError('Only one decimal point is allowed.')
return value
|
<commit_before># -*- coding: utf-8 -*-
from schematics.exceptions import ConversionError, ValidationError
def validate(schema, data):
try:
schema.import_data(data)
schema.validate()
except (ConversionError, ValidationError) as e:
raise InvalidInputError(details=e.messages)
def weight_validator(value):
if abs(value.as_tuple().exponent) > 1:
raise ValidationError('Only one decimal point is allowed.')
return value
<commit_msg>Fix bug, InvalidInputError not defined.<commit_after>
|
# -*- coding: utf-8 -*-
from schematics.exceptions import ConversionError, ValidationError
from .errors import InvalidInputError
def validate(schema, data):
try:
schema.import_data(data)
schema.validate()
except (ConversionError, ValidationError) as e:
raise InvalidInputError(details=e.messages)
def weight_validator(value):
if abs(value.as_tuple().exponent) > 1:
raise ValidationError('Only one decimal point is allowed.')
return value
|
# -*- coding: utf-8 -*-
from schematics.exceptions import ConversionError, ValidationError
def validate(schema, data):
try:
schema.import_data(data)
schema.validate()
except (ConversionError, ValidationError) as e:
raise InvalidInputError(details=e.messages)
def weight_validator(value):
if abs(value.as_tuple().exponent) > 1:
raise ValidationError('Only one decimal point is allowed.')
return value
Fix bug, InvalidInputError not defined.# -*- coding: utf-8 -*-
from schematics.exceptions import ConversionError, ValidationError
from .errors import InvalidInputError
def validate(schema, data):
try:
schema.import_data(data)
schema.validate()
except (ConversionError, ValidationError) as e:
raise InvalidInputError(details=e.messages)
def weight_validator(value):
if abs(value.as_tuple().exponent) > 1:
raise ValidationError('Only one decimal point is allowed.')
return value
|
<commit_before># -*- coding: utf-8 -*-
from schematics.exceptions import ConversionError, ValidationError
def validate(schema, data):
try:
schema.import_data(data)
schema.validate()
except (ConversionError, ValidationError) as e:
raise InvalidInputError(details=e.messages)
def weight_validator(value):
if abs(value.as_tuple().exponent) > 1:
raise ValidationError('Only one decimal point is allowed.')
return value
<commit_msg>Fix bug, InvalidInputError not defined.<commit_after># -*- coding: utf-8 -*-
from schematics.exceptions import ConversionError, ValidationError
from .errors import InvalidInputError
def validate(schema, data):
try:
schema.import_data(data)
schema.validate()
except (ConversionError, ValidationError) as e:
raise InvalidInputError(details=e.messages)
def weight_validator(value):
if abs(value.as_tuple().exponent) > 1:
raise ValidationError('Only one decimal point is allowed.')
return value
|
a2b418c89e6ad3f85c88b7dfcc2238d62cb2e36e
|
karanja_me/polls/tests.py
|
karanja_me/polls/tests.py
|
from django.test import TestCase
# Create your tests here.
|
import datetime
from django.utils import timezone
from django.test import TestCase
from .models import Question
class QuestionMethodTest(TestCase):
def test_was_published_recently_with_future_question(self):
"""
was_published_recenlty() should return False for questions that the
pub_date is in the future
"""
time = timezone.now() + datetime.timedelta(days = 30)
future_question = Question(pub_date = time)
self.assertEqual(future_question_was_published_recently(), False)
|
Test case for Question method added
|
Test case for Question method added
A test case to avoid future published questions read as recently added
|
Python
|
mit
|
yoda-yoda/django-dive-in,yoda-yoda/django-dive-in,denisKaranja/django-dive-in,denisKaranja/django-dive-in
|
from django.test import TestCase
# Create your tests here.
Test case for Question method added
A test case to avoid future published questions read as recently added
|
import datetime
from django.utils import timezone
from django.test import TestCase
from .models import Question
class QuestionMethodTest(TestCase):
def test_was_published_recently_with_future_question(self):
"""
was_published_recenlty() should return False for questions that the
pub_date is in the future
"""
time = timezone.now() + datetime.timedelta(days = 30)
future_question = Question(pub_date = time)
self.assertEqual(future_question_was_published_recently(), False)
|
<commit_before>from django.test import TestCase
# Create your tests here.
<commit_msg>Test case for Question method added
A test case to avoid future published questions read as recently added<commit_after>
|
import datetime
from django.utils import timezone
from django.test import TestCase
from .models import Question
class QuestionMethodTest(TestCase):
def test_was_published_recently_with_future_question(self):
"""
was_published_recenlty() should return False for questions that the
pub_date is in the future
"""
time = timezone.now() + datetime.timedelta(days = 30)
future_question = Question(pub_date = time)
self.assertEqual(future_question_was_published_recently(), False)
|
from django.test import TestCase
# Create your tests here.
Test case for Question method added
A test case to avoid future published questions read as recently addedimport datetime
from django.utils import timezone
from django.test import TestCase
from .models import Question
class QuestionMethodTest(TestCase):
def test_was_published_recently_with_future_question(self):
"""
was_published_recenlty() should return False for questions that the
pub_date is in the future
"""
time = timezone.now() + datetime.timedelta(days = 30)
future_question = Question(pub_date = time)
self.assertEqual(future_question_was_published_recently(), False)
|
<commit_before>from django.test import TestCase
# Create your tests here.
<commit_msg>Test case for Question method added
A test case to avoid future published questions read as recently added<commit_after>import datetime
from django.utils import timezone
from django.test import TestCase
from .models import Question
class QuestionMethodTest(TestCase):
def test_was_published_recently_with_future_question(self):
"""
was_published_recenlty() should return False for questions that the
pub_date is in the future
"""
time = timezone.now() + datetime.timedelta(days = 30)
future_question = Question(pub_date = time)
self.assertEqual(future_question_was_published_recently(), False)
|
1a1f0a9bca7458153ef84316fd84dfbe56be08ef
|
dolo/config.py
|
dolo/config.py
|
#from __future__ import print_function
# This module is supposed to be imported first
# it contains global variables used for configuration
# try to register printing methods if IPython is running
save_plots = False
try:
import dolo.misc.printing as printing
from numpy import ndarray
from dolo.symbolic.model import Model
from dolo.numeric.decision_rules import DynareDecisionRule
ip = get_ipython()
# there could be some kind of autodecovery there
ip.display_formatter.formatters['text/html'].for_type( ndarray, printing.print_array )
ip.display_formatter.formatters['text/html'].for_type( Model, printing.print_model )
ip.display_formatter.formatters['text/html'].for_type( DynareDecisionRule, printing.print_dynare_decision_rule )
from IPython.core.display import display
except:
print("failing back on pretty_print")
from pprint import pprint
def display(txt):
pprint(txt)
|
#from __future__ import print_function
# This module is supposed to be imported first
# it contains global variables used for configuration
# try to register printing methods if IPython is running
save_plots = False
try:
import dolo.misc.printing as printing
from numpy import ndarray
from dolo.symbolic.model import Model
from dolo.numeric.decision_rules import DynareDecisionRule
ip = get_ipython()
# there could be some kind of autodecovery there
ip.display_formatter.formatters['text/html'].for_type( ndarray, printing.print_array )
ip.display_formatter.formatters['text/html'].for_type( Model, printing.print_model )
ip.display_formatter.formatters['text/html'].for_type( DynareDecisionRule, printing.print_dynare_decision_rule )
from IPython.core.display import display
except:
import re
import sys
if not re.search('dolo-recs',sys.argv[0]):
print("failing back on pretty_print")
from pprint import pprint
def display(txt):
pprint(txt)
|
Remove print("failing back on pretty_print") when using dolo-recs
|
Remove print("failing back on pretty_print") when using dolo-recs
|
Python
|
bsd-2-clause
|
EconForge/dolo
|
#from __future__ import print_function
# This module is supposed to be imported first
# it contains global variables used for configuration
# try to register printing methods if IPython is running
save_plots = False
try:
import dolo.misc.printing as printing
from numpy import ndarray
from dolo.symbolic.model import Model
from dolo.numeric.decision_rules import DynareDecisionRule
ip = get_ipython()
# there could be some kind of autodecovery there
ip.display_formatter.formatters['text/html'].for_type( ndarray, printing.print_array )
ip.display_formatter.formatters['text/html'].for_type( Model, printing.print_model )
ip.display_formatter.formatters['text/html'].for_type( DynareDecisionRule, printing.print_dynare_decision_rule )
from IPython.core.display import display
except:
print("failing back on pretty_print")
from pprint import pprint
def display(txt):
pprint(txt)
Remove print("failing back on pretty_print") when using dolo-recs
|
#from __future__ import print_function
# This module is supposed to be imported first
# it contains global variables used for configuration
# try to register printing methods if IPython is running
save_plots = False
try:
import dolo.misc.printing as printing
from numpy import ndarray
from dolo.symbolic.model import Model
from dolo.numeric.decision_rules import DynareDecisionRule
ip = get_ipython()
# there could be some kind of autodecovery there
ip.display_formatter.formatters['text/html'].for_type( ndarray, printing.print_array )
ip.display_formatter.formatters['text/html'].for_type( Model, printing.print_model )
ip.display_formatter.formatters['text/html'].for_type( DynareDecisionRule, printing.print_dynare_decision_rule )
from IPython.core.display import display
except:
import re
import sys
if not re.search('dolo-recs',sys.argv[0]):
print("failing back on pretty_print")
from pprint import pprint
def display(txt):
pprint(txt)
|
<commit_before>#from __future__ import print_function
# This module is supposed to be imported first
# it contains global variables used for configuration
# try to register printing methods if IPython is running
save_plots = False
try:
import dolo.misc.printing as printing
from numpy import ndarray
from dolo.symbolic.model import Model
from dolo.numeric.decision_rules import DynareDecisionRule
ip = get_ipython()
# there could be some kind of autodecovery there
ip.display_formatter.formatters['text/html'].for_type( ndarray, printing.print_array )
ip.display_formatter.formatters['text/html'].for_type( Model, printing.print_model )
ip.display_formatter.formatters['text/html'].for_type( DynareDecisionRule, printing.print_dynare_decision_rule )
from IPython.core.display import display
except:
print("failing back on pretty_print")
from pprint import pprint
def display(txt):
pprint(txt)
<commit_msg>Remove print("failing back on pretty_print") when using dolo-recs<commit_after>
|
#from __future__ import print_function
# This module is supposed to be imported first
# it contains global variables used for configuration
# try to register printing methods if IPython is running
save_plots = False
try:
import dolo.misc.printing as printing
from numpy import ndarray
from dolo.symbolic.model import Model
from dolo.numeric.decision_rules import DynareDecisionRule
ip = get_ipython()
# there could be some kind of autodecovery there
ip.display_formatter.formatters['text/html'].for_type( ndarray, printing.print_array )
ip.display_formatter.formatters['text/html'].for_type( Model, printing.print_model )
ip.display_formatter.formatters['text/html'].for_type( DynareDecisionRule, printing.print_dynare_decision_rule )
from IPython.core.display import display
except:
import re
import sys
if not re.search('dolo-recs',sys.argv[0]):
print("failing back on pretty_print")
from pprint import pprint
def display(txt):
pprint(txt)
|
#from __future__ import print_function
# This module is supposed to be imported first
# it contains global variables used for configuration
# try to register printing methods if IPython is running
save_plots = False
try:
import dolo.misc.printing as printing
from numpy import ndarray
from dolo.symbolic.model import Model
from dolo.numeric.decision_rules import DynareDecisionRule
ip = get_ipython()
# there could be some kind of autodecovery there
ip.display_formatter.formatters['text/html'].for_type( ndarray, printing.print_array )
ip.display_formatter.formatters['text/html'].for_type( Model, printing.print_model )
ip.display_formatter.formatters['text/html'].for_type( DynareDecisionRule, printing.print_dynare_decision_rule )
from IPython.core.display import display
except:
print("failing back on pretty_print")
from pprint import pprint
def display(txt):
pprint(txt)
Remove print("failing back on pretty_print") when using dolo-recs#from __future__ import print_function
# This module is supposed to be imported first
# it contains global variables used for configuration
# try to register printing methods if IPython is running
save_plots = False
try:
import dolo.misc.printing as printing
from numpy import ndarray
from dolo.symbolic.model import Model
from dolo.numeric.decision_rules import DynareDecisionRule
ip = get_ipython()
# there could be some kind of autodecovery there
ip.display_formatter.formatters['text/html'].for_type( ndarray, printing.print_array )
ip.display_formatter.formatters['text/html'].for_type( Model, printing.print_model )
ip.display_formatter.formatters['text/html'].for_type( DynareDecisionRule, printing.print_dynare_decision_rule )
from IPython.core.display import display
except:
import re
import sys
if not re.search('dolo-recs',sys.argv[0]):
print("failing back on pretty_print")
from pprint import pprint
def display(txt):
pprint(txt)
|
<commit_before>#from __future__ import print_function
# This module is supposed to be imported first
# it contains global variables used for configuration
# try to register printing methods if IPython is running
save_plots = False
try:
import dolo.misc.printing as printing
from numpy import ndarray
from dolo.symbolic.model import Model
from dolo.numeric.decision_rules import DynareDecisionRule
ip = get_ipython()
# there could be some kind of autodecovery there
ip.display_formatter.formatters['text/html'].for_type( ndarray, printing.print_array )
ip.display_formatter.formatters['text/html'].for_type( Model, printing.print_model )
ip.display_formatter.formatters['text/html'].for_type( DynareDecisionRule, printing.print_dynare_decision_rule )
from IPython.core.display import display
except:
print("failing back on pretty_print")
from pprint import pprint
def display(txt):
pprint(txt)
<commit_msg>Remove print("failing back on pretty_print") when using dolo-recs<commit_after>#from __future__ import print_function
# This module is supposed to be imported first
# it contains global variables used for configuration
# try to register printing methods if IPython is running
save_plots = False
try:
import dolo.misc.printing as printing
from numpy import ndarray
from dolo.symbolic.model import Model
from dolo.numeric.decision_rules import DynareDecisionRule
ip = get_ipython()
# there could be some kind of autodecovery there
ip.display_formatter.formatters['text/html'].for_type( ndarray, printing.print_array )
ip.display_formatter.formatters['text/html'].for_type( Model, printing.print_model )
ip.display_formatter.formatters['text/html'].for_type( DynareDecisionRule, printing.print_dynare_decision_rule )
from IPython.core.display import display
except:
import re
import sys
if not re.search('dolo-recs',sys.argv[0]):
print("failing back on pretty_print")
from pprint import pprint
def display(txt):
pprint(txt)
|
bf69962ab7cb730c270ba31508af8af270c912a6
|
examples/generate-manager-file.py
|
examples/generate-manager-file.py
|
#!/usr/bin/python
import sys
import telepathy
from telepathy.interfaces import CONN_MGR_INTERFACE
if len(sys.argv) >= 2:
manager_name = sys.argv[1]
else:
manager_name = "haze"
service_name = "org.freedesktop.Telepathy.ConnectionManager.%s" % manager_name
object_path = "/org/freedesktop/Telepathy/ConnectionManager/%s" % manager_name
object = telepathy.client.ConnectionManager(service_name, object_path)
manager = object[CONN_MGR_INTERFACE]
print "[ConnectionManager]"
print "BusName=%s" % service_name
print "ObjectPath=%s" % object_path
print
protocols = manager.ListProtocols()
protocols.sort()
for protocol in protocols:
print "[Protocol %s]" % protocol
for param in manager.GetParameters(protocol):
print "param-%s=%s" % (param[0], param[2]),
# FIXME: deal with the "register" flag
if param[1] == 1L:
print "required",
print
print
|
#!/usr/bin/python
import sys
import telepathy
from telepathy.interfaces import CONN_MGR_INTERFACE
from telepathy.constants import CONN_MGR_PARAM_FLAG_REQUIRED, \
CONN_MGR_PARAM_FLAG_REGISTER
if len(sys.argv) >= 2:
manager_name = sys.argv[1]
else:
manager_name = "haze"
service_name = "org.freedesktop.Telepathy.ConnectionManager.%s" % manager_name
object_path = "/org/freedesktop/Telepathy/ConnectionManager/%s" % manager_name
object = telepathy.client.ConnectionManager(service_name, object_path)
manager = object[CONN_MGR_INTERFACE]
print "[ConnectionManager]"
print "BusName=%s" % service_name
print "ObjectPath=%s" % object_path
print
protocols = manager.ListProtocols()
protocols.sort()
for protocol in protocols:
print "[Protocol %s]" % protocol
for param in manager.GetParameters(protocol):
(name, flags, type, default) = param
print "param-%s=%s" % (name, type),
if flags & CONN_MGR_PARAM_FLAG_REQUIRED:
print "required",
if flags & CONN_MGR_PARAM_FLAG_REGISTER:
print "register",
print
print
|
Handle register flag; use CONN_MGR_PARAM_FLAG_REQUIRED not 1L
|
Handle register flag; use CONN_MGR_PARAM_FLAG_REQUIRED not 1L
20070911135601-4210b-dec39420c4af7a81bd9b6060cb81d787ebb707fc.gz
|
Python
|
lgpl-2.1
|
detrout/telepathy-python,max-posedon/telepathy-python,PabloCastellano/telepathy-python,epage/telepathy-python,PabloCastellano/telepathy-python,freedesktop-unofficial-mirror/telepathy__telepathy-python,epage/telepathy-python,freedesktop-unofficial-mirror/telepathy__telepathy-python,max-posedon/telepathy-python,detrout/telepathy-python
|
#!/usr/bin/python
import sys
import telepathy
from telepathy.interfaces import CONN_MGR_INTERFACE
if len(sys.argv) >= 2:
manager_name = sys.argv[1]
else:
manager_name = "haze"
service_name = "org.freedesktop.Telepathy.ConnectionManager.%s" % manager_name
object_path = "/org/freedesktop/Telepathy/ConnectionManager/%s" % manager_name
object = telepathy.client.ConnectionManager(service_name, object_path)
manager = object[CONN_MGR_INTERFACE]
print "[ConnectionManager]"
print "BusName=%s" % service_name
print "ObjectPath=%s" % object_path
print
protocols = manager.ListProtocols()
protocols.sort()
for protocol in protocols:
print "[Protocol %s]" % protocol
for param in manager.GetParameters(protocol):
print "param-%s=%s" % (param[0], param[2]),
# FIXME: deal with the "register" flag
if param[1] == 1L:
print "required",
print
print
Handle register flag; use CONN_MGR_PARAM_FLAG_REQUIRED not 1L
20070911135601-4210b-dec39420c4af7a81bd9b6060cb81d787ebb707fc.gz
|
#!/usr/bin/python
import sys
import telepathy
from telepathy.interfaces import CONN_MGR_INTERFACE
from telepathy.constants import CONN_MGR_PARAM_FLAG_REQUIRED, \
CONN_MGR_PARAM_FLAG_REGISTER
if len(sys.argv) >= 2:
manager_name = sys.argv[1]
else:
manager_name = "haze"
service_name = "org.freedesktop.Telepathy.ConnectionManager.%s" % manager_name
object_path = "/org/freedesktop/Telepathy/ConnectionManager/%s" % manager_name
object = telepathy.client.ConnectionManager(service_name, object_path)
manager = object[CONN_MGR_INTERFACE]
print "[ConnectionManager]"
print "BusName=%s" % service_name
print "ObjectPath=%s" % object_path
print
protocols = manager.ListProtocols()
protocols.sort()
for protocol in protocols:
print "[Protocol %s]" % protocol
for param in manager.GetParameters(protocol):
(name, flags, type, default) = param
print "param-%s=%s" % (name, type),
if flags & CONN_MGR_PARAM_FLAG_REQUIRED:
print "required",
if flags & CONN_MGR_PARAM_FLAG_REGISTER:
print "register",
print
print
|
<commit_before>#!/usr/bin/python
import sys
import telepathy
from telepathy.interfaces import CONN_MGR_INTERFACE
if len(sys.argv) >= 2:
manager_name = sys.argv[1]
else:
manager_name = "haze"
service_name = "org.freedesktop.Telepathy.ConnectionManager.%s" % manager_name
object_path = "/org/freedesktop/Telepathy/ConnectionManager/%s" % manager_name
object = telepathy.client.ConnectionManager(service_name, object_path)
manager = object[CONN_MGR_INTERFACE]
print "[ConnectionManager]"
print "BusName=%s" % service_name
print "ObjectPath=%s" % object_path
print
protocols = manager.ListProtocols()
protocols.sort()
for protocol in protocols:
print "[Protocol %s]" % protocol
for param in manager.GetParameters(protocol):
print "param-%s=%s" % (param[0], param[2]),
# FIXME: deal with the "register" flag
if param[1] == 1L:
print "required",
print
print
<commit_msg>Handle register flag; use CONN_MGR_PARAM_FLAG_REQUIRED not 1L
20070911135601-4210b-dec39420c4af7a81bd9b6060cb81d787ebb707fc.gz<commit_after>
|
#!/usr/bin/python
import sys
import telepathy
from telepathy.interfaces import CONN_MGR_INTERFACE
from telepathy.constants import CONN_MGR_PARAM_FLAG_REQUIRED, \
CONN_MGR_PARAM_FLAG_REGISTER
if len(sys.argv) >= 2:
manager_name = sys.argv[1]
else:
manager_name = "haze"
service_name = "org.freedesktop.Telepathy.ConnectionManager.%s" % manager_name
object_path = "/org/freedesktop/Telepathy/ConnectionManager/%s" % manager_name
object = telepathy.client.ConnectionManager(service_name, object_path)
manager = object[CONN_MGR_INTERFACE]
print "[ConnectionManager]"
print "BusName=%s" % service_name
print "ObjectPath=%s" % object_path
print
protocols = manager.ListProtocols()
protocols.sort()
for protocol in protocols:
print "[Protocol %s]" % protocol
for param in manager.GetParameters(protocol):
(name, flags, type, default) = param
print "param-%s=%s" % (name, type),
if flags & CONN_MGR_PARAM_FLAG_REQUIRED:
print "required",
if flags & CONN_MGR_PARAM_FLAG_REGISTER:
print "register",
print
print
|
#!/usr/bin/python
import sys
import telepathy
from telepathy.interfaces import CONN_MGR_INTERFACE
if len(sys.argv) >= 2:
manager_name = sys.argv[1]
else:
manager_name = "haze"
service_name = "org.freedesktop.Telepathy.ConnectionManager.%s" % manager_name
object_path = "/org/freedesktop/Telepathy/ConnectionManager/%s" % manager_name
object = telepathy.client.ConnectionManager(service_name, object_path)
manager = object[CONN_MGR_INTERFACE]
print "[ConnectionManager]"
print "BusName=%s" % service_name
print "ObjectPath=%s" % object_path
print
protocols = manager.ListProtocols()
protocols.sort()
for protocol in protocols:
print "[Protocol %s]" % protocol
for param in manager.GetParameters(protocol):
print "param-%s=%s" % (param[0], param[2]),
# FIXME: deal with the "register" flag
if param[1] == 1L:
print "required",
print
print
Handle register flag; use CONN_MGR_PARAM_FLAG_REQUIRED not 1L
20070911135601-4210b-dec39420c4af7a81bd9b6060cb81d787ebb707fc.gz#!/usr/bin/python
import sys
import telepathy
from telepathy.interfaces import CONN_MGR_INTERFACE
from telepathy.constants import CONN_MGR_PARAM_FLAG_REQUIRED, \
CONN_MGR_PARAM_FLAG_REGISTER
if len(sys.argv) >= 2:
manager_name = sys.argv[1]
else:
manager_name = "haze"
service_name = "org.freedesktop.Telepathy.ConnectionManager.%s" % manager_name
object_path = "/org/freedesktop/Telepathy/ConnectionManager/%s" % manager_name
object = telepathy.client.ConnectionManager(service_name, object_path)
manager = object[CONN_MGR_INTERFACE]
print "[ConnectionManager]"
print "BusName=%s" % service_name
print "ObjectPath=%s" % object_path
print
protocols = manager.ListProtocols()
protocols.sort()
for protocol in protocols:
print "[Protocol %s]" % protocol
for param in manager.GetParameters(protocol):
(name, flags, type, default) = param
print "param-%s=%s" % (name, type),
if flags & CONN_MGR_PARAM_FLAG_REQUIRED:
print "required",
if flags & CONN_MGR_PARAM_FLAG_REGISTER:
print "register",
print
print
|
<commit_before>#!/usr/bin/python
import sys
import telepathy
from telepathy.interfaces import CONN_MGR_INTERFACE
if len(sys.argv) >= 2:
manager_name = sys.argv[1]
else:
manager_name = "haze"
service_name = "org.freedesktop.Telepathy.ConnectionManager.%s" % manager_name
object_path = "/org/freedesktop/Telepathy/ConnectionManager/%s" % manager_name
object = telepathy.client.ConnectionManager(service_name, object_path)
manager = object[CONN_MGR_INTERFACE]
print "[ConnectionManager]"
print "BusName=%s" % service_name
print "ObjectPath=%s" % object_path
print
protocols = manager.ListProtocols()
protocols.sort()
for protocol in protocols:
print "[Protocol %s]" % protocol
for param in manager.GetParameters(protocol):
print "param-%s=%s" % (param[0], param[2]),
# FIXME: deal with the "register" flag
if param[1] == 1L:
print "required",
print
print
<commit_msg>Handle register flag; use CONN_MGR_PARAM_FLAG_REQUIRED not 1L
20070911135601-4210b-dec39420c4af7a81bd9b6060cb81d787ebb707fc.gz<commit_after>#!/usr/bin/python
import sys
import telepathy
from telepathy.interfaces import CONN_MGR_INTERFACE
from telepathy.constants import CONN_MGR_PARAM_FLAG_REQUIRED, \
CONN_MGR_PARAM_FLAG_REGISTER
if len(sys.argv) >= 2:
manager_name = sys.argv[1]
else:
manager_name = "haze"
service_name = "org.freedesktop.Telepathy.ConnectionManager.%s" % manager_name
object_path = "/org/freedesktop/Telepathy/ConnectionManager/%s" % manager_name
object = telepathy.client.ConnectionManager(service_name, object_path)
manager = object[CONN_MGR_INTERFACE]
print "[ConnectionManager]"
print "BusName=%s" % service_name
print "ObjectPath=%s" % object_path
print
protocols = manager.ListProtocols()
protocols.sort()
for protocol in protocols:
print "[Protocol %s]" % protocol
for param in manager.GetParameters(protocol):
(name, flags, type, default) = param
print "param-%s=%s" % (name, type),
if flags & CONN_MGR_PARAM_FLAG_REQUIRED:
print "required",
if flags & CONN_MGR_PARAM_FLAG_REGISTER:
print "register",
print
print
|
5af9796dc0fcc425efd6b0283f2e4a79dec31d5f
|
server/liveblog/blogs/blogs_test.py
|
server/liveblog/blogs/blogs_test.py
|
import unittest
from liveblog.blogs.blogs import BlogService
from superdesk.errors import SuperdeskApiError
class BlogsTestCase(unittest.TestCase):
def setUp(self):
pass
def test_if_check_max_active(self):
increment = 10
"""so if check "subscription in SUBSCRIPTION_MAX_ACTIVE_BLOGS" pass in method \
_check_max_active it will create exception"""
error_reach_maximum = False
try:
BlogService()._check_max_active(increment)
except:
error_reach_maximum = True
self.assertEqual(error_reach_maximum, True)
|
from liveblog.blogs import init_app
from superdesk.tests import TestCase
from superdesk import get_resource_service
from superdesk.errors import SuperdeskApiError
class BlogsTestCase(TestCase):
def setUp(self):
# from nose.tools import set_trace; set_trace()
init_app(self.app)
def test_if_not_check_max_active(self):
increment = 0
self.assertEqual(get_resource_service('blogs')._check_max_active(increment), None)
def test_if_check_max_active(self):
increment = 5
with self.assertRaises(SuperdeskApiError):
get_resource_service('blogs')._check_max_active(increment)
|
Test case: _check_max_active method to check both scenarios
|
Test case: _check_max_active method to check both scenarios
|
Python
|
agpl-3.0
|
liveblog/liveblog,superdesk/liveblog,hlmnrmr/liveblog,superdesk/liveblog,superdesk/liveblog,liveblog/liveblog,hlmnrmr/liveblog,liveblog/liveblog,liveblog/liveblog,liveblog/liveblog,hlmnrmr/liveblog,hlmnrmr/liveblog,superdesk/liveblog
|
import unittest
from liveblog.blogs.blogs import BlogService
from superdesk.errors import SuperdeskApiError
class BlogsTestCase(unittest.TestCase):
def setUp(self):
pass
def test_if_check_max_active(self):
increment = 10
"""so if check "subscription in SUBSCRIPTION_MAX_ACTIVE_BLOGS" pass in method \
_check_max_active it will create exception"""
error_reach_maximum = False
try:
BlogService()._check_max_active(increment)
except:
error_reach_maximum = True
self.assertEqual(error_reach_maximum, True)
Test case: _check_max_active method to check both scenarios
|
from liveblog.blogs import init_app
from superdesk.tests import TestCase
from superdesk import get_resource_service
from superdesk.errors import SuperdeskApiError
class BlogsTestCase(TestCase):
def setUp(self):
# from nose.tools import set_trace; set_trace()
init_app(self.app)
def test_if_not_check_max_active(self):
increment = 0
self.assertEqual(get_resource_service('blogs')._check_max_active(increment), None)
def test_if_check_max_active(self):
increment = 5
with self.assertRaises(SuperdeskApiError):
get_resource_service('blogs')._check_max_active(increment)
|
<commit_before>import unittest
from liveblog.blogs.blogs import BlogService
from superdesk.errors import SuperdeskApiError
class BlogsTestCase(unittest.TestCase):
def setUp(self):
pass
def test_if_check_max_active(self):
increment = 10
"""so if check "subscription in SUBSCRIPTION_MAX_ACTIVE_BLOGS" pass in method \
_check_max_active it will create exception"""
error_reach_maximum = False
try:
BlogService()._check_max_active(increment)
except:
error_reach_maximum = True
self.assertEqual(error_reach_maximum, True)
<commit_msg>Test case: _check_max_active method to check both scenarios<commit_after>
|
from liveblog.blogs import init_app
from superdesk.tests import TestCase
from superdesk import get_resource_service
from superdesk.errors import SuperdeskApiError
class BlogsTestCase(TestCase):
def setUp(self):
# from nose.tools import set_trace; set_trace()
init_app(self.app)
def test_if_not_check_max_active(self):
increment = 0
self.assertEqual(get_resource_service('blogs')._check_max_active(increment), None)
def test_if_check_max_active(self):
increment = 5
with self.assertRaises(SuperdeskApiError):
get_resource_service('blogs')._check_max_active(increment)
|
import unittest
from liveblog.blogs.blogs import BlogService
from superdesk.errors import SuperdeskApiError
class BlogsTestCase(unittest.TestCase):
def setUp(self):
pass
def test_if_check_max_active(self):
increment = 10
"""so if check "subscription in SUBSCRIPTION_MAX_ACTIVE_BLOGS" pass in method \
_check_max_active it will create exception"""
error_reach_maximum = False
try:
BlogService()._check_max_active(increment)
except:
error_reach_maximum = True
self.assertEqual(error_reach_maximum, True)
Test case: _check_max_active method to check both scenariosfrom liveblog.blogs import init_app
from superdesk.tests import TestCase
from superdesk import get_resource_service
from superdesk.errors import SuperdeskApiError
class BlogsTestCase(TestCase):
def setUp(self):
# from nose.tools import set_trace; set_trace()
init_app(self.app)
def test_if_not_check_max_active(self):
increment = 0
self.assertEqual(get_resource_service('blogs')._check_max_active(increment), None)
def test_if_check_max_active(self):
increment = 5
with self.assertRaises(SuperdeskApiError):
get_resource_service('blogs')._check_max_active(increment)
|
<commit_before>import unittest
from liveblog.blogs.blogs import BlogService
from superdesk.errors import SuperdeskApiError
class BlogsTestCase(unittest.TestCase):
def setUp(self):
pass
def test_if_check_max_active(self):
increment = 10
"""so if check "subscription in SUBSCRIPTION_MAX_ACTIVE_BLOGS" pass in method \
_check_max_active it will create exception"""
error_reach_maximum = False
try:
BlogService()._check_max_active(increment)
except:
error_reach_maximum = True
self.assertEqual(error_reach_maximum, True)
<commit_msg>Test case: _check_max_active method to check both scenarios<commit_after>from liveblog.blogs import init_app
from superdesk.tests import TestCase
from superdesk import get_resource_service
from superdesk.errors import SuperdeskApiError
class BlogsTestCase(TestCase):
def setUp(self):
# from nose.tools import set_trace; set_trace()
init_app(self.app)
def test_if_not_check_max_active(self):
increment = 0
self.assertEqual(get_resource_service('blogs')._check_max_active(increment), None)
def test_if_check_max_active(self):
increment = 5
with self.assertRaises(SuperdeskApiError):
get_resource_service('blogs')._check_max_active(increment)
|
eee7ee47f0a6e2be31c74f9967fa1b2f1a8b3b01
|
experiments/example-fsrcnn/run.py
|
experiments/example-fsrcnn/run.py
|
"""Example experiment."""
from functools import partial
from toolbox.data import load_set
from toolbox.models import compile
from toolbox.models import fsrcnn
from toolbox.experiment import FSRCNNExperiment
# Model
scale = 3
model = compile(fsrcnn(c=1, d=56, s=12, m=4, k=3))
model.summary()
# Data
train_set = '91-image'
val_set = 'Set5'
test_sets = ['Set5', 'Set14']
load_set = partial(load_set, sub_size=20, sub_stride=100, scale=scale)
# Training
experiment = FSRCNNExperiment(scale=scale, model=model, load_set=load_set,
save_dir='.')
experiment.train(train_set=train_set, val_set=val_set, epochs=2, resume=True)
# Evaluation
for test_set in test_sets:
experiment.test(test_set=test_set)
|
"""Example experiment."""
from functools import partial
from toolbox.data import load_set
from toolbox.models import compile
from toolbox.models import fsrcnn
from toolbox.experiment import FSRCNNExperiment
# Model
scale = 3
model = compile(fsrcnn(c=1, d=56, s=12, m=4, k=scale))
model.summary()
# Data
train_set = '91-image'
val_set = 'Set5'
test_sets = ['Set5', 'Set14']
load_set = partial(load_set, sub_size=20, sub_stride=100, scale=scale)
# Training
experiment = FSRCNNExperiment(scale=scale, model=model, load_set=load_set,
save_dir='.')
experiment.train(train_set=train_set, val_set=val_set, epochs=2, resume=True)
# Evaluation
for test_set in test_sets:
experiment.test(test_set=test_set)
|
Set the stride to scale
|
Set the stride to scale
|
Python
|
mit
|
qobilidop/srcnn,qobilidop/srcnn
|
"""Example experiment."""
from functools import partial
from toolbox.data import load_set
from toolbox.models import compile
from toolbox.models import fsrcnn
from toolbox.experiment import FSRCNNExperiment
# Model
scale = 3
model = compile(fsrcnn(c=1, d=56, s=12, m=4, k=3))
model.summary()
# Data
train_set = '91-image'
val_set = 'Set5'
test_sets = ['Set5', 'Set14']
load_set = partial(load_set, sub_size=20, sub_stride=100, scale=scale)
# Training
experiment = FSRCNNExperiment(scale=scale, model=model, load_set=load_set,
save_dir='.')
experiment.train(train_set=train_set, val_set=val_set, epochs=2, resume=True)
# Evaluation
for test_set in test_sets:
experiment.test(test_set=test_set)
Set the stride to scale
|
"""Example experiment."""
from functools import partial
from toolbox.data import load_set
from toolbox.models import compile
from toolbox.models import fsrcnn
from toolbox.experiment import FSRCNNExperiment
# Model
scale = 3
model = compile(fsrcnn(c=1, d=56, s=12, m=4, k=scale))
model.summary()
# Data
train_set = '91-image'
val_set = 'Set5'
test_sets = ['Set5', 'Set14']
load_set = partial(load_set, sub_size=20, sub_stride=100, scale=scale)
# Training
experiment = FSRCNNExperiment(scale=scale, model=model, load_set=load_set,
save_dir='.')
experiment.train(train_set=train_set, val_set=val_set, epochs=2, resume=True)
# Evaluation
for test_set in test_sets:
experiment.test(test_set=test_set)
|
<commit_before>"""Example experiment."""
from functools import partial
from toolbox.data import load_set
from toolbox.models import compile
from toolbox.models import fsrcnn
from toolbox.experiment import FSRCNNExperiment
# Model
scale = 3
model = compile(fsrcnn(c=1, d=56, s=12, m=4, k=3))
model.summary()
# Data
train_set = '91-image'
val_set = 'Set5'
test_sets = ['Set5', 'Set14']
load_set = partial(load_set, sub_size=20, sub_stride=100, scale=scale)
# Training
experiment = FSRCNNExperiment(scale=scale, model=model, load_set=load_set,
save_dir='.')
experiment.train(train_set=train_set, val_set=val_set, epochs=2, resume=True)
# Evaluation
for test_set in test_sets:
experiment.test(test_set=test_set)
<commit_msg>Set the stride to scale<commit_after>
|
"""Example experiment."""
from functools import partial
from toolbox.data import load_set
from toolbox.models import compile
from toolbox.models import fsrcnn
from toolbox.experiment import FSRCNNExperiment
# Model
scale = 3
model = compile(fsrcnn(c=1, d=56, s=12, m=4, k=scale))
model.summary()
# Data
train_set = '91-image'
val_set = 'Set5'
test_sets = ['Set5', 'Set14']
load_set = partial(load_set, sub_size=20, sub_stride=100, scale=scale)
# Training
experiment = FSRCNNExperiment(scale=scale, model=model, load_set=load_set,
save_dir='.')
experiment.train(train_set=train_set, val_set=val_set, epochs=2, resume=True)
# Evaluation
for test_set in test_sets:
experiment.test(test_set=test_set)
|
"""Example experiment."""
from functools import partial
from toolbox.data import load_set
from toolbox.models import compile
from toolbox.models import fsrcnn
from toolbox.experiment import FSRCNNExperiment
# Model
scale = 3
model = compile(fsrcnn(c=1, d=56, s=12, m=4, k=3))
model.summary()
# Data
train_set = '91-image'
val_set = 'Set5'
test_sets = ['Set5', 'Set14']
load_set = partial(load_set, sub_size=20, sub_stride=100, scale=scale)
# Training
experiment = FSRCNNExperiment(scale=scale, model=model, load_set=load_set,
save_dir='.')
experiment.train(train_set=train_set, val_set=val_set, epochs=2, resume=True)
# Evaluation
for test_set in test_sets:
experiment.test(test_set=test_set)
Set the stride to scale"""Example experiment."""
from functools import partial
from toolbox.data import load_set
from toolbox.models import compile
from toolbox.models import fsrcnn
from toolbox.experiment import FSRCNNExperiment
# Model
scale = 3
model = compile(fsrcnn(c=1, d=56, s=12, m=4, k=scale))
model.summary()
# Data
train_set = '91-image'
val_set = 'Set5'
test_sets = ['Set5', 'Set14']
load_set = partial(load_set, sub_size=20, sub_stride=100, scale=scale)
# Training
experiment = FSRCNNExperiment(scale=scale, model=model, load_set=load_set,
save_dir='.')
experiment.train(train_set=train_set, val_set=val_set, epochs=2, resume=True)
# Evaluation
for test_set in test_sets:
experiment.test(test_set=test_set)
|
<commit_before>"""Example experiment."""
from functools import partial
from toolbox.data import load_set
from toolbox.models import compile
from toolbox.models import fsrcnn
from toolbox.experiment import FSRCNNExperiment
# Model
scale = 3
model = compile(fsrcnn(c=1, d=56, s=12, m=4, k=3))
model.summary()
# Data
train_set = '91-image'
val_set = 'Set5'
test_sets = ['Set5', 'Set14']
load_set = partial(load_set, sub_size=20, sub_stride=100, scale=scale)
# Training
experiment = FSRCNNExperiment(scale=scale, model=model, load_set=load_set,
save_dir='.')
experiment.train(train_set=train_set, val_set=val_set, epochs=2, resume=True)
# Evaluation
for test_set in test_sets:
experiment.test(test_set=test_set)
<commit_msg>Set the stride to scale<commit_after>"""Example experiment."""
from functools import partial
from toolbox.data import load_set
from toolbox.models import compile
from toolbox.models import fsrcnn
from toolbox.experiment import FSRCNNExperiment
# Model
scale = 3
model = compile(fsrcnn(c=1, d=56, s=12, m=4, k=scale))
model.summary()
# Data
train_set = '91-image'
val_set = 'Set5'
test_sets = ['Set5', 'Set14']
load_set = partial(load_set, sub_size=20, sub_stride=100, scale=scale)
# Training
experiment = FSRCNNExperiment(scale=scale, model=model, load_set=load_set,
save_dir='.')
experiment.train(train_set=train_set, val_set=val_set, epochs=2, resume=True)
# Evaluation
for test_set in test_sets:
experiment.test(test_set=test_set)
|
fd7f413925491f305a30a73f0c6eb6306a9ebf19
|
tests/test_member_access.py
|
tests/test_member_access.py
|
import pytest # type: ignore
from ppb_vector import Vector
@pytest.fixture()
def vector():
return Vector(10, 20)
def test_class_member_access(vector):
assert vector.x == 10
assert vector.y == 20
def test_index_access(vector):
assert vector[0] == 10
assert vector[1] == 20
def test_key_access(vector):
assert vector["x"] == 10
assert vector["y"] == 20
|
from hypothesis import given
import pytest # type: ignore
from ppb_vector import Vector
from utils import vectors
@pytest.fixture()
def vector():
return Vector(10, 20)
def test_class_member_access(vector):
assert vector.x == 10
assert vector.y == 20
@given(v=vectors())
def test_index_access(v: Vector):
assert v[0] == v.x
assert v[1] == v.y
@given(v=vectors())
def test_key_access(v: Vector):
assert v["x"] == v.x
assert v["y"] == v.y
|
Make {index.key}_access into Hypothesis tests
|
tests/member_access: Make {index.key}_access into Hypothesis tests
|
Python
|
artistic-2.0
|
ppb/ppb-vector,ppb/ppb-vector
|
import pytest # type: ignore
from ppb_vector import Vector
@pytest.fixture()
def vector():
return Vector(10, 20)
def test_class_member_access(vector):
assert vector.x == 10
assert vector.y == 20
def test_index_access(vector):
assert vector[0] == 10
assert vector[1] == 20
def test_key_access(vector):
assert vector["x"] == 10
assert vector["y"] == 20
tests/member_access: Make {index.key}_access into Hypothesis tests
|
from hypothesis import given
import pytest # type: ignore
from ppb_vector import Vector
from utils import vectors
@pytest.fixture()
def vector():
return Vector(10, 20)
def test_class_member_access(vector):
assert vector.x == 10
assert vector.y == 20
@given(v=vectors())
def test_index_access(v: Vector):
assert v[0] == v.x
assert v[1] == v.y
@given(v=vectors())
def test_key_access(v: Vector):
assert v["x"] == v.x
assert v["y"] == v.y
|
<commit_before>import pytest # type: ignore
from ppb_vector import Vector
@pytest.fixture()
def vector():
return Vector(10, 20)
def test_class_member_access(vector):
assert vector.x == 10
assert vector.y == 20
def test_index_access(vector):
assert vector[0] == 10
assert vector[1] == 20
def test_key_access(vector):
assert vector["x"] == 10
assert vector["y"] == 20
<commit_msg>tests/member_access: Make {index.key}_access into Hypothesis tests<commit_after>
|
from hypothesis import given
import pytest # type: ignore
from ppb_vector import Vector
from utils import vectors
@pytest.fixture()
def vector():
return Vector(10, 20)
def test_class_member_access(vector):
assert vector.x == 10
assert vector.y == 20
@given(v=vectors())
def test_index_access(v: Vector):
assert v[0] == v.x
assert v[1] == v.y
@given(v=vectors())
def test_key_access(v: Vector):
assert v["x"] == v.x
assert v["y"] == v.y
|
import pytest # type: ignore
from ppb_vector import Vector
@pytest.fixture()
def vector():
return Vector(10, 20)
def test_class_member_access(vector):
assert vector.x == 10
assert vector.y == 20
def test_index_access(vector):
assert vector[0] == 10
assert vector[1] == 20
def test_key_access(vector):
assert vector["x"] == 10
assert vector["y"] == 20
tests/member_access: Make {index.key}_access into Hypothesis testsfrom hypothesis import given
import pytest # type: ignore
from ppb_vector import Vector
from utils import vectors
@pytest.fixture()
def vector():
return Vector(10, 20)
def test_class_member_access(vector):
assert vector.x == 10
assert vector.y == 20
@given(v=vectors())
def test_index_access(v: Vector):
assert v[0] == v.x
assert v[1] == v.y
@given(v=vectors())
def test_key_access(v: Vector):
assert v["x"] == v.x
assert v["y"] == v.y
|
<commit_before>import pytest # type: ignore
from ppb_vector import Vector
@pytest.fixture()
def vector():
return Vector(10, 20)
def test_class_member_access(vector):
assert vector.x == 10
assert vector.y == 20
def test_index_access(vector):
assert vector[0] == 10
assert vector[1] == 20
def test_key_access(vector):
assert vector["x"] == 10
assert vector["y"] == 20
<commit_msg>tests/member_access: Make {index.key}_access into Hypothesis tests<commit_after>from hypothesis import given
import pytest # type: ignore
from ppb_vector import Vector
from utils import vectors
@pytest.fixture()
def vector():
return Vector(10, 20)
def test_class_member_access(vector):
assert vector.x == 10
assert vector.y == 20
@given(v=vectors())
def test_index_access(v: Vector):
assert v[0] == v.x
assert v[1] == v.y
@given(v=vectors())
def test_key_access(v: Vector):
assert v["x"] == v.x
assert v["y"] == v.y
|
7f99ba5d06d646eef03bd3848fae579d0f51e2f6
|
alembic/testing/__init__.py
|
alembic/testing/__init__.py
|
from sqlalchemy.testing import config
from sqlalchemy.testing import emits_warning
from sqlalchemy.testing import engines
from sqlalchemy.testing import exclusions
from sqlalchemy.testing import mock
from sqlalchemy.testing import provide_metadata
from sqlalchemy.testing import skip_if
from sqlalchemy.testing import uses_deprecated
from sqlalchemy.testing.config import combinations
from sqlalchemy.testing.config import fixture
from sqlalchemy.testing.config import requirements as requires
from .assertions import assert_raises
from .assertions import assert_raises_message
from .assertions import emits_python_deprecation_warning
from .assertions import eq_
from .assertions import eq_ignore_whitespace
from .assertions import expect_raises
from .assertions import expect_raises_message
from .assertions import expect_sqlalchemy_deprecated
from .assertions import expect_sqlalchemy_deprecated_20
from .assertions import expect_warnings
from .assertions import is_
from .assertions import is_false
from .assertions import is_not_
from .assertions import is_true
from .assertions import ne_
from .fixtures import TestBase
from .util import resolve_lambda
try:
from sqlalchemy.testing import asyncio
except ImportError:
pass
else:
asyncio.ENABLE_ASYNCIO = False
|
from sqlalchemy.testing import config
from sqlalchemy.testing import emits_warning
from sqlalchemy.testing import engines
from sqlalchemy.testing import exclusions
from sqlalchemy.testing import mock
from sqlalchemy.testing import provide_metadata
from sqlalchemy.testing import skip_if
from sqlalchemy.testing import uses_deprecated
from sqlalchemy.testing.config import combinations
from sqlalchemy.testing.config import fixture
from sqlalchemy.testing.config import requirements as requires
from .assertions import assert_raises
from .assertions import assert_raises_message
from .assertions import emits_python_deprecation_warning
from .assertions import eq_
from .assertions import eq_ignore_whitespace
from .assertions import expect_raises
from .assertions import expect_raises_message
from .assertions import expect_sqlalchemy_deprecated
from .assertions import expect_sqlalchemy_deprecated_20
from .assertions import expect_warnings
from .assertions import is_
from .assertions import is_false
from .assertions import is_not_
from .assertions import is_true
from .assertions import ne_
from .fixtures import TestBase
from .util import resolve_lambda
|
Remove code to force ENABLE_ASYNCIO to False
|
Remove code to force ENABLE_ASYNCIO to False
Forcing ENABLE_ASYNCIO to False was interfering
with testing under async drivers when the
(third-party dialect) test suite included both
SQLAlchemy and Alembic tests.
Change-Id: I2fe40049c24ba8eba0a10011849a912c03aa381e
|
Python
|
mit
|
zzzeek/alembic,sqlalchemy/alembic
|
from sqlalchemy.testing import config
from sqlalchemy.testing import emits_warning
from sqlalchemy.testing import engines
from sqlalchemy.testing import exclusions
from sqlalchemy.testing import mock
from sqlalchemy.testing import provide_metadata
from sqlalchemy.testing import skip_if
from sqlalchemy.testing import uses_deprecated
from sqlalchemy.testing.config import combinations
from sqlalchemy.testing.config import fixture
from sqlalchemy.testing.config import requirements as requires
from .assertions import assert_raises
from .assertions import assert_raises_message
from .assertions import emits_python_deprecation_warning
from .assertions import eq_
from .assertions import eq_ignore_whitespace
from .assertions import expect_raises
from .assertions import expect_raises_message
from .assertions import expect_sqlalchemy_deprecated
from .assertions import expect_sqlalchemy_deprecated_20
from .assertions import expect_warnings
from .assertions import is_
from .assertions import is_false
from .assertions import is_not_
from .assertions import is_true
from .assertions import ne_
from .fixtures import TestBase
from .util import resolve_lambda
try:
from sqlalchemy.testing import asyncio
except ImportError:
pass
else:
asyncio.ENABLE_ASYNCIO = False
Remove code to force ENABLE_ASYNCIO to False
Forcing ENABLE_ASYNCIO to False was interfering
with testing under async drivers when the
(third-party dialect) test suite included both
SQLAlchemy and Alembic tests.
Change-Id: I2fe40049c24ba8eba0a10011849a912c03aa381e
|
from sqlalchemy.testing import config
from sqlalchemy.testing import emits_warning
from sqlalchemy.testing import engines
from sqlalchemy.testing import exclusions
from sqlalchemy.testing import mock
from sqlalchemy.testing import provide_metadata
from sqlalchemy.testing import skip_if
from sqlalchemy.testing import uses_deprecated
from sqlalchemy.testing.config import combinations
from sqlalchemy.testing.config import fixture
from sqlalchemy.testing.config import requirements as requires
from .assertions import assert_raises
from .assertions import assert_raises_message
from .assertions import emits_python_deprecation_warning
from .assertions import eq_
from .assertions import eq_ignore_whitespace
from .assertions import expect_raises
from .assertions import expect_raises_message
from .assertions import expect_sqlalchemy_deprecated
from .assertions import expect_sqlalchemy_deprecated_20
from .assertions import expect_warnings
from .assertions import is_
from .assertions import is_false
from .assertions import is_not_
from .assertions import is_true
from .assertions import ne_
from .fixtures import TestBase
from .util import resolve_lambda
|
<commit_before>from sqlalchemy.testing import config
from sqlalchemy.testing import emits_warning
from sqlalchemy.testing import engines
from sqlalchemy.testing import exclusions
from sqlalchemy.testing import mock
from sqlalchemy.testing import provide_metadata
from sqlalchemy.testing import skip_if
from sqlalchemy.testing import uses_deprecated
from sqlalchemy.testing.config import combinations
from sqlalchemy.testing.config import fixture
from sqlalchemy.testing.config import requirements as requires
from .assertions import assert_raises
from .assertions import assert_raises_message
from .assertions import emits_python_deprecation_warning
from .assertions import eq_
from .assertions import eq_ignore_whitespace
from .assertions import expect_raises
from .assertions import expect_raises_message
from .assertions import expect_sqlalchemy_deprecated
from .assertions import expect_sqlalchemy_deprecated_20
from .assertions import expect_warnings
from .assertions import is_
from .assertions import is_false
from .assertions import is_not_
from .assertions import is_true
from .assertions import ne_
from .fixtures import TestBase
from .util import resolve_lambda
try:
from sqlalchemy.testing import asyncio
except ImportError:
pass
else:
asyncio.ENABLE_ASYNCIO = False
<commit_msg>Remove code to force ENABLE_ASYNCIO to False
Forcing ENABLE_ASYNCIO to False was interfering
with testing under async drivers when the
(third-party dialect) test suite included both
SQLAlchemy and Alembic tests.
Change-Id: I2fe40049c24ba8eba0a10011849a912c03aa381e<commit_after>
|
from sqlalchemy.testing import config
from sqlalchemy.testing import emits_warning
from sqlalchemy.testing import engines
from sqlalchemy.testing import exclusions
from sqlalchemy.testing import mock
from sqlalchemy.testing import provide_metadata
from sqlalchemy.testing import skip_if
from sqlalchemy.testing import uses_deprecated
from sqlalchemy.testing.config import combinations
from sqlalchemy.testing.config import fixture
from sqlalchemy.testing.config import requirements as requires
from .assertions import assert_raises
from .assertions import assert_raises_message
from .assertions import emits_python_deprecation_warning
from .assertions import eq_
from .assertions import eq_ignore_whitespace
from .assertions import expect_raises
from .assertions import expect_raises_message
from .assertions import expect_sqlalchemy_deprecated
from .assertions import expect_sqlalchemy_deprecated_20
from .assertions import expect_warnings
from .assertions import is_
from .assertions import is_false
from .assertions import is_not_
from .assertions import is_true
from .assertions import ne_
from .fixtures import TestBase
from .util import resolve_lambda
|
from sqlalchemy.testing import config
from sqlalchemy.testing import emits_warning
from sqlalchemy.testing import engines
from sqlalchemy.testing import exclusions
from sqlalchemy.testing import mock
from sqlalchemy.testing import provide_metadata
from sqlalchemy.testing import skip_if
from sqlalchemy.testing import uses_deprecated
from sqlalchemy.testing.config import combinations
from sqlalchemy.testing.config import fixture
from sqlalchemy.testing.config import requirements as requires
from .assertions import assert_raises
from .assertions import assert_raises_message
from .assertions import emits_python_deprecation_warning
from .assertions import eq_
from .assertions import eq_ignore_whitespace
from .assertions import expect_raises
from .assertions import expect_raises_message
from .assertions import expect_sqlalchemy_deprecated
from .assertions import expect_sqlalchemy_deprecated_20
from .assertions import expect_warnings
from .assertions import is_
from .assertions import is_false
from .assertions import is_not_
from .assertions import is_true
from .assertions import ne_
from .fixtures import TestBase
from .util import resolve_lambda
try:
from sqlalchemy.testing import asyncio
except ImportError:
pass
else:
asyncio.ENABLE_ASYNCIO = False
Remove code to force ENABLE_ASYNCIO to False
Forcing ENABLE_ASYNCIO to False was interfering
with testing under async drivers when the
(third-party dialect) test suite included both
SQLAlchemy and Alembic tests.
Change-Id: I2fe40049c24ba8eba0a10011849a912c03aa381efrom sqlalchemy.testing import config
from sqlalchemy.testing import emits_warning
from sqlalchemy.testing import engines
from sqlalchemy.testing import exclusions
from sqlalchemy.testing import mock
from sqlalchemy.testing import provide_metadata
from sqlalchemy.testing import skip_if
from sqlalchemy.testing import uses_deprecated
from sqlalchemy.testing.config import combinations
from sqlalchemy.testing.config import fixture
from sqlalchemy.testing.config import requirements as requires
from .assertions import assert_raises
from .assertions import assert_raises_message
from .assertions import emits_python_deprecation_warning
from .assertions import eq_
from .assertions import eq_ignore_whitespace
from .assertions import expect_raises
from .assertions import expect_raises_message
from .assertions import expect_sqlalchemy_deprecated
from .assertions import expect_sqlalchemy_deprecated_20
from .assertions import expect_warnings
from .assertions import is_
from .assertions import is_false
from .assertions import is_not_
from .assertions import is_true
from .assertions import ne_
from .fixtures import TestBase
from .util import resolve_lambda
|
<commit_before>from sqlalchemy.testing import config
from sqlalchemy.testing import emits_warning
from sqlalchemy.testing import engines
from sqlalchemy.testing import exclusions
from sqlalchemy.testing import mock
from sqlalchemy.testing import provide_metadata
from sqlalchemy.testing import skip_if
from sqlalchemy.testing import uses_deprecated
from sqlalchemy.testing.config import combinations
from sqlalchemy.testing.config import fixture
from sqlalchemy.testing.config import requirements as requires
from .assertions import assert_raises
from .assertions import assert_raises_message
from .assertions import emits_python_deprecation_warning
from .assertions import eq_
from .assertions import eq_ignore_whitespace
from .assertions import expect_raises
from .assertions import expect_raises_message
from .assertions import expect_sqlalchemy_deprecated
from .assertions import expect_sqlalchemy_deprecated_20
from .assertions import expect_warnings
from .assertions import is_
from .assertions import is_false
from .assertions import is_not_
from .assertions import is_true
from .assertions import ne_
from .fixtures import TestBase
from .util import resolve_lambda
try:
from sqlalchemy.testing import asyncio
except ImportError:
pass
else:
asyncio.ENABLE_ASYNCIO = False
<commit_msg>Remove code to force ENABLE_ASYNCIO to False
Forcing ENABLE_ASYNCIO to False was interfering
with testing under async drivers when the
(third-party dialect) test suite included both
SQLAlchemy and Alembic tests.
Change-Id: I2fe40049c24ba8eba0a10011849a912c03aa381e<commit_after>from sqlalchemy.testing import config
from sqlalchemy.testing import emits_warning
from sqlalchemy.testing import engines
from sqlalchemy.testing import exclusions
from sqlalchemy.testing import mock
from sqlalchemy.testing import provide_metadata
from sqlalchemy.testing import skip_if
from sqlalchemy.testing import uses_deprecated
from sqlalchemy.testing.config import combinations
from sqlalchemy.testing.config import fixture
from sqlalchemy.testing.config import requirements as requires
from .assertions import assert_raises
from .assertions import assert_raises_message
from .assertions import emits_python_deprecation_warning
from .assertions import eq_
from .assertions import eq_ignore_whitespace
from .assertions import expect_raises
from .assertions import expect_raises_message
from .assertions import expect_sqlalchemy_deprecated
from .assertions import expect_sqlalchemy_deprecated_20
from .assertions import expect_warnings
from .assertions import is_
from .assertions import is_false
from .assertions import is_not_
from .assertions import is_true
from .assertions import ne_
from .fixtures import TestBase
from .util import resolve_lambda
|
9ed6833c88e2718e54cb25b6c1837ff4868c81c9
|
emote/emote.py
|
emote/emote.py
|
""" A simple CLI tool for quickly copying common emoticon/emoji to your
clipboard. """
import pyperclip
import json
import sys
import argparse
with open("mapping.json") as f:
emotes = json.load(f)
def main():
parser = argparse.ArgumentParser(
description=sys.modules[__name__].__doc__,
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-p','--web_port')
args = parser.parse_args()
if __name__ == "__main__":
main()
|
""" A simple CLI tool for quickly copying common emoticon/emoji to your
clipboard. """
import pyperclip
import json
import sys
import argparse
with open("mapping.json") as f:
emotes = json.load(f)
def main():
parser = argparse.ArgumentParser(
description=sys.modules[__name__].__doc__,
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-p','--web_port')
args = parser.parse_args()
# Print help if no cli args are specified.
if len(sys.argv) < 2:
parser.print_help()
if __name__ == "__main__":
main()
|
Add logic for displaying help if no args are specified.
|
Add logic for displaying help if no args are specified.
|
Python
|
mit
|
d6e/emotion
|
""" A simple CLI tool for quickly copying common emoticon/emoji to your
clipboard. """
import pyperclip
import json
import sys
import argparse
with open("mapping.json") as f:
emotes = json.load(f)
def main():
parser = argparse.ArgumentParser(
description=sys.modules[__name__].__doc__,
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-p','--web_port')
args = parser.parse_args()
if __name__ == "__main__":
main()
Add logic for displaying help if no args are specified.
|
""" A simple CLI tool for quickly copying common emoticon/emoji to your
clipboard. """
import pyperclip
import json
import sys
import argparse
with open("mapping.json") as f:
emotes = json.load(f)
def main():
parser = argparse.ArgumentParser(
description=sys.modules[__name__].__doc__,
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-p','--web_port')
args = parser.parse_args()
# Print help if no cli args are specified.
if len(sys.argv) < 2:
parser.print_help()
if __name__ == "__main__":
main()
|
<commit_before>""" A simple CLI tool for quickly copying common emoticon/emoji to your
clipboard. """
import pyperclip
import json
import sys
import argparse
with open("mapping.json") as f:
emotes = json.load(f)
def main():
parser = argparse.ArgumentParser(
description=sys.modules[__name__].__doc__,
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-p','--web_port')
args = parser.parse_args()
if __name__ == "__main__":
main()
<commit_msg>Add logic for displaying help if no args are specified.<commit_after>
|
""" A simple CLI tool for quickly copying common emoticon/emoji to your
clipboard. """
import pyperclip
import json
import sys
import argparse
with open("mapping.json") as f:
emotes = json.load(f)
def main():
parser = argparse.ArgumentParser(
description=sys.modules[__name__].__doc__,
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-p','--web_port')
args = parser.parse_args()
# Print help if no cli args are specified.
if len(sys.argv) < 2:
parser.print_help()
if __name__ == "__main__":
main()
|
""" A simple CLI tool for quickly copying common emoticon/emoji to your
clipboard. """
import pyperclip
import json
import sys
import argparse
with open("mapping.json") as f:
emotes = json.load(f)
def main():
parser = argparse.ArgumentParser(
description=sys.modules[__name__].__doc__,
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-p','--web_port')
args = parser.parse_args()
if __name__ == "__main__":
main()
Add logic for displaying help if no args are specified.""" A simple CLI tool for quickly copying common emoticon/emoji to your
clipboard. """
import pyperclip
import json
import sys
import argparse
with open("mapping.json") as f:
emotes = json.load(f)
def main():
parser = argparse.ArgumentParser(
description=sys.modules[__name__].__doc__,
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-p','--web_port')
args = parser.parse_args()
# Print help if no cli args are specified.
if len(sys.argv) < 2:
parser.print_help()
if __name__ == "__main__":
main()
|
<commit_before>""" A simple CLI tool for quickly copying common emoticon/emoji to your
clipboard. """
import pyperclip
import json
import sys
import argparse
with open("mapping.json") as f:
emotes = json.load(f)
def main():
parser = argparse.ArgumentParser(
description=sys.modules[__name__].__doc__,
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-p','--web_port')
args = parser.parse_args()
if __name__ == "__main__":
main()
<commit_msg>Add logic for displaying help if no args are specified.<commit_after>""" A simple CLI tool for quickly copying common emoticon/emoji to your
clipboard. """
import pyperclip
import json
import sys
import argparse
with open("mapping.json") as f:
emotes = json.load(f)
def main():
parser = argparse.ArgumentParser(
description=sys.modules[__name__].__doc__,
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-p','--web_port')
args = parser.parse_args()
# Print help if no cli args are specified.
if len(sys.argv) < 2:
parser.print_help()
if __name__ == "__main__":
main()
|
df045cb2e5e53c497aa101719c528b1f17c03a1f
|
app/__init__.py
|
app/__init__.py
|
from flask import Flask
from flask.ext.login import LoginManager
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('config')
login_manager = LoginManager()
login_manager.init_app(app)
db = SQLAlchemy(app)
from app import views, models
|
from werkzeug.contrib.fixers import ProxyFix
from flask import Flask
from flask.ext.login import LoginManager
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('config')
app.wsgi_app = ProxyFix(app.wsgi_app)
login_manager = LoginManager()
login_manager.init_app(app)
db = SQLAlchemy(app)
from app import views, models
|
Add ProxyFix() middleware component to fix the HTTPS redirection issue. See !17
|
Add ProxyFix() middleware component to fix the HTTPS redirection issue. See !17
|
Python
|
mit
|
ngoduykhanh/PowerDNS-Admin,ivanfilippov/PowerDNS-Admin,ivanfilippov/PowerDNS-Admin,CaptainQwark/PowerDNS-Admin,CaptainQwark/PowerDNS-Admin,ivanfilippov/PowerDNS-Admin,0x97/PowerDNS-Admin,ngoduykhanh/PowerDNS-Admin,CaptainQwark/PowerDNS-Admin,0x97/PowerDNS-Admin,0x97/PowerDNS-Admin,CaptainQwark/PowerDNS-Admin,ivanfilippov/PowerDNS-Admin,0x97/PowerDNS-Admin,ngoduykhanh/PowerDNS-Admin,ngoduykhanh/PowerDNS-Admin
|
from flask import Flask
from flask.ext.login import LoginManager
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('config')
login_manager = LoginManager()
login_manager.init_app(app)
db = SQLAlchemy(app)
from app import views, modelsAdd ProxyFix() middleware component to fix the HTTPS redirection issue. See !17
|
from werkzeug.contrib.fixers import ProxyFix
from flask import Flask
from flask.ext.login import LoginManager
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('config')
app.wsgi_app = ProxyFix(app.wsgi_app)
login_manager = LoginManager()
login_manager.init_app(app)
db = SQLAlchemy(app)
from app import views, models
|
<commit_before>from flask import Flask
from flask.ext.login import LoginManager
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('config')
login_manager = LoginManager()
login_manager.init_app(app)
db = SQLAlchemy(app)
from app import views, models<commit_msg>Add ProxyFix() middleware component to fix the HTTPS redirection issue. See !17<commit_after>
|
from werkzeug.contrib.fixers import ProxyFix
from flask import Flask
from flask.ext.login import LoginManager
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('config')
app.wsgi_app = ProxyFix(app.wsgi_app)
login_manager = LoginManager()
login_manager.init_app(app)
db = SQLAlchemy(app)
from app import views, models
|
from flask import Flask
from flask.ext.login import LoginManager
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('config')
login_manager = LoginManager()
login_manager.init_app(app)
db = SQLAlchemy(app)
from app import views, modelsAdd ProxyFix() middleware component to fix the HTTPS redirection issue. See !17from werkzeug.contrib.fixers import ProxyFix
from flask import Flask
from flask.ext.login import LoginManager
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('config')
app.wsgi_app = ProxyFix(app.wsgi_app)
login_manager = LoginManager()
login_manager.init_app(app)
db = SQLAlchemy(app)
from app import views, models
|
<commit_before>from flask import Flask
from flask.ext.login import LoginManager
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('config')
login_manager = LoginManager()
login_manager.init_app(app)
db = SQLAlchemy(app)
from app import views, models<commit_msg>Add ProxyFix() middleware component to fix the HTTPS redirection issue. See !17<commit_after>from werkzeug.contrib.fixers import ProxyFix
from flask import Flask
from flask.ext.login import LoginManager
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('config')
app.wsgi_app = ProxyFix(app.wsgi_app)
login_manager = LoginManager()
login_manager.init_app(app)
db = SQLAlchemy(app)
from app import views, models
|
c6c4c2f9acc348053372506a6ab8fe8d3b6d9b02
|
tempodb/__init__.py
|
tempodb/__init__.py
|
#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import client
from client import *
VERSION = (0, 3, 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
return version
__version__ = get_version()
|
#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import client
from client import *
VERSION = (0, 2, 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
return version
__version__ = get_version()
|
Revert "Roll to version 0.3.0"
|
Revert "Roll to version 0.3.0"
This reverts commit d98b62b366317a6188a743912ee6feea035e998b.
|
Python
|
mit
|
mrgaaron/tempoiq-python,tempodb/tempodb-python,TempoIQ/tempoiq-python,tempodb/tempodb-python
|
#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import client
from client import *
VERSION = (0, 3, 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
return version
__version__ = get_version()
Revert "Roll to version 0.3.0"
This reverts commit d98b62b366317a6188a743912ee6feea035e998b.
|
#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import client
from client import *
VERSION = (0, 2, 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
return version
__version__ = get_version()
|
<commit_before>
#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import client
from client import *
VERSION = (0, 3, 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
return version
__version__ = get_version()
<commit_msg>Revert "Roll to version 0.3.0"
This reverts commit d98b62b366317a6188a743912ee6feea035e998b.<commit_after>
|
#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import client
from client import *
VERSION = (0, 2, 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
return version
__version__ = get_version()
|
#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import client
from client import *
VERSION = (0, 3, 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
return version
__version__ = get_version()
Revert "Roll to version 0.3.0"
This reverts commit d98b62b366317a6188a743912ee6feea035e998b.
#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import client
from client import *
VERSION = (0, 2, 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
return version
__version__ = get_version()
|
<commit_before>
#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import client
from client import *
VERSION = (0, 3, 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
return version
__version__ = get_version()
<commit_msg>Revert "Roll to version 0.3.0"
This reverts commit d98b62b366317a6188a743912ee6feea035e998b.<commit_after>
#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import client
from client import *
VERSION = (0, 2, 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
return version
__version__ = get_version()
|
54e83b1836afcd758b9ef140a6bbf5c395ac4a4a
|
indico/modules/bootstrap/forms.py
|
indico/modules/bootstrap/forms.py
|
# This file is part of Indico.
# Copyright (C) 2002 - 2019 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from wtforms import BooleanField, StringField
from wtforms.fields.html5 import EmailField
from wtforms.validators import DataRequired, Email
from indico.modules.auth.forms import LocalRegistrationForm
from indico.util.i18n import _
from indico.web.forms.validators import UsedIfChecked
from indico.web.forms.widgets import SwitchWidget
class BootstrapForm(LocalRegistrationForm):
first_name = StringField('First Name', [DataRequired()])
last_name = StringField('Last Name', [DataRequired()])
email = EmailField(_('Email address'), [DataRequired()])
affiliation = StringField('Affiliation', [DataRequired()])
enable_tracking = BooleanField('Join the community', widget=SwitchWidget())
contact_name = StringField('Contact Name', [UsedIfChecked('enable_tracking'), DataRequired()])
contact_email = EmailField('Contact Email Address', [UsedIfChecked('enable_tracking'), DataRequired(), Email()])
|
# This file is part of Indico.
# Copyright (C) 2002 - 2019 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from wtforms import BooleanField, StringField
from wtforms.fields.html5 import EmailField
from wtforms.validators import DataRequired, Email
from indico.modules.auth.forms import LocalRegistrationForm
from indico.util.i18n import _
from indico.web.forms.validators import UsedIfChecked
from indico.web.forms.widgets import SwitchWidget
class BootstrapForm(LocalRegistrationForm):
first_name = StringField('First Name', [DataRequired()])
last_name = StringField('Last Name', [DataRequired()])
email = EmailField(_('Email address'), [DataRequired(), Email()])
affiliation = StringField('Affiliation', [DataRequired()])
enable_tracking = BooleanField('Join the community', widget=SwitchWidget())
contact_name = StringField('Contact Name', [UsedIfChecked('enable_tracking'), DataRequired()])
contact_email = EmailField('Contact Email Address', [UsedIfChecked('enable_tracking'), DataRequired(), Email()])
|
Add missing email validation during initial setup
|
Add missing email validation during initial setup
|
Python
|
mit
|
pferreir/indico,indico/indico,DirkHoffmann/indico,DirkHoffmann/indico,DirkHoffmann/indico,ThiefMaster/indico,pferreir/indico,OmeGak/indico,OmeGak/indico,indico/indico,OmeGak/indico,ThiefMaster/indico,mic4ael/indico,OmeGak/indico,ThiefMaster/indico,pferreir/indico,mic4ael/indico,indico/indico,pferreir/indico,mic4ael/indico,ThiefMaster/indico,DirkHoffmann/indico,mic4ael/indico,indico/indico
|
# This file is part of Indico.
# Copyright (C) 2002 - 2019 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from wtforms import BooleanField, StringField
from wtforms.fields.html5 import EmailField
from wtforms.validators import DataRequired, Email
from indico.modules.auth.forms import LocalRegistrationForm
from indico.util.i18n import _
from indico.web.forms.validators import UsedIfChecked
from indico.web.forms.widgets import SwitchWidget
class BootstrapForm(LocalRegistrationForm):
first_name = StringField('First Name', [DataRequired()])
last_name = StringField('Last Name', [DataRequired()])
email = EmailField(_('Email address'), [DataRequired()])
affiliation = StringField('Affiliation', [DataRequired()])
enable_tracking = BooleanField('Join the community', widget=SwitchWidget())
contact_name = StringField('Contact Name', [UsedIfChecked('enable_tracking'), DataRequired()])
contact_email = EmailField('Contact Email Address', [UsedIfChecked('enable_tracking'), DataRequired(), Email()])
Add missing email validation during initial setup
|
# This file is part of Indico.
# Copyright (C) 2002 - 2019 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from wtforms import BooleanField, StringField
from wtforms.fields.html5 import EmailField
from wtforms.validators import DataRequired, Email
from indico.modules.auth.forms import LocalRegistrationForm
from indico.util.i18n import _
from indico.web.forms.validators import UsedIfChecked
from indico.web.forms.widgets import SwitchWidget
class BootstrapForm(LocalRegistrationForm):
first_name = StringField('First Name', [DataRequired()])
last_name = StringField('Last Name', [DataRequired()])
email = EmailField(_('Email address'), [DataRequired(), Email()])
affiliation = StringField('Affiliation', [DataRequired()])
enable_tracking = BooleanField('Join the community', widget=SwitchWidget())
contact_name = StringField('Contact Name', [UsedIfChecked('enable_tracking'), DataRequired()])
contact_email = EmailField('Contact Email Address', [UsedIfChecked('enable_tracking'), DataRequired(), Email()])
|
<commit_before># This file is part of Indico.
# Copyright (C) 2002 - 2019 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from wtforms import BooleanField, StringField
from wtforms.fields.html5 import EmailField
from wtforms.validators import DataRequired, Email
from indico.modules.auth.forms import LocalRegistrationForm
from indico.util.i18n import _
from indico.web.forms.validators import UsedIfChecked
from indico.web.forms.widgets import SwitchWidget
class BootstrapForm(LocalRegistrationForm):
first_name = StringField('First Name', [DataRequired()])
last_name = StringField('Last Name', [DataRequired()])
email = EmailField(_('Email address'), [DataRequired()])
affiliation = StringField('Affiliation', [DataRequired()])
enable_tracking = BooleanField('Join the community', widget=SwitchWidget())
contact_name = StringField('Contact Name', [UsedIfChecked('enable_tracking'), DataRequired()])
contact_email = EmailField('Contact Email Address', [UsedIfChecked('enable_tracking'), DataRequired(), Email()])
<commit_msg>Add missing email validation during initial setup<commit_after>
|
# This file is part of Indico.
# Copyright (C) 2002 - 2019 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from wtforms import BooleanField, StringField
from wtforms.fields.html5 import EmailField
from wtforms.validators import DataRequired, Email
from indico.modules.auth.forms import LocalRegistrationForm
from indico.util.i18n import _
from indico.web.forms.validators import UsedIfChecked
from indico.web.forms.widgets import SwitchWidget
class BootstrapForm(LocalRegistrationForm):
first_name = StringField('First Name', [DataRequired()])
last_name = StringField('Last Name', [DataRequired()])
email = EmailField(_('Email address'), [DataRequired(), Email()])
affiliation = StringField('Affiliation', [DataRequired()])
enable_tracking = BooleanField('Join the community', widget=SwitchWidget())
contact_name = StringField('Contact Name', [UsedIfChecked('enable_tracking'), DataRequired()])
contact_email = EmailField('Contact Email Address', [UsedIfChecked('enable_tracking'), DataRequired(), Email()])
|
# This file is part of Indico.
# Copyright (C) 2002 - 2019 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from wtforms import BooleanField, StringField
from wtforms.fields.html5 import EmailField
from wtforms.validators import DataRequired, Email
from indico.modules.auth.forms import LocalRegistrationForm
from indico.util.i18n import _
from indico.web.forms.validators import UsedIfChecked
from indico.web.forms.widgets import SwitchWidget
class BootstrapForm(LocalRegistrationForm):
first_name = StringField('First Name', [DataRequired()])
last_name = StringField('Last Name', [DataRequired()])
email = EmailField(_('Email address'), [DataRequired()])
affiliation = StringField('Affiliation', [DataRequired()])
enable_tracking = BooleanField('Join the community', widget=SwitchWidget())
contact_name = StringField('Contact Name', [UsedIfChecked('enable_tracking'), DataRequired()])
contact_email = EmailField('Contact Email Address', [UsedIfChecked('enable_tracking'), DataRequired(), Email()])
Add missing email validation during initial setup# This file is part of Indico.
# Copyright (C) 2002 - 2019 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from wtforms import BooleanField, StringField
from wtforms.fields.html5 import EmailField
from wtforms.validators import DataRequired, Email
from indico.modules.auth.forms import LocalRegistrationForm
from indico.util.i18n import _
from indico.web.forms.validators import UsedIfChecked
from indico.web.forms.widgets import SwitchWidget
class BootstrapForm(LocalRegistrationForm):
first_name = StringField('First Name', [DataRequired()])
last_name = StringField('Last Name', [DataRequired()])
email = EmailField(_('Email address'), [DataRequired(), Email()])
affiliation = StringField('Affiliation', [DataRequired()])
enable_tracking = BooleanField('Join the community', widget=SwitchWidget())
contact_name = StringField('Contact Name', [UsedIfChecked('enable_tracking'), DataRequired()])
contact_email = EmailField('Contact Email Address', [UsedIfChecked('enable_tracking'), DataRequired(), Email()])
|
<commit_before># This file is part of Indico.
# Copyright (C) 2002 - 2019 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from wtforms import BooleanField, StringField
from wtforms.fields.html5 import EmailField
from wtforms.validators import DataRequired, Email
from indico.modules.auth.forms import LocalRegistrationForm
from indico.util.i18n import _
from indico.web.forms.validators import UsedIfChecked
from indico.web.forms.widgets import SwitchWidget
class BootstrapForm(LocalRegistrationForm):
first_name = StringField('First Name', [DataRequired()])
last_name = StringField('Last Name', [DataRequired()])
email = EmailField(_('Email address'), [DataRequired()])
affiliation = StringField('Affiliation', [DataRequired()])
enable_tracking = BooleanField('Join the community', widget=SwitchWidget())
contact_name = StringField('Contact Name', [UsedIfChecked('enable_tracking'), DataRequired()])
contact_email = EmailField('Contact Email Address', [UsedIfChecked('enable_tracking'), DataRequired(), Email()])
<commit_msg>Add missing email validation during initial setup<commit_after># This file is part of Indico.
# Copyright (C) 2002 - 2019 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from wtforms import BooleanField, StringField
from wtforms.fields.html5 import EmailField
from wtforms.validators import DataRequired, Email
from indico.modules.auth.forms import LocalRegistrationForm
from indico.util.i18n import _
from indico.web.forms.validators import UsedIfChecked
from indico.web.forms.widgets import SwitchWidget
class BootstrapForm(LocalRegistrationForm):
first_name = StringField('First Name', [DataRequired()])
last_name = StringField('Last Name', [DataRequired()])
email = EmailField(_('Email address'), [DataRequired(), Email()])
affiliation = StringField('Affiliation', [DataRequired()])
enable_tracking = BooleanField('Join the community', widget=SwitchWidget())
contact_name = StringField('Contact Name', [UsedIfChecked('enable_tracking'), DataRequired()])
contact_email = EmailField('Contact Email Address', [UsedIfChecked('enable_tracking'), DataRequired(), Email()])
|
19b15df8b2d92b3a00f94f53b684f9422d570c13
|
vumi/middleware/__init__.py
|
vumi/middleware/__init__.py
|
"""Middleware classes to process messages on their way in and out of workers.
"""
from vumi.middleware.base import (
TransportMiddleware, ApplicationMiddleware, MiddlewareStack,
create_middlewares_from_config, setup_middlewares_from_config)
__all__ = [
'TransportMiddleware', 'ApplicationMiddleware', 'MiddlewareStack',
'create_middlewares_from_config', 'setup_middlewares_from_config']
|
"""Middleware classes to process messages on their way in and out of workers.
"""
from vumi.middleware.base import (
BaseMiddleware, TransportMiddleware, ApplicationMiddleware,
MiddlewareStack, create_middlewares_from_config,
setup_middlewares_from_config)
__all__ = [
'BaseMiddleware', 'TransportMiddleware', 'ApplicationMiddleware',
'MiddlewareStack', 'create_middlewares_from_config',
'setup_middlewares_from_config']
|
Add BaseMiddleware to vumi.middleware API for 3rd-party middleware that wants to support both transports and applications.
|
Add BaseMiddleware to vumi.middleware API for 3rd-party middleware that wants to support both transports and applications.
|
Python
|
bsd-3-clause
|
harrissoerja/vumi,harrissoerja/vumi,vishwaprakashmishra/xmatrix,vishwaprakashmishra/xmatrix,harrissoerja/vumi,TouK/vumi,vishwaprakashmishra/xmatrix,TouK/vumi,TouK/vumi
|
"""Middleware classes to process messages on their way in and out of workers.
"""
from vumi.middleware.base import (
TransportMiddleware, ApplicationMiddleware, MiddlewareStack,
create_middlewares_from_config, setup_middlewares_from_config)
__all__ = [
'TransportMiddleware', 'ApplicationMiddleware', 'MiddlewareStack',
'create_middlewares_from_config', 'setup_middlewares_from_config']
Add BaseMiddleware to vumi.middleware API for 3rd-party middleware that wants to support both transports and applications.
|
"""Middleware classes to process messages on their way in and out of workers.
"""
from vumi.middleware.base import (
BaseMiddleware, TransportMiddleware, ApplicationMiddleware,
MiddlewareStack, create_middlewares_from_config,
setup_middlewares_from_config)
__all__ = [
'BaseMiddleware', 'TransportMiddleware', 'ApplicationMiddleware',
'MiddlewareStack', 'create_middlewares_from_config',
'setup_middlewares_from_config']
|
<commit_before>"""Middleware classes to process messages on their way in and out of workers.
"""
from vumi.middleware.base import (
TransportMiddleware, ApplicationMiddleware, MiddlewareStack,
create_middlewares_from_config, setup_middlewares_from_config)
__all__ = [
'TransportMiddleware', 'ApplicationMiddleware', 'MiddlewareStack',
'create_middlewares_from_config', 'setup_middlewares_from_config']
<commit_msg>Add BaseMiddleware to vumi.middleware API for 3rd-party middleware that wants to support both transports and applications.<commit_after>
|
"""Middleware classes to process messages on their way in and out of workers.
"""
from vumi.middleware.base import (
BaseMiddleware, TransportMiddleware, ApplicationMiddleware,
MiddlewareStack, create_middlewares_from_config,
setup_middlewares_from_config)
__all__ = [
'BaseMiddleware', 'TransportMiddleware', 'ApplicationMiddleware',
'MiddlewareStack', 'create_middlewares_from_config',
'setup_middlewares_from_config']
|
"""Middleware classes to process messages on their way in and out of workers.
"""
from vumi.middleware.base import (
TransportMiddleware, ApplicationMiddleware, MiddlewareStack,
create_middlewares_from_config, setup_middlewares_from_config)
__all__ = [
'TransportMiddleware', 'ApplicationMiddleware', 'MiddlewareStack',
'create_middlewares_from_config', 'setup_middlewares_from_config']
Add BaseMiddleware to vumi.middleware API for 3rd-party middleware that wants to support both transports and applications."""Middleware classes to process messages on their way in and out of workers.
"""
from vumi.middleware.base import (
BaseMiddleware, TransportMiddleware, ApplicationMiddleware,
MiddlewareStack, create_middlewares_from_config,
setup_middlewares_from_config)
__all__ = [
'BaseMiddleware', 'TransportMiddleware', 'ApplicationMiddleware',
'MiddlewareStack', 'create_middlewares_from_config',
'setup_middlewares_from_config']
|
<commit_before>"""Middleware classes to process messages on their way in and out of workers.
"""
from vumi.middleware.base import (
TransportMiddleware, ApplicationMiddleware, MiddlewareStack,
create_middlewares_from_config, setup_middlewares_from_config)
__all__ = [
'TransportMiddleware', 'ApplicationMiddleware', 'MiddlewareStack',
'create_middlewares_from_config', 'setup_middlewares_from_config']
<commit_msg>Add BaseMiddleware to vumi.middleware API for 3rd-party middleware that wants to support both transports and applications.<commit_after>"""Middleware classes to process messages on their way in and out of workers.
"""
from vumi.middleware.base import (
BaseMiddleware, TransportMiddleware, ApplicationMiddleware,
MiddlewareStack, create_middlewares_from_config,
setup_middlewares_from_config)
__all__ = [
'BaseMiddleware', 'TransportMiddleware', 'ApplicationMiddleware',
'MiddlewareStack', 'create_middlewares_from_config',
'setup_middlewares_from_config']
|
22772750d7bee9e9f1f8ac28068d1865e8f0ec32
|
fuf/interop.py
|
fuf/interop.py
|
import sys # Used to get rid of py2/3 differences
# Blatantly stolen from the excellent `six` library
# Allows the same calls between python2 and python3
if sys.version_info[0] == 3:
exec_ = getattr(__builtins__, "exec")
raw_input = input
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
|
import sys # Used to get rid of py2/3 differences
# Blatantly stolen from the excellent `six` library
# Allows the same calls between python2 and python3
if sys.version_info[0] == 3:
exec_ = __builtins__["exec"]
raw_input = input
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
|
Fix python 3 support for exec
|
Fix python 3 support for exec
|
Python
|
mit
|
msoucy/fuf
|
import sys # Used to get rid of py2/3 differences
# Blatantly stolen from the excellent `six` library
# Allows the same calls between python2 and python3
if sys.version_info[0] == 3:
exec_ = getattr(__builtins__, "exec")
raw_input = input
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
Fix python 3 support for exec
|
import sys # Used to get rid of py2/3 differences
# Blatantly stolen from the excellent `six` library
# Allows the same calls between python2 and python3
if sys.version_info[0] == 3:
exec_ = __builtins__["exec"]
raw_input = input
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
|
<commit_before>import sys # Used to get rid of py2/3 differences
# Blatantly stolen from the excellent `six` library
# Allows the same calls between python2 and python3
if sys.version_info[0] == 3:
exec_ = getattr(__builtins__, "exec")
raw_input = input
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
<commit_msg>Fix python 3 support for exec<commit_after>
|
import sys # Used to get rid of py2/3 differences
# Blatantly stolen from the excellent `six` library
# Allows the same calls between python2 and python3
if sys.version_info[0] == 3:
exec_ = __builtins__["exec"]
raw_input = input
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
|
import sys # Used to get rid of py2/3 differences
# Blatantly stolen from the excellent `six` library
# Allows the same calls between python2 and python3
if sys.version_info[0] == 3:
exec_ = getattr(__builtins__, "exec")
raw_input = input
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
Fix python 3 support for execimport sys # Used to get rid of py2/3 differences
# Blatantly stolen from the excellent `six` library
# Allows the same calls between python2 and python3
if sys.version_info[0] == 3:
exec_ = __builtins__["exec"]
raw_input = input
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
|
<commit_before>import sys # Used to get rid of py2/3 differences
# Blatantly stolen from the excellent `six` library
# Allows the same calls between python2 and python3
if sys.version_info[0] == 3:
exec_ = getattr(__builtins__, "exec")
raw_input = input
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
<commit_msg>Fix python 3 support for exec<commit_after>import sys # Used to get rid of py2/3 differences
# Blatantly stolen from the excellent `six` library
# Allows the same calls between python2 and python3
if sys.version_info[0] == 3:
exec_ = __builtins__["exec"]
raw_input = input
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
|
ca8aca917234bdc10a47091ee83be8eed4845b5f
|
applications/decorators.py
|
applications/decorators.py
|
from functools import wraps
from django.http import HttpResponseNotFound
from django.shortcuts import redirect
from core.utils import get_event_page
def organiser_only(function):
"""
Decorator for views that checks that the user is logged in and that
they are a team member for a particular page. Returns 404 otherwise.
"""
@wraps(function)
def decorator(request, *args, **kwargs):
if not kwargs.get('city'):
raise ValueError(
'"City" slug must be present to user this decorator.')
if not request.user.is_authenticated():
return redirect('core:event', kwargs.get('city'))
city = kwargs.get('city')
page = get_event_page(city, request.user.is_authenticated(), False)
if page and (request.user in page.event.team.all() or request.user.is_superuser):
return function(request, *args, **kwargs)
return HttpResponseNotFound()
return decorator
|
from functools import wraps
from django.http import HttpResponseNotFound
from django.shortcuts import redirect
from core.utils import get_event_page
def organiser_only(function):
"""
Decorator for views that checks that the user is logged in and that
they are a team member for a particular page. Returns 404 otherwise.
"""
@wraps(function)
def decorator(request, *args, **kwargs):
city = kwargs.get('city')
if not city:
raise ValueError(
'"City" slug must be present to user this decorator.')
if not request.user.is_authenticated():
return redirect('core:event', city)
page = get_event_page(city, request.user.is_authenticated(), False)
if page and (request.user in page.event.team.all() or request.user.is_superuser):
return function(request, *args, **kwargs)
return HttpResponseNotFound()
return decorator
|
Define 'city' at top decorator
|
Define 'city' at top decorator
|
Python
|
bsd-3-clause
|
patjouk/djangogirls,DjangoGirls/djangogirls,DjangoGirls/djangogirls,patjouk/djangogirls,patjouk/djangogirls,patjouk/djangogirls,DjangoGirls/djangogirls
|
from functools import wraps
from django.http import HttpResponseNotFound
from django.shortcuts import redirect
from core.utils import get_event_page
def organiser_only(function):
"""
Decorator for views that checks that the user is logged in and that
they are a team member for a particular page. Returns 404 otherwise.
"""
@wraps(function)
def decorator(request, *args, **kwargs):
if not kwargs.get('city'):
raise ValueError(
'"City" slug must be present to user this decorator.')
if not request.user.is_authenticated():
return redirect('core:event', kwargs.get('city'))
city = kwargs.get('city')
page = get_event_page(city, request.user.is_authenticated(), False)
if page and (request.user in page.event.team.all() or request.user.is_superuser):
return function(request, *args, **kwargs)
return HttpResponseNotFound()
return decorator
Define 'city' at top decorator
|
from functools import wraps
from django.http import HttpResponseNotFound
from django.shortcuts import redirect
from core.utils import get_event_page
def organiser_only(function):
"""
Decorator for views that checks that the user is logged in and that
they are a team member for a particular page. Returns 404 otherwise.
"""
@wraps(function)
def decorator(request, *args, **kwargs):
city = kwargs.get('city')
if not city:
raise ValueError(
'"City" slug must be present to user this decorator.')
if not request.user.is_authenticated():
return redirect('core:event', city)
page = get_event_page(city, request.user.is_authenticated(), False)
if page and (request.user in page.event.team.all() or request.user.is_superuser):
return function(request, *args, **kwargs)
return HttpResponseNotFound()
return decorator
|
<commit_before>from functools import wraps
from django.http import HttpResponseNotFound
from django.shortcuts import redirect
from core.utils import get_event_page
def organiser_only(function):
"""
Decorator for views that checks that the user is logged in and that
they are a team member for a particular page. Returns 404 otherwise.
"""
@wraps(function)
def decorator(request, *args, **kwargs):
if not kwargs.get('city'):
raise ValueError(
'"City" slug must be present to user this decorator.')
if not request.user.is_authenticated():
return redirect('core:event', kwargs.get('city'))
city = kwargs.get('city')
page = get_event_page(city, request.user.is_authenticated(), False)
if page and (request.user in page.event.team.all() or request.user.is_superuser):
return function(request, *args, **kwargs)
return HttpResponseNotFound()
return decorator
<commit_msg>Define 'city' at top decorator<commit_after>
|
from functools import wraps
from django.http import HttpResponseNotFound
from django.shortcuts import redirect
from core.utils import get_event_page
def organiser_only(function):
"""
Decorator for views that checks that the user is logged in and that
they are a team member for a particular page. Returns 404 otherwise.
"""
@wraps(function)
def decorator(request, *args, **kwargs):
city = kwargs.get('city')
if not city:
raise ValueError(
'"City" slug must be present to user this decorator.')
if not request.user.is_authenticated():
return redirect('core:event', city)
page = get_event_page(city, request.user.is_authenticated(), False)
if page and (request.user in page.event.team.all() or request.user.is_superuser):
return function(request, *args, **kwargs)
return HttpResponseNotFound()
return decorator
|
from functools import wraps
from django.http import HttpResponseNotFound
from django.shortcuts import redirect
from core.utils import get_event_page
def organiser_only(function):
"""
Decorator for views that checks that the user is logged in and that
they are a team member for a particular page. Returns 404 otherwise.
"""
@wraps(function)
def decorator(request, *args, **kwargs):
if not kwargs.get('city'):
raise ValueError(
'"City" slug must be present to user this decorator.')
if not request.user.is_authenticated():
return redirect('core:event', kwargs.get('city'))
city = kwargs.get('city')
page = get_event_page(city, request.user.is_authenticated(), False)
if page and (request.user in page.event.team.all() or request.user.is_superuser):
return function(request, *args, **kwargs)
return HttpResponseNotFound()
return decorator
Define 'city' at top decoratorfrom functools import wraps
from django.http import HttpResponseNotFound
from django.shortcuts import redirect
from core.utils import get_event_page
def organiser_only(function):
"""
Decorator for views that checks that the user is logged in and that
they are a team member for a particular page. Returns 404 otherwise.
"""
@wraps(function)
def decorator(request, *args, **kwargs):
city = kwargs.get('city')
if not city:
raise ValueError(
'"City" slug must be present to user this decorator.')
if not request.user.is_authenticated():
return redirect('core:event', city)
page = get_event_page(city, request.user.is_authenticated(), False)
if page and (request.user in page.event.team.all() or request.user.is_superuser):
return function(request, *args, **kwargs)
return HttpResponseNotFound()
return decorator
|
<commit_before>from functools import wraps
from django.http import HttpResponseNotFound
from django.shortcuts import redirect
from core.utils import get_event_page
def organiser_only(function):
"""
Decorator for views that checks that the user is logged in and that
they are a team member for a particular page. Returns 404 otherwise.
"""
@wraps(function)
def decorator(request, *args, **kwargs):
if not kwargs.get('city'):
raise ValueError(
'"City" slug must be present to user this decorator.')
if not request.user.is_authenticated():
return redirect('core:event', kwargs.get('city'))
city = kwargs.get('city')
page = get_event_page(city, request.user.is_authenticated(), False)
if page and (request.user in page.event.team.all() or request.user.is_superuser):
return function(request, *args, **kwargs)
return HttpResponseNotFound()
return decorator
<commit_msg>Define 'city' at top decorator<commit_after>from functools import wraps
from django.http import HttpResponseNotFound
from django.shortcuts import redirect
from core.utils import get_event_page
def organiser_only(function):
"""
Decorator for views that checks that the user is logged in and that
they are a team member for a particular page. Returns 404 otherwise.
"""
@wraps(function)
def decorator(request, *args, **kwargs):
city = kwargs.get('city')
if not city:
raise ValueError(
'"City" slug must be present to user this decorator.')
if not request.user.is_authenticated():
return redirect('core:event', city)
page = get_event_page(city, request.user.is_authenticated(), False)
if page and (request.user in page.event.team.all() or request.user.is_superuser):
return function(request, *args, **kwargs)
return HttpResponseNotFound()
return decorator
|
6761d8230d59031ad5183615f68a71e51f5f0309
|
elasticmock/__init__.py
|
elasticmock/__init__.py
|
# -*- coding: utf-8 -*-
from functools import wraps
from mock import patch
from elasticmock.fake_elasticsearch import FakeElasticsearch
ELASTIC_INSTANCES = {}
def _get_elasticmock(hosts=None):
elastic_key = 'localhost:9200' if hosts is None else '{0}:{1}'.format(hosts[0].get('host'), hosts[0].get('port'))
if elastic_key in ELASTIC_INSTANCES:
connection = ELASTIC_INSTANCES.get(elastic_key)
else:
connection = FakeElasticsearch()
ELASTIC_INSTANCES[elastic_key] = connection
return connection
def elasticmock(f):
@wraps(f)
def decorated(*args, **kwargs):
ELASTIC_INSTANCES.clear()
with patch('elasticsearch.Elasticsearch', _get_elasticmock):
result = f(*args, **kwargs)
return result
return decorated
|
# -*- coding: utf-8 -*-
from functools import wraps
from mock import patch
from elasticmock.fake_elasticsearch import FakeElasticsearch
ELASTIC_INSTANCES = {}
def _get_elasticmock(hosts=None, *args, **kwargs):
elastic_key = 'localhost:9200' if hosts is None else '{0}:{1}'.format(hosts[0].get('host'), hosts[0].get('port'))
if elastic_key in ELASTIC_INSTANCES:
connection = ELASTIC_INSTANCES.get(elastic_key)
else:
connection = FakeElasticsearch()
ELASTIC_INSTANCES[elastic_key] = connection
return connection
def elasticmock(f):
@wraps(f)
def decorated(*args, **kwargs):
ELASTIC_INSTANCES.clear()
with patch('elasticsearch.Elasticsearch', _get_elasticmock):
result = f(*args, **kwargs)
return result
return decorated
|
Allow ignored params to Elasticsearch
|
Allow ignored params to Elasticsearch
|
Python
|
mit
|
vrcmarcos/elasticmock
|
# -*- coding: utf-8 -*-
from functools import wraps
from mock import patch
from elasticmock.fake_elasticsearch import FakeElasticsearch
ELASTIC_INSTANCES = {}
def _get_elasticmock(hosts=None):
elastic_key = 'localhost:9200' if hosts is None else '{0}:{1}'.format(hosts[0].get('host'), hosts[0].get('port'))
if elastic_key in ELASTIC_INSTANCES:
connection = ELASTIC_INSTANCES.get(elastic_key)
else:
connection = FakeElasticsearch()
ELASTIC_INSTANCES[elastic_key] = connection
return connection
def elasticmock(f):
@wraps(f)
def decorated(*args, **kwargs):
ELASTIC_INSTANCES.clear()
with patch('elasticsearch.Elasticsearch', _get_elasticmock):
result = f(*args, **kwargs)
return result
return decorated
Allow ignored params to Elasticsearch
|
# -*- coding: utf-8 -*-
from functools import wraps
from mock import patch
from elasticmock.fake_elasticsearch import FakeElasticsearch
ELASTIC_INSTANCES = {}
def _get_elasticmock(hosts=None, *args, **kwargs):
elastic_key = 'localhost:9200' if hosts is None else '{0}:{1}'.format(hosts[0].get('host'), hosts[0].get('port'))
if elastic_key in ELASTIC_INSTANCES:
connection = ELASTIC_INSTANCES.get(elastic_key)
else:
connection = FakeElasticsearch()
ELASTIC_INSTANCES[elastic_key] = connection
return connection
def elasticmock(f):
@wraps(f)
def decorated(*args, **kwargs):
ELASTIC_INSTANCES.clear()
with patch('elasticsearch.Elasticsearch', _get_elasticmock):
result = f(*args, **kwargs)
return result
return decorated
|
<commit_before># -*- coding: utf-8 -*-
from functools import wraps
from mock import patch
from elasticmock.fake_elasticsearch import FakeElasticsearch
ELASTIC_INSTANCES = {}
def _get_elasticmock(hosts=None):
elastic_key = 'localhost:9200' if hosts is None else '{0}:{1}'.format(hosts[0].get('host'), hosts[0].get('port'))
if elastic_key in ELASTIC_INSTANCES:
connection = ELASTIC_INSTANCES.get(elastic_key)
else:
connection = FakeElasticsearch()
ELASTIC_INSTANCES[elastic_key] = connection
return connection
def elasticmock(f):
@wraps(f)
def decorated(*args, **kwargs):
ELASTIC_INSTANCES.clear()
with patch('elasticsearch.Elasticsearch', _get_elasticmock):
result = f(*args, **kwargs)
return result
return decorated
<commit_msg>Allow ignored params to Elasticsearch<commit_after>
|
# -*- coding: utf-8 -*-
from functools import wraps
from mock import patch
from elasticmock.fake_elasticsearch import FakeElasticsearch
ELASTIC_INSTANCES = {}
def _get_elasticmock(hosts=None, *args, **kwargs):
elastic_key = 'localhost:9200' if hosts is None else '{0}:{1}'.format(hosts[0].get('host'), hosts[0].get('port'))
if elastic_key in ELASTIC_INSTANCES:
connection = ELASTIC_INSTANCES.get(elastic_key)
else:
connection = FakeElasticsearch()
ELASTIC_INSTANCES[elastic_key] = connection
return connection
def elasticmock(f):
@wraps(f)
def decorated(*args, **kwargs):
ELASTIC_INSTANCES.clear()
with patch('elasticsearch.Elasticsearch', _get_elasticmock):
result = f(*args, **kwargs)
return result
return decorated
|
# -*- coding: utf-8 -*-
from functools import wraps
from mock import patch
from elasticmock.fake_elasticsearch import FakeElasticsearch
ELASTIC_INSTANCES = {}
def _get_elasticmock(hosts=None):
elastic_key = 'localhost:9200' if hosts is None else '{0}:{1}'.format(hosts[0].get('host'), hosts[0].get('port'))
if elastic_key in ELASTIC_INSTANCES:
connection = ELASTIC_INSTANCES.get(elastic_key)
else:
connection = FakeElasticsearch()
ELASTIC_INSTANCES[elastic_key] = connection
return connection
def elasticmock(f):
@wraps(f)
def decorated(*args, **kwargs):
ELASTIC_INSTANCES.clear()
with patch('elasticsearch.Elasticsearch', _get_elasticmock):
result = f(*args, **kwargs)
return result
return decorated
Allow ignored params to Elasticsearch# -*- coding: utf-8 -*-
from functools import wraps
from mock import patch
from elasticmock.fake_elasticsearch import FakeElasticsearch
ELASTIC_INSTANCES = {}
def _get_elasticmock(hosts=None, *args, **kwargs):
elastic_key = 'localhost:9200' if hosts is None else '{0}:{1}'.format(hosts[0].get('host'), hosts[0].get('port'))
if elastic_key in ELASTIC_INSTANCES:
connection = ELASTIC_INSTANCES.get(elastic_key)
else:
connection = FakeElasticsearch()
ELASTIC_INSTANCES[elastic_key] = connection
return connection
def elasticmock(f):
@wraps(f)
def decorated(*args, **kwargs):
ELASTIC_INSTANCES.clear()
with patch('elasticsearch.Elasticsearch', _get_elasticmock):
result = f(*args, **kwargs)
return result
return decorated
|
<commit_before># -*- coding: utf-8 -*-
from functools import wraps
from mock import patch
from elasticmock.fake_elasticsearch import FakeElasticsearch
ELASTIC_INSTANCES = {}
def _get_elasticmock(hosts=None):
elastic_key = 'localhost:9200' if hosts is None else '{0}:{1}'.format(hosts[0].get('host'), hosts[0].get('port'))
if elastic_key in ELASTIC_INSTANCES:
connection = ELASTIC_INSTANCES.get(elastic_key)
else:
connection = FakeElasticsearch()
ELASTIC_INSTANCES[elastic_key] = connection
return connection
def elasticmock(f):
@wraps(f)
def decorated(*args, **kwargs):
ELASTIC_INSTANCES.clear()
with patch('elasticsearch.Elasticsearch', _get_elasticmock):
result = f(*args, **kwargs)
return result
return decorated
<commit_msg>Allow ignored params to Elasticsearch<commit_after># -*- coding: utf-8 -*-
from functools import wraps
from mock import patch
from elasticmock.fake_elasticsearch import FakeElasticsearch
ELASTIC_INSTANCES = {}
def _get_elasticmock(hosts=None, *args, **kwargs):
elastic_key = 'localhost:9200' if hosts is None else '{0}:{1}'.format(hosts[0].get('host'), hosts[0].get('port'))
if elastic_key in ELASTIC_INSTANCES:
connection = ELASTIC_INSTANCES.get(elastic_key)
else:
connection = FakeElasticsearch()
ELASTIC_INSTANCES[elastic_key] = connection
return connection
def elasticmock(f):
@wraps(f)
def decorated(*args, **kwargs):
ELASTIC_INSTANCES.clear()
with patch('elasticsearch.Elasticsearch', _get_elasticmock):
result = f(*args, **kwargs)
return result
return decorated
|
b291a1594985a3c671b81fb05a8487a8d7a403ea
|
icekit/page_types/layout_page/models.py
|
icekit/page_types/layout_page/models.py
|
from . import abstract_models
class LayoutPage(abstract_models.AbstractLayoutPage):
class Meta:
verbose_name = "Layout page"
# Fluent prepends `pagetype_` to the db table. This seems to break
# Django's inference of m2m table names during migrations, when the
# m2m is defined on an abstract model that's mixed in. Instead we
# give the table a name that's different from the default.
# https://github.com/django-fluent/django-fluent-pages/issues/89
db_table = "icekit_layoutpage"
|
from . import abstract_models
class LayoutPage(abstract_models.AbstractLayoutPage):
class Meta:
verbose_name = "Page"
# Fluent prepends `pagetype_` to the db table. This seems to break
# Django's inference of m2m table names during migrations, when the
# m2m is defined on an abstract model that's mixed in. Instead we
# give the table a name that's different from the default.
# https://github.com/django-fluent/django-fluent-pages/issues/89
db_table = "icekit_layoutpage"
|
Change verbose name of Layout Page to ‘Page’ for simplicity.
|
Change verbose name of Layout Page to ‘Page’ for simplicity.
|
Python
|
mit
|
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
|
from . import abstract_models
class LayoutPage(abstract_models.AbstractLayoutPage):
class Meta:
verbose_name = "Layout page"
# Fluent prepends `pagetype_` to the db table. This seems to break
# Django's inference of m2m table names during migrations, when the
# m2m is defined on an abstract model that's mixed in. Instead we
# give the table a name that's different from the default.
# https://github.com/django-fluent/django-fluent-pages/issues/89
db_table = "icekit_layoutpage"Change verbose name of Layout Page to ‘Page’ for simplicity.
|
from . import abstract_models
class LayoutPage(abstract_models.AbstractLayoutPage):
class Meta:
verbose_name = "Page"
# Fluent prepends `pagetype_` to the db table. This seems to break
# Django's inference of m2m table names during migrations, when the
# m2m is defined on an abstract model that's mixed in. Instead we
# give the table a name that's different from the default.
# https://github.com/django-fluent/django-fluent-pages/issues/89
db_table = "icekit_layoutpage"
|
<commit_before>from . import abstract_models
class LayoutPage(abstract_models.AbstractLayoutPage):
class Meta:
verbose_name = "Layout page"
# Fluent prepends `pagetype_` to the db table. This seems to break
# Django's inference of m2m table names during migrations, when the
# m2m is defined on an abstract model that's mixed in. Instead we
# give the table a name that's different from the default.
# https://github.com/django-fluent/django-fluent-pages/issues/89
db_table = "icekit_layoutpage"<commit_msg>Change verbose name of Layout Page to ‘Page’ for simplicity.<commit_after>
|
from . import abstract_models
class LayoutPage(abstract_models.AbstractLayoutPage):
class Meta:
verbose_name = "Page"
# Fluent prepends `pagetype_` to the db table. This seems to break
# Django's inference of m2m table names during migrations, when the
# m2m is defined on an abstract model that's mixed in. Instead we
# give the table a name that's different from the default.
# https://github.com/django-fluent/django-fluent-pages/issues/89
db_table = "icekit_layoutpage"
|
from . import abstract_models
class LayoutPage(abstract_models.AbstractLayoutPage):
class Meta:
verbose_name = "Layout page"
# Fluent prepends `pagetype_` to the db table. This seems to break
# Django's inference of m2m table names during migrations, when the
# m2m is defined on an abstract model that's mixed in. Instead we
# give the table a name that's different from the default.
# https://github.com/django-fluent/django-fluent-pages/issues/89
db_table = "icekit_layoutpage"Change verbose name of Layout Page to ‘Page’ for simplicity.from . import abstract_models
class LayoutPage(abstract_models.AbstractLayoutPage):
class Meta:
verbose_name = "Page"
# Fluent prepends `pagetype_` to the db table. This seems to break
# Django's inference of m2m table names during migrations, when the
# m2m is defined on an abstract model that's mixed in. Instead we
# give the table a name that's different from the default.
# https://github.com/django-fluent/django-fluent-pages/issues/89
db_table = "icekit_layoutpage"
|
<commit_before>from . import abstract_models
class LayoutPage(abstract_models.AbstractLayoutPage):
class Meta:
verbose_name = "Layout page"
# Fluent prepends `pagetype_` to the db table. This seems to break
# Django's inference of m2m table names during migrations, when the
# m2m is defined on an abstract model that's mixed in. Instead we
# give the table a name that's different from the default.
# https://github.com/django-fluent/django-fluent-pages/issues/89
db_table = "icekit_layoutpage"<commit_msg>Change verbose name of Layout Page to ‘Page’ for simplicity.<commit_after>from . import abstract_models
class LayoutPage(abstract_models.AbstractLayoutPage):
class Meta:
verbose_name = "Page"
# Fluent prepends `pagetype_` to the db table. This seems to break
# Django's inference of m2m table names during migrations, when the
# m2m is defined on an abstract model that's mixed in. Instead we
# give the table a name that's different from the default.
# https://github.com/django-fluent/django-fluent-pages/issues/89
db_table = "icekit_layoutpage"
|
c21c53a625b2ca1e2f704286bfa99e61bbed0619
|
takeyourmeds/reminders/reminders_calls/tests.py
|
takeyourmeds/reminders/reminders_calls/tests.py
|
from takeyourmeds.utils.test import TestCase
from ..enums import TypeEnum, SourceEnum
from .enums import StateEnum
class TwimlCallbackTest(TestCase):
def setUp(self):
super(TwimlCallbackTest, self).setUp()
self.call = self.user.reminders.create(
type=TypeEnum.call,
).instances.create(
source=SourceEnum.manual,
).calls.create()
def test_url(self):
url = self.call.get_twiml_callback_url()
self.assert_(url.startswith('http'))
|
from django.conf import settings
from takeyourmeds.utils.test import TestCase
from ..enums import TypeEnum, SourceEnum
from .enums import StateEnum
class TwimlCallbackTest(TestCase):
def setUp(self):
super(TwimlCallbackTest, self).setUp()
self.reminder = self.user.reminders.create(
type=TypeEnum.call,
audio_url='/dummy.mp3',
)
self.call = self.reminder.instances.create(
source=SourceEnum.manual,
).calls.create()
def test_url(self):
url = self.call.get_twiml_callback_url()
self.assert_(url.startswith('http'))
def test_content(self):
response = self.assertGET(
200,
'reminders:calls:twiml-callback',
self.call.ident,
)
self.assert_(response.content.startswith('<?xml'))
self.assert_(settings.SITE_URL in response.content)
self.assert_(self.reminder.audio_url in response.content)
|
Check we get some sane XML back
|
Check we get some sane XML back
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk>
|
Python
|
mit
|
takeyourmeds/takeyourmeds-web,takeyourmeds/takeyourmeds-web,takeyourmeds/takeyourmeds-web,takeyourmeds/takeyourmeds-web
|
from takeyourmeds.utils.test import TestCase
from ..enums import TypeEnum, SourceEnum
from .enums import StateEnum
class TwimlCallbackTest(TestCase):
def setUp(self):
super(TwimlCallbackTest, self).setUp()
self.call = self.user.reminders.create(
type=TypeEnum.call,
).instances.create(
source=SourceEnum.manual,
).calls.create()
def test_url(self):
url = self.call.get_twiml_callback_url()
self.assert_(url.startswith('http'))
Check we get some sane XML back
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk>
|
from django.conf import settings
from takeyourmeds.utils.test import TestCase
from ..enums import TypeEnum, SourceEnum
from .enums import StateEnum
class TwimlCallbackTest(TestCase):
def setUp(self):
super(TwimlCallbackTest, self).setUp()
self.reminder = self.user.reminders.create(
type=TypeEnum.call,
audio_url='/dummy.mp3',
)
self.call = self.reminder.instances.create(
source=SourceEnum.manual,
).calls.create()
def test_url(self):
url = self.call.get_twiml_callback_url()
self.assert_(url.startswith('http'))
def test_content(self):
response = self.assertGET(
200,
'reminders:calls:twiml-callback',
self.call.ident,
)
self.assert_(response.content.startswith('<?xml'))
self.assert_(settings.SITE_URL in response.content)
self.assert_(self.reminder.audio_url in response.content)
|
<commit_before>from takeyourmeds.utils.test import TestCase
from ..enums import TypeEnum, SourceEnum
from .enums import StateEnum
class TwimlCallbackTest(TestCase):
def setUp(self):
super(TwimlCallbackTest, self).setUp()
self.call = self.user.reminders.create(
type=TypeEnum.call,
).instances.create(
source=SourceEnum.manual,
).calls.create()
def test_url(self):
url = self.call.get_twiml_callback_url()
self.assert_(url.startswith('http'))
<commit_msg>Check we get some sane XML back
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk><commit_after>
|
from django.conf import settings
from takeyourmeds.utils.test import TestCase
from ..enums import TypeEnum, SourceEnum
from .enums import StateEnum
class TwimlCallbackTest(TestCase):
def setUp(self):
super(TwimlCallbackTest, self).setUp()
self.reminder = self.user.reminders.create(
type=TypeEnum.call,
audio_url='/dummy.mp3',
)
self.call = self.reminder.instances.create(
source=SourceEnum.manual,
).calls.create()
def test_url(self):
url = self.call.get_twiml_callback_url()
self.assert_(url.startswith('http'))
def test_content(self):
response = self.assertGET(
200,
'reminders:calls:twiml-callback',
self.call.ident,
)
self.assert_(response.content.startswith('<?xml'))
self.assert_(settings.SITE_URL in response.content)
self.assert_(self.reminder.audio_url in response.content)
|
from takeyourmeds.utils.test import TestCase
from ..enums import TypeEnum, SourceEnum
from .enums import StateEnum
class TwimlCallbackTest(TestCase):
def setUp(self):
super(TwimlCallbackTest, self).setUp()
self.call = self.user.reminders.create(
type=TypeEnum.call,
).instances.create(
source=SourceEnum.manual,
).calls.create()
def test_url(self):
url = self.call.get_twiml_callback_url()
self.assert_(url.startswith('http'))
Check we get some sane XML back
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk>from django.conf import settings
from takeyourmeds.utils.test import TestCase
from ..enums import TypeEnum, SourceEnum
from .enums import StateEnum
class TwimlCallbackTest(TestCase):
def setUp(self):
super(TwimlCallbackTest, self).setUp()
self.reminder = self.user.reminders.create(
type=TypeEnum.call,
audio_url='/dummy.mp3',
)
self.call = self.reminder.instances.create(
source=SourceEnum.manual,
).calls.create()
def test_url(self):
url = self.call.get_twiml_callback_url()
self.assert_(url.startswith('http'))
def test_content(self):
response = self.assertGET(
200,
'reminders:calls:twiml-callback',
self.call.ident,
)
self.assert_(response.content.startswith('<?xml'))
self.assert_(settings.SITE_URL in response.content)
self.assert_(self.reminder.audio_url in response.content)
|
<commit_before>from takeyourmeds.utils.test import TestCase
from ..enums import TypeEnum, SourceEnum
from .enums import StateEnum
class TwimlCallbackTest(TestCase):
def setUp(self):
super(TwimlCallbackTest, self).setUp()
self.call = self.user.reminders.create(
type=TypeEnum.call,
).instances.create(
source=SourceEnum.manual,
).calls.create()
def test_url(self):
url = self.call.get_twiml_callback_url()
self.assert_(url.startswith('http'))
<commit_msg>Check we get some sane XML back
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk><commit_after>from django.conf import settings
from takeyourmeds.utils.test import TestCase
from ..enums import TypeEnum, SourceEnum
from .enums import StateEnum
class TwimlCallbackTest(TestCase):
def setUp(self):
super(TwimlCallbackTest, self).setUp()
self.reminder = self.user.reminders.create(
type=TypeEnum.call,
audio_url='/dummy.mp3',
)
self.call = self.reminder.instances.create(
source=SourceEnum.manual,
).calls.create()
def test_url(self):
url = self.call.get_twiml_callback_url()
self.assert_(url.startswith('http'))
def test_content(self):
response = self.assertGET(
200,
'reminders:calls:twiml-callback',
self.call.ident,
)
self.assert_(response.content.startswith('<?xml'))
self.assert_(settings.SITE_URL in response.content)
self.assert_(self.reminder.audio_url in response.content)
|
5f77be6bc80b9ed653f85f4b8c0c60ccb520f2f8
|
saleor/payment/gateways/utils.py
|
saleor/payment/gateways/utils.py
|
import warnings
from typing import TYPE_CHECKING, List
from django.conf import settings
if TYPE_CHECKING:
from ..interface import GatewayConfig
def get_supported_currencies(config: "GatewayConfig", gateway_name: str) -> List[str]:
supp_currencies = config.supported_currencies
if not supp_currencies:
currencies = [settings.DEFAULT_CURRENCY]
warnings.warn(
f"Default currency used for {gateway_name}. "
"DEFAULT_CURRENCY setting is deprecated, "
"please configure supported currencies for this gateway."
)
else:
currencies = [c.strip() for c in supp_currencies.split(",")]
return currencies
|
import re
import warnings
from typing import TYPE_CHECKING, List
from django.conf import settings
if TYPE_CHECKING:
from ..interface import GatewayConfig
def get_supported_currencies(config: "GatewayConfig", gateway_name: str) -> List[str]:
supp_currencies = config.supported_currencies
if not supp_currencies:
currencies = [settings.DEFAULT_CURRENCY]
warnings.warn(
f"Default currency used for {gateway_name}. "
"DEFAULT_CURRENCY setting is deprecated, "
"please configure supported currencies for this gateway."
)
else:
currencies = [c.strip() for c in supp_currencies.split(",")]
return currencies
|
Update supported currencies in gateways, set default currency for dummy gateway
|
Update supported currencies in gateways, set default currency for dummy gateway
|
Python
|
bsd-3-clause
|
mociepka/saleor,mociepka/saleor,mociepka/saleor
|
import warnings
from typing import TYPE_CHECKING, List
from django.conf import settings
if TYPE_CHECKING:
from ..interface import GatewayConfig
def get_supported_currencies(config: "GatewayConfig", gateway_name: str) -> List[str]:
supp_currencies = config.supported_currencies
if not supp_currencies:
currencies = [settings.DEFAULT_CURRENCY]
warnings.warn(
f"Default currency used for {gateway_name}. "
"DEFAULT_CURRENCY setting is deprecated, "
"please configure supported currencies for this gateway."
)
else:
currencies = [c.strip() for c in supp_currencies.split(",")]
return currencies
Update supported currencies in gateways, set default currency for dummy gateway
|
import re
import warnings
from typing import TYPE_CHECKING, List
from django.conf import settings
if TYPE_CHECKING:
from ..interface import GatewayConfig
def get_supported_currencies(config: "GatewayConfig", gateway_name: str) -> List[str]:
supp_currencies = config.supported_currencies
if not supp_currencies:
currencies = [settings.DEFAULT_CURRENCY]
warnings.warn(
f"Default currency used for {gateway_name}. "
"DEFAULT_CURRENCY setting is deprecated, "
"please configure supported currencies for this gateway."
)
else:
currencies = [c.strip() for c in supp_currencies.split(",")]
return currencies
|
<commit_before>import warnings
from typing import TYPE_CHECKING, List
from django.conf import settings
if TYPE_CHECKING:
from ..interface import GatewayConfig
def get_supported_currencies(config: "GatewayConfig", gateway_name: str) -> List[str]:
supp_currencies = config.supported_currencies
if not supp_currencies:
currencies = [settings.DEFAULT_CURRENCY]
warnings.warn(
f"Default currency used for {gateway_name}. "
"DEFAULT_CURRENCY setting is deprecated, "
"please configure supported currencies for this gateway."
)
else:
currencies = [c.strip() for c in supp_currencies.split(",")]
return currencies
<commit_msg>Update supported currencies in gateways, set default currency for dummy gateway<commit_after>
|
import re
import warnings
from typing import TYPE_CHECKING, List
from django.conf import settings
if TYPE_CHECKING:
from ..interface import GatewayConfig
def get_supported_currencies(config: "GatewayConfig", gateway_name: str) -> List[str]:
supp_currencies = config.supported_currencies
if not supp_currencies:
currencies = [settings.DEFAULT_CURRENCY]
warnings.warn(
f"Default currency used for {gateway_name}. "
"DEFAULT_CURRENCY setting is deprecated, "
"please configure supported currencies for this gateway."
)
else:
currencies = [c.strip() for c in supp_currencies.split(",")]
return currencies
|
import warnings
from typing import TYPE_CHECKING, List
from django.conf import settings
if TYPE_CHECKING:
from ..interface import GatewayConfig
def get_supported_currencies(config: "GatewayConfig", gateway_name: str) -> List[str]:
supp_currencies = config.supported_currencies
if not supp_currencies:
currencies = [settings.DEFAULT_CURRENCY]
warnings.warn(
f"Default currency used for {gateway_name}. "
"DEFAULT_CURRENCY setting is deprecated, "
"please configure supported currencies for this gateway."
)
else:
currencies = [c.strip() for c in supp_currencies.split(",")]
return currencies
Update supported currencies in gateways, set default currency for dummy gatewayimport re
import warnings
from typing import TYPE_CHECKING, List
from django.conf import settings
if TYPE_CHECKING:
from ..interface import GatewayConfig
def get_supported_currencies(config: "GatewayConfig", gateway_name: str) -> List[str]:
supp_currencies = config.supported_currencies
if not supp_currencies:
currencies = [settings.DEFAULT_CURRENCY]
warnings.warn(
f"Default currency used for {gateway_name}. "
"DEFAULT_CURRENCY setting is deprecated, "
"please configure supported currencies for this gateway."
)
else:
currencies = [c.strip() for c in supp_currencies.split(",")]
return currencies
|
<commit_before>import warnings
from typing import TYPE_CHECKING, List
from django.conf import settings
if TYPE_CHECKING:
from ..interface import GatewayConfig
def get_supported_currencies(config: "GatewayConfig", gateway_name: str) -> List[str]:
supp_currencies = config.supported_currencies
if not supp_currencies:
currencies = [settings.DEFAULT_CURRENCY]
warnings.warn(
f"Default currency used for {gateway_name}. "
"DEFAULT_CURRENCY setting is deprecated, "
"please configure supported currencies for this gateway."
)
else:
currencies = [c.strip() for c in supp_currencies.split(",")]
return currencies
<commit_msg>Update supported currencies in gateways, set default currency for dummy gateway<commit_after>import re
import warnings
from typing import TYPE_CHECKING, List
from django.conf import settings
if TYPE_CHECKING:
from ..interface import GatewayConfig
def get_supported_currencies(config: "GatewayConfig", gateway_name: str) -> List[str]:
supp_currencies = config.supported_currencies
if not supp_currencies:
currencies = [settings.DEFAULT_CURRENCY]
warnings.warn(
f"Default currency used for {gateway_name}. "
"DEFAULT_CURRENCY setting is deprecated, "
"please configure supported currencies for this gateway."
)
else:
currencies = [c.strip() for c in supp_currencies.split(",")]
return currencies
|
ea5499d36ef84e879737fd8c6d6148dd8305c356
|
bookshelf/search_indexes.py
|
bookshelf/search_indexes.py
|
# Imports #####################################################################
from haystack import indexes
from .models import Book
# Classes #####################################################################
class BookIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
title = indexes.CharField(model_attr='title')
subtitle = indexes.CharField(model_attr='subtitle')
isbn = indexes.CharField(model_attr='isbn')
author = indexes.CharField(model_attr='author')
illustrator = indexes.CharField(model_attr='illustrator')
editor = indexes.CharField(model_attr='editor')
collection = indexes.CharField(model_attr='collection')
keywords = indexes.CharField(model_attr='keywords')
synopsis = indexes.CharField(model_attr='synopsis')
def get_model(self):
return Book
|
# Imports #####################################################################
from haystack import indexes
from .models import Book
# Classes #####################################################################
class BookIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
title = indexes.CharField(model_attr='title')
subtitle = indexes.CharField(model_attr='subtitle', blank=True)
isbn = indexes.CharField(model_attr='isbn')
author = indexes.CharField(model_attr='author')
illustrator = indexes.CharField(model_attr='illustrator', blank=True)
editor = indexes.CharField(model_attr='editor')
collection = indexes.CharField(model_attr='collection', blank=True)
keywords = indexes.CharField(model_attr='keywords', blank=True)
synopsis = indexes.CharField(model_attr='synopsis')
def get_model(self):
return Book
|
Allow some blank fields in Booki model from search
|
Allow some blank fields in Booki model from search
|
Python
|
agpl-3.0
|
antoviaque/plin,antoviaque/plin,antoviaque/plin
|
# Imports #####################################################################
from haystack import indexes
from .models import Book
# Classes #####################################################################
class BookIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
title = indexes.CharField(model_attr='title')
subtitle = indexes.CharField(model_attr='subtitle')
isbn = indexes.CharField(model_attr='isbn')
author = indexes.CharField(model_attr='author')
illustrator = indexes.CharField(model_attr='illustrator')
editor = indexes.CharField(model_attr='editor')
collection = indexes.CharField(model_attr='collection')
keywords = indexes.CharField(model_attr='keywords')
synopsis = indexes.CharField(model_attr='synopsis')
def get_model(self):
return Book
Allow some blank fields in Booki model from search
|
# Imports #####################################################################
from haystack import indexes
from .models import Book
# Classes #####################################################################
class BookIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
title = indexes.CharField(model_attr='title')
subtitle = indexes.CharField(model_attr='subtitle', blank=True)
isbn = indexes.CharField(model_attr='isbn')
author = indexes.CharField(model_attr='author')
illustrator = indexes.CharField(model_attr='illustrator', blank=True)
editor = indexes.CharField(model_attr='editor')
collection = indexes.CharField(model_attr='collection', blank=True)
keywords = indexes.CharField(model_attr='keywords', blank=True)
synopsis = indexes.CharField(model_attr='synopsis')
def get_model(self):
return Book
|
<commit_before>
# Imports #####################################################################
from haystack import indexes
from .models import Book
# Classes #####################################################################
class BookIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
title = indexes.CharField(model_attr='title')
subtitle = indexes.CharField(model_attr='subtitle')
isbn = indexes.CharField(model_attr='isbn')
author = indexes.CharField(model_attr='author')
illustrator = indexes.CharField(model_attr='illustrator')
editor = indexes.CharField(model_attr='editor')
collection = indexes.CharField(model_attr='collection')
keywords = indexes.CharField(model_attr='keywords')
synopsis = indexes.CharField(model_attr='synopsis')
def get_model(self):
return Book
<commit_msg>Allow some blank fields in Booki model from search<commit_after>
|
# Imports #####################################################################
from haystack import indexes
from .models import Book
# Classes #####################################################################
class BookIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
title = indexes.CharField(model_attr='title')
subtitle = indexes.CharField(model_attr='subtitle', blank=True)
isbn = indexes.CharField(model_attr='isbn')
author = indexes.CharField(model_attr='author')
illustrator = indexes.CharField(model_attr='illustrator', blank=True)
editor = indexes.CharField(model_attr='editor')
collection = indexes.CharField(model_attr='collection', blank=True)
keywords = indexes.CharField(model_attr='keywords', blank=True)
synopsis = indexes.CharField(model_attr='synopsis')
def get_model(self):
return Book
|
# Imports #####################################################################
from haystack import indexes
from .models import Book
# Classes #####################################################################
class BookIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
title = indexes.CharField(model_attr='title')
subtitle = indexes.CharField(model_attr='subtitle')
isbn = indexes.CharField(model_attr='isbn')
author = indexes.CharField(model_attr='author')
illustrator = indexes.CharField(model_attr='illustrator')
editor = indexes.CharField(model_attr='editor')
collection = indexes.CharField(model_attr='collection')
keywords = indexes.CharField(model_attr='keywords')
synopsis = indexes.CharField(model_attr='synopsis')
def get_model(self):
return Book
Allow some blank fields in Booki model from search
# Imports #####################################################################
from haystack import indexes
from .models import Book
# Classes #####################################################################
class BookIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
title = indexes.CharField(model_attr='title')
subtitle = indexes.CharField(model_attr='subtitle', blank=True)
isbn = indexes.CharField(model_attr='isbn')
author = indexes.CharField(model_attr='author')
illustrator = indexes.CharField(model_attr='illustrator', blank=True)
editor = indexes.CharField(model_attr='editor')
collection = indexes.CharField(model_attr='collection', blank=True)
keywords = indexes.CharField(model_attr='keywords', blank=True)
synopsis = indexes.CharField(model_attr='synopsis')
def get_model(self):
return Book
|
<commit_before>
# Imports #####################################################################
from haystack import indexes
from .models import Book
# Classes #####################################################################
class BookIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
title = indexes.CharField(model_attr='title')
subtitle = indexes.CharField(model_attr='subtitle')
isbn = indexes.CharField(model_attr='isbn')
author = indexes.CharField(model_attr='author')
illustrator = indexes.CharField(model_attr='illustrator')
editor = indexes.CharField(model_attr='editor')
collection = indexes.CharField(model_attr='collection')
keywords = indexes.CharField(model_attr='keywords')
synopsis = indexes.CharField(model_attr='synopsis')
def get_model(self):
return Book
<commit_msg>Allow some blank fields in Booki model from search<commit_after>
# Imports #####################################################################
from haystack import indexes
from .models import Book
# Classes #####################################################################
class BookIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
title = indexes.CharField(model_attr='title')
subtitle = indexes.CharField(model_attr='subtitle', blank=True)
isbn = indexes.CharField(model_attr='isbn')
author = indexes.CharField(model_attr='author')
illustrator = indexes.CharField(model_attr='illustrator', blank=True)
editor = indexes.CharField(model_attr='editor')
collection = indexes.CharField(model_attr='collection', blank=True)
keywords = indexes.CharField(model_attr='keywords', blank=True)
synopsis = indexes.CharField(model_attr='synopsis')
def get_model(self):
return Book
|
03d8b2ca0b070f9247376c40e1f3a4655e579dd0
|
kibitzr/notifier/telegram-split.py
|
kibitzr/notifier/telegram-split.py
|
from __future__ import absolute_import
import logging
from .telegram import TelegramBot
logger = logging.getLogger(__name__)
class TelegramBotSplit(TelegramBot):
def __init__(self, chat_id=None, split_on="\n"):
self.split_on = split_on
super(TelegramBotSplit, self).__init__(chat_id=chat_id)
def post(self, report, **kwargs):
"""Overwrite post to split message on token"""
for m in report.split(self.split_on):
self.bot.send_message(
self.chat_id,
m,
parse_mode='Markdown',
)
def notify_factory(conf, value):
try:
chat_id = value['chat-id']
except (TypeError, KeyError):
chat_id = value
try:
split_on = value['split-on']
except (TypeError, KeyError):
split_on = "\n"
print(split_on)
return TelegramBotSplit(chat_id=chat_id, split_on=split_on).post
def chat_id():
bot = TelegramBotSplit()
print(bot.chat_id)
|
from __future__ import absolute_import
import logging
from .telegram import TelegramBot
logger = logging.getLogger(__name__)
class TelegramBotSplit(TelegramBot):
def __init__(self, chat_id=None, split_on="\n"):
self.split_on = split_on
super(TelegramBotSplit, self).__init__(chat_id=chat_id)
def post(self, report, **kwargs):
"""Overwrite post to split message on token"""
for m in report.split(self.split_on):
super(TelegramBotSplit, self).post(m)
def notify_factory(conf, value):
try:
chat_id = value['chat-id']
except (TypeError, KeyError):
chat_id = value
try:
split_on = value['split-on']
except (TypeError, KeyError):
split_on = "\n"
return TelegramBotSplit(chat_id=chat_id, split_on=split_on).post
def chat_id():
bot = TelegramBotSplit()
print(bot.chat_id)
|
Use parent 'post' function to actually send message
|
Use parent 'post' function to actually send message
|
Python
|
mit
|
kibitzr/kibitzr,kibitzr/kibitzr
|
from __future__ import absolute_import
import logging
from .telegram import TelegramBot
logger = logging.getLogger(__name__)
class TelegramBotSplit(TelegramBot):
def __init__(self, chat_id=None, split_on="\n"):
self.split_on = split_on
super(TelegramBotSplit, self).__init__(chat_id=chat_id)
def post(self, report, **kwargs):
"""Overwrite post to split message on token"""
for m in report.split(self.split_on):
self.bot.send_message(
self.chat_id,
m,
parse_mode='Markdown',
)
def notify_factory(conf, value):
try:
chat_id = value['chat-id']
except (TypeError, KeyError):
chat_id = value
try:
split_on = value['split-on']
except (TypeError, KeyError):
split_on = "\n"
print(split_on)
return TelegramBotSplit(chat_id=chat_id, split_on=split_on).post
def chat_id():
bot = TelegramBotSplit()
print(bot.chat_id)
Use parent 'post' function to actually send message
|
from __future__ import absolute_import
import logging
from .telegram import TelegramBot
logger = logging.getLogger(__name__)
class TelegramBotSplit(TelegramBot):
def __init__(self, chat_id=None, split_on="\n"):
self.split_on = split_on
super(TelegramBotSplit, self).__init__(chat_id=chat_id)
def post(self, report, **kwargs):
"""Overwrite post to split message on token"""
for m in report.split(self.split_on):
super(TelegramBotSplit, self).post(m)
def notify_factory(conf, value):
try:
chat_id = value['chat-id']
except (TypeError, KeyError):
chat_id = value
try:
split_on = value['split-on']
except (TypeError, KeyError):
split_on = "\n"
return TelegramBotSplit(chat_id=chat_id, split_on=split_on).post
def chat_id():
bot = TelegramBotSplit()
print(bot.chat_id)
|
<commit_before>from __future__ import absolute_import
import logging
from .telegram import TelegramBot
logger = logging.getLogger(__name__)
class TelegramBotSplit(TelegramBot):
def __init__(self, chat_id=None, split_on="\n"):
self.split_on = split_on
super(TelegramBotSplit, self).__init__(chat_id=chat_id)
def post(self, report, **kwargs):
"""Overwrite post to split message on token"""
for m in report.split(self.split_on):
self.bot.send_message(
self.chat_id,
m,
parse_mode='Markdown',
)
def notify_factory(conf, value):
try:
chat_id = value['chat-id']
except (TypeError, KeyError):
chat_id = value
try:
split_on = value['split-on']
except (TypeError, KeyError):
split_on = "\n"
print(split_on)
return TelegramBotSplit(chat_id=chat_id, split_on=split_on).post
def chat_id():
bot = TelegramBotSplit()
print(bot.chat_id)
<commit_msg>Use parent 'post' function to actually send message<commit_after>
|
from __future__ import absolute_import
import logging
from .telegram import TelegramBot
logger = logging.getLogger(__name__)
class TelegramBotSplit(TelegramBot):
def __init__(self, chat_id=None, split_on="\n"):
self.split_on = split_on
super(TelegramBotSplit, self).__init__(chat_id=chat_id)
def post(self, report, **kwargs):
"""Overwrite post to split message on token"""
for m in report.split(self.split_on):
super(TelegramBotSplit, self).post(m)
def notify_factory(conf, value):
try:
chat_id = value['chat-id']
except (TypeError, KeyError):
chat_id = value
try:
split_on = value['split-on']
except (TypeError, KeyError):
split_on = "\n"
return TelegramBotSplit(chat_id=chat_id, split_on=split_on).post
def chat_id():
bot = TelegramBotSplit()
print(bot.chat_id)
|
from __future__ import absolute_import
import logging
from .telegram import TelegramBot
logger = logging.getLogger(__name__)
class TelegramBotSplit(TelegramBot):
def __init__(self, chat_id=None, split_on="\n"):
self.split_on = split_on
super(TelegramBotSplit, self).__init__(chat_id=chat_id)
def post(self, report, **kwargs):
"""Overwrite post to split message on token"""
for m in report.split(self.split_on):
self.bot.send_message(
self.chat_id,
m,
parse_mode='Markdown',
)
def notify_factory(conf, value):
try:
chat_id = value['chat-id']
except (TypeError, KeyError):
chat_id = value
try:
split_on = value['split-on']
except (TypeError, KeyError):
split_on = "\n"
print(split_on)
return TelegramBotSplit(chat_id=chat_id, split_on=split_on).post
def chat_id():
bot = TelegramBotSplit()
print(bot.chat_id)
Use parent 'post' function to actually send messagefrom __future__ import absolute_import
import logging
from .telegram import TelegramBot
logger = logging.getLogger(__name__)
class TelegramBotSplit(TelegramBot):
def __init__(self, chat_id=None, split_on="\n"):
self.split_on = split_on
super(TelegramBotSplit, self).__init__(chat_id=chat_id)
def post(self, report, **kwargs):
"""Overwrite post to split message on token"""
for m in report.split(self.split_on):
super(TelegramBotSplit, self).post(m)
def notify_factory(conf, value):
try:
chat_id = value['chat-id']
except (TypeError, KeyError):
chat_id = value
try:
split_on = value['split-on']
except (TypeError, KeyError):
split_on = "\n"
return TelegramBotSplit(chat_id=chat_id, split_on=split_on).post
def chat_id():
bot = TelegramBotSplit()
print(bot.chat_id)
|
<commit_before>from __future__ import absolute_import
import logging
from .telegram import TelegramBot
logger = logging.getLogger(__name__)
class TelegramBotSplit(TelegramBot):
def __init__(self, chat_id=None, split_on="\n"):
self.split_on = split_on
super(TelegramBotSplit, self).__init__(chat_id=chat_id)
def post(self, report, **kwargs):
"""Overwrite post to split message on token"""
for m in report.split(self.split_on):
self.bot.send_message(
self.chat_id,
m,
parse_mode='Markdown',
)
def notify_factory(conf, value):
try:
chat_id = value['chat-id']
except (TypeError, KeyError):
chat_id = value
try:
split_on = value['split-on']
except (TypeError, KeyError):
split_on = "\n"
print(split_on)
return TelegramBotSplit(chat_id=chat_id, split_on=split_on).post
def chat_id():
bot = TelegramBotSplit()
print(bot.chat_id)
<commit_msg>Use parent 'post' function to actually send message<commit_after>from __future__ import absolute_import
import logging
from .telegram import TelegramBot
logger = logging.getLogger(__name__)
class TelegramBotSplit(TelegramBot):
def __init__(self, chat_id=None, split_on="\n"):
self.split_on = split_on
super(TelegramBotSplit, self).__init__(chat_id=chat_id)
def post(self, report, **kwargs):
"""Overwrite post to split message on token"""
for m in report.split(self.split_on):
super(TelegramBotSplit, self).post(m)
def notify_factory(conf, value):
try:
chat_id = value['chat-id']
except (TypeError, KeyError):
chat_id = value
try:
split_on = value['split-on']
except (TypeError, KeyError):
split_on = "\n"
return TelegramBotSplit(chat_id=chat_id, split_on=split_on).post
def chat_id():
bot = TelegramBotSplit()
print(bot.chat_id)
|
0b7cdb4b5a6dab5f2983313d745bea84ff302e01
|
Machines/wxMachines.py
|
Machines/wxMachines.py
|
# -*- coding: utf-8 -*-
# Import
# Import for changing the Python Path for importing Gestalt
import sys
import os
# Change the Python Path
base_dir = os.path.dirname(__file__) or '.'
appdir = os.path.abspath(os.path.join(base_dir, os.pardir))
sys.path.insert(0, appdir)
# Import Gestalt
from gestalt import nodes
from gestalt import interfaces
from gestalt import machines
from gestalt import functions
from gestalt.machines import elements
from gestalt.machines import kinematics
from gestalt.machines import state
from gestalt.utilities import notice
from gestalt.publish import rpc #remote procedure call dispatcher
# Classes
class Axis():
def __init__(self):
self.linear = True
self.rotary = False
class Machine():
def __init__(self):
self.axes = []
if __name__ == '__main__':
pass
|
# -*- coding: utf-8 -*-
# Import
# Import for changing the Python Path for importing Gestalt
import sys
import os
# Change the Python Path
base_dir = os.path.dirname(__file__) or '.'
appdir = os.path.abspath(os.path.join(base_dir, os.pardir))
sys.path.insert(0, appdir)
# Import Gestalt
from gestalt import nodes
from gestalt import interfaces
from gestalt import machines
from gestalt import functions
from gestalt.machines import elements
from gestalt.machines import kinematics
from gestalt.machines import state
from gestalt.utilities import notice
from gestalt.publish import rpc #remote procedure call dispatcher
# Classes
# A class for each Node / Axis
class wxNode():
def __init__(self):
self.linear = True
self.rotary = False
# A basic class for each Machine
class wxMachine():
def __init__(self):
self.axes = []
# Solo/Independent Nodes
# http://pygestalt.org/VMC_IEM.pdf
# p. 35
class wxSolo_Independent():
def __init__(self):
pass
# Solo/Gestalt Nodes
# http://pygestalt.org/VMC_IEM.pdf
# p. 36
class wxSolo_Gestalt():
def __init__(self):
pass
# Networked/Gestalt Nodes
# http://pygestalt.org/VMC_IEM.pdf
# p. 36
class wxNetworked_Gestalt(wxSolo_Gestalt):
def __init__(self):
pass
# Managed/Gestalt Nodes
# http://pygestalt.org/VMC_IEM.pdf
# p. 37
class wxManaged_Gestalt():
def __init__(self):
pass
# Compound Nodes
# http://pygestalt.org/VMC_IEM.pdf
# p. 38
class wxCompound_Nodes():
def __init__(self):
pass
if __name__ == '__main__':
pass
|
Improve general classes with input from Ilan Ellison Moyer's thesis
|
Improve general classes with input from Ilan Ellison Moyer's thesis
|
Python
|
mit
|
openp2pdesign/wxGestalt
|
# -*- coding: utf-8 -*-
# Import
# Import for changing the Python Path for importing Gestalt
import sys
import os
# Change the Python Path
base_dir = os.path.dirname(__file__) or '.'
appdir = os.path.abspath(os.path.join(base_dir, os.pardir))
sys.path.insert(0, appdir)
# Import Gestalt
from gestalt import nodes
from gestalt import interfaces
from gestalt import machines
from gestalt import functions
from gestalt.machines import elements
from gestalt.machines import kinematics
from gestalt.machines import state
from gestalt.utilities import notice
from gestalt.publish import rpc #remote procedure call dispatcher
# Classes
class Axis():
def __init__(self):
self.linear = True
self.rotary = False
class Machine():
def __init__(self):
self.axes = []
if __name__ == '__main__':
pass
Improve general classes with input from Ilan Ellison Moyer's thesis
|
# -*- coding: utf-8 -*-
# Import
# Import for changing the Python Path for importing Gestalt
import sys
import os
# Change the Python Path
base_dir = os.path.dirname(__file__) or '.'
appdir = os.path.abspath(os.path.join(base_dir, os.pardir))
sys.path.insert(0, appdir)
# Import Gestalt
from gestalt import nodes
from gestalt import interfaces
from gestalt import machines
from gestalt import functions
from gestalt.machines import elements
from gestalt.machines import kinematics
from gestalt.machines import state
from gestalt.utilities import notice
from gestalt.publish import rpc #remote procedure call dispatcher
# Classes
# A class for each Node / Axis
class wxNode():
def __init__(self):
self.linear = True
self.rotary = False
# A basic class for each Machine
class wxMachine():
def __init__(self):
self.axes = []
# Solo/Independent Nodes
# http://pygestalt.org/VMC_IEM.pdf
# p. 35
class wxSolo_Independent():
def __init__(self):
pass
# Solo/Gestalt Nodes
# http://pygestalt.org/VMC_IEM.pdf
# p. 36
class wxSolo_Gestalt():
def __init__(self):
pass
# Networked/Gestalt Nodes
# http://pygestalt.org/VMC_IEM.pdf
# p. 36
class wxNetworked_Gestalt(wxSolo_Gestalt):
def __init__(self):
pass
# Managed/Gestalt Nodes
# http://pygestalt.org/VMC_IEM.pdf
# p. 37
class wxManaged_Gestalt():
def __init__(self):
pass
# Compound Nodes
# http://pygestalt.org/VMC_IEM.pdf
# p. 38
class wxCompound_Nodes():
def __init__(self):
pass
if __name__ == '__main__':
pass
|
<commit_before># -*- coding: utf-8 -*-
# Import
# Import for changing the Python Path for importing Gestalt
import sys
import os
# Change the Python Path
base_dir = os.path.dirname(__file__) or '.'
appdir = os.path.abspath(os.path.join(base_dir, os.pardir))
sys.path.insert(0, appdir)
# Import Gestalt
from gestalt import nodes
from gestalt import interfaces
from gestalt import machines
from gestalt import functions
from gestalt.machines import elements
from gestalt.machines import kinematics
from gestalt.machines import state
from gestalt.utilities import notice
from gestalt.publish import rpc #remote procedure call dispatcher
# Classes
class Axis():
def __init__(self):
self.linear = True
self.rotary = False
class Machine():
def __init__(self):
self.axes = []
if __name__ == '__main__':
pass
<commit_msg>Improve general classes with input from Ilan Ellison Moyer's thesis<commit_after>
|
# -*- coding: utf-8 -*-
# Import
# Import for changing the Python Path for importing Gestalt
import sys
import os
# Change the Python Path
base_dir = os.path.dirname(__file__) or '.'
appdir = os.path.abspath(os.path.join(base_dir, os.pardir))
sys.path.insert(0, appdir)
# Import Gestalt
from gestalt import nodes
from gestalt import interfaces
from gestalt import machines
from gestalt import functions
from gestalt.machines import elements
from gestalt.machines import kinematics
from gestalt.machines import state
from gestalt.utilities import notice
from gestalt.publish import rpc #remote procedure call dispatcher
# Classes
# A class for each Node / Axis
class wxNode():
def __init__(self):
self.linear = True
self.rotary = False
# A basic class for each Machine
class wxMachine():
def __init__(self):
self.axes = []
# Solo/Independent Nodes
# http://pygestalt.org/VMC_IEM.pdf
# p. 35
class wxSolo_Independent():
def __init__(self):
pass
# Solo/Gestalt Nodes
# http://pygestalt.org/VMC_IEM.pdf
# p. 36
class wxSolo_Gestalt():
def __init__(self):
pass
# Networked/Gestalt Nodes
# http://pygestalt.org/VMC_IEM.pdf
# p. 36
class wxNetworked_Gestalt(wxSolo_Gestalt):
def __init__(self):
pass
# Managed/Gestalt Nodes
# http://pygestalt.org/VMC_IEM.pdf
# p. 37
class wxManaged_Gestalt():
def __init__(self):
pass
# Compound Nodes
# http://pygestalt.org/VMC_IEM.pdf
# p. 38
class wxCompound_Nodes():
def __init__(self):
pass
if __name__ == '__main__':
pass
|
# -*- coding: utf-8 -*-
# Import
# Import for changing the Python Path for importing Gestalt
import sys
import os
# Change the Python Path
base_dir = os.path.dirname(__file__) or '.'
appdir = os.path.abspath(os.path.join(base_dir, os.pardir))
sys.path.insert(0, appdir)
# Import Gestalt
from gestalt import nodes
from gestalt import interfaces
from gestalt import machines
from gestalt import functions
from gestalt.machines import elements
from gestalt.machines import kinematics
from gestalt.machines import state
from gestalt.utilities import notice
from gestalt.publish import rpc #remote procedure call dispatcher
# Classes
class Axis():
def __init__(self):
self.linear = True
self.rotary = False
class Machine():
def __init__(self):
self.axes = []
if __name__ == '__main__':
pass
Improve general classes with input from Ilan Ellison Moyer's thesis# -*- coding: utf-8 -*-
# Import
# Import for changing the Python Path for importing Gestalt
import sys
import os
# Change the Python Path
base_dir = os.path.dirname(__file__) or '.'
appdir = os.path.abspath(os.path.join(base_dir, os.pardir))
sys.path.insert(0, appdir)
# Import Gestalt
from gestalt import nodes
from gestalt import interfaces
from gestalt import machines
from gestalt import functions
from gestalt.machines import elements
from gestalt.machines import kinematics
from gestalt.machines import state
from gestalt.utilities import notice
from gestalt.publish import rpc #remote procedure call dispatcher
# Classes
# A class for each Node / Axis
class wxNode():
def __init__(self):
self.linear = True
self.rotary = False
# A basic class for each Machine
class wxMachine():
def __init__(self):
self.axes = []
# Solo/Independent Nodes
# http://pygestalt.org/VMC_IEM.pdf
# p. 35
class wxSolo_Independent():
def __init__(self):
pass
# Solo/Gestalt Nodes
# http://pygestalt.org/VMC_IEM.pdf
# p. 36
class wxSolo_Gestalt():
def __init__(self):
pass
# Networked/Gestalt Nodes
# http://pygestalt.org/VMC_IEM.pdf
# p. 36
class wxNetworked_Gestalt(wxSolo_Gestalt):
def __init__(self):
pass
# Managed/Gestalt Nodes
# http://pygestalt.org/VMC_IEM.pdf
# p. 37
class wxManaged_Gestalt():
def __init__(self):
pass
# Compound Nodes
# http://pygestalt.org/VMC_IEM.pdf
# p. 38
class wxCompound_Nodes():
def __init__(self):
pass
if __name__ == '__main__':
pass
|
<commit_before># -*- coding: utf-8 -*-
# Import
# Import for changing the Python Path for importing Gestalt
import sys
import os
# Change the Python Path
base_dir = os.path.dirname(__file__) or '.'
appdir = os.path.abspath(os.path.join(base_dir, os.pardir))
sys.path.insert(0, appdir)
# Import Gestalt
from gestalt import nodes
from gestalt import interfaces
from gestalt import machines
from gestalt import functions
from gestalt.machines import elements
from gestalt.machines import kinematics
from gestalt.machines import state
from gestalt.utilities import notice
from gestalt.publish import rpc #remote procedure call dispatcher
# Classes
class Axis():
def __init__(self):
self.linear = True
self.rotary = False
class Machine():
def __init__(self):
self.axes = []
if __name__ == '__main__':
pass
<commit_msg>Improve general classes with input from Ilan Ellison Moyer's thesis<commit_after># -*- coding: utf-8 -*-
# Import
# Import for changing the Python Path for importing Gestalt
import sys
import os
# Change the Python Path
base_dir = os.path.dirname(__file__) or '.'
appdir = os.path.abspath(os.path.join(base_dir, os.pardir))
sys.path.insert(0, appdir)
# Import Gestalt
from gestalt import nodes
from gestalt import interfaces
from gestalt import machines
from gestalt import functions
from gestalt.machines import elements
from gestalt.machines import kinematics
from gestalt.machines import state
from gestalt.utilities import notice
from gestalt.publish import rpc #remote procedure call dispatcher
# Classes
# A class for each Node / Axis
class wxNode():
def __init__(self):
self.linear = True
self.rotary = False
# A basic class for each Machine
class wxMachine():
def __init__(self):
self.axes = []
# Solo/Independent Nodes
# http://pygestalt.org/VMC_IEM.pdf
# p. 35
class wxSolo_Independent():
def __init__(self):
pass
# Solo/Gestalt Nodes
# http://pygestalt.org/VMC_IEM.pdf
# p. 36
class wxSolo_Gestalt():
def __init__(self):
pass
# Networked/Gestalt Nodes
# http://pygestalt.org/VMC_IEM.pdf
# p. 36
class wxNetworked_Gestalt(wxSolo_Gestalt):
def __init__(self):
pass
# Managed/Gestalt Nodes
# http://pygestalt.org/VMC_IEM.pdf
# p. 37
class wxManaged_Gestalt():
def __init__(self):
pass
# Compound Nodes
# http://pygestalt.org/VMC_IEM.pdf
# p. 38
class wxCompound_Nodes():
def __init__(self):
pass
if __name__ == '__main__':
pass
|
610d9a3c58f70d8b2002403003b705dd57513d92
|
manage.py
|
manage.py
|
#!/usr/bin/env python
import os
import sys
from django.core.management import execute_from_command_line
from wger.main import get_user_config_path, setup_django_environment
if __name__ == "__main__":
setup_django_environment(
get_user_config_path('wger', 'settings.py'))
#os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wger.workout_manager.settings")
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
import sys
from django.core.management import execute_from_command_line
from wger.utils.main import (
setup_django_environment,
get_user_config_path
)
if __name__ == "__main__":
setup_django_environment(
get_user_config_path('wger', 'settings.py'))
#os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wger.workout_manager.settings")
execute_from_command_line(sys.argv)
|
Change imports of helper functions
|
Change imports of helper functions
These are now in the utils app
|
Python
|
agpl-3.0
|
rolandgeider/wger,kjagoo/wger_stark,rolandgeider/wger,wger-project/wger,kjagoo/wger_stark,kjagoo/wger_stark,petervanderdoes/wger,petervanderdoes/wger,rolandgeider/wger,petervanderdoes/wger,DeveloperMal/wger,wger-project/wger,wger-project/wger,rolandgeider/wger,DeveloperMal/wger,DeveloperMal/wger,wger-project/wger,petervanderdoes/wger,kjagoo/wger_stark,DeveloperMal/wger
|
#!/usr/bin/env python
import os
import sys
from django.core.management import execute_from_command_line
from wger.main import get_user_config_path, setup_django_environment
if __name__ == "__main__":
setup_django_environment(
get_user_config_path('wger', 'settings.py'))
#os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wger.workout_manager.settings")
execute_from_command_line(sys.argv)
Change imports of helper functions
These are now in the utils app
|
#!/usr/bin/env python
import sys
from django.core.management import execute_from_command_line
from wger.utils.main import (
setup_django_environment,
get_user_config_path
)
if __name__ == "__main__":
setup_django_environment(
get_user_config_path('wger', 'settings.py'))
#os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wger.workout_manager.settings")
execute_from_command_line(sys.argv)
|
<commit_before>#!/usr/bin/env python
import os
import sys
from django.core.management import execute_from_command_line
from wger.main import get_user_config_path, setup_django_environment
if __name__ == "__main__":
setup_django_environment(
get_user_config_path('wger', 'settings.py'))
#os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wger.workout_manager.settings")
execute_from_command_line(sys.argv)
<commit_msg>Change imports of helper functions
These are now in the utils app<commit_after>
|
#!/usr/bin/env python
import sys
from django.core.management import execute_from_command_line
from wger.utils.main import (
setup_django_environment,
get_user_config_path
)
if __name__ == "__main__":
setup_django_environment(
get_user_config_path('wger', 'settings.py'))
#os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wger.workout_manager.settings")
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
import os
import sys
from django.core.management import execute_from_command_line
from wger.main import get_user_config_path, setup_django_environment
if __name__ == "__main__":
setup_django_environment(
get_user_config_path('wger', 'settings.py'))
#os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wger.workout_manager.settings")
execute_from_command_line(sys.argv)
Change imports of helper functions
These are now in the utils app#!/usr/bin/env python
import sys
from django.core.management import execute_from_command_line
from wger.utils.main import (
setup_django_environment,
get_user_config_path
)
if __name__ == "__main__":
setup_django_environment(
get_user_config_path('wger', 'settings.py'))
#os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wger.workout_manager.settings")
execute_from_command_line(sys.argv)
|
<commit_before>#!/usr/bin/env python
import os
import sys
from django.core.management import execute_from_command_line
from wger.main import get_user_config_path, setup_django_environment
if __name__ == "__main__":
setup_django_environment(
get_user_config_path('wger', 'settings.py'))
#os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wger.workout_manager.settings")
execute_from_command_line(sys.argv)
<commit_msg>Change imports of helper functions
These are now in the utils app<commit_after>#!/usr/bin/env python
import sys
from django.core.management import execute_from_command_line
from wger.utils.main import (
setup_django_environment,
get_user_config_path
)
if __name__ == "__main__":
setup_django_environment(
get_user_config_path('wger', 'settings.py'))
#os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wger.workout_manager.settings")
execute_from_command_line(sys.argv)
|
8ae1a0793e1938cf845b249d0133e7fc352cda5b
|
django/website/logframe/tests/test_period_utils.py
|
django/website/logframe/tests/test_period_utils.py
|
from datetime import date
from ..period_utils import get_month_shift, get_periods
def test_get_month_shift_handles_december():
new_month, _ = get_month_shift(12, 1)
assert 12 == new_month
def test_get_periods_when_end_date_before_period_end():
# This should produce eight periods, 2 for each of the years from
# 2015 to 2018 inclusive
periods = get_periods(date(2015, 01, 01), date(2018, 12, 31), 1, 2)
assert 8 == len(periods)
|
from datetime import date, timedelta
from ..period_utils import get_month_shift, get_periods, periods_intersect
def test_get_month_shift_handles_december():
new_month, _ = get_month_shift(12, 1)
assert 12 == new_month
def test_get_periods_when_end_date_before_period_end():
# This should produce eight periods, 2 for each of the years from
# 2015 to 2018 inclusive
periods = get_periods(date(2015, 01, 01), date(2018, 12, 31), 1, 2)
assert 8 == len(periods)
def test_periods_instersect_for_no_second_period():
s = date.today() - timedelta(days=1)
e = date.today() + timedelta(days=1)
assert periods_intersect(s, e, None, None)
def test_periods_intersect_for_when_second_start_date_less_than_first_end_date():
s = date.today() - timedelta(days=1)
e = date.today() + timedelta(days=1)
x = date.today()
assert periods_intersect(s, e, x, None)
def test_periods_intersect_for_when_second_end_date_greater_than_first_start_date():
s = date.today() - timedelta(days=1)
e = date.today() + timedelta(days=1)
y = date.today()
assert periods_intersect(s, e, None, y)
def test_periods_intersect_for_when_second_start_date_in_first_period():
s = date.today() - timedelta(days=1)
e = date.today() + timedelta(days=1)
x = date.today()
y = date.today()
assert periods_intersect(s, e, x, y)
def test_periods_intersect_for_when_period_contains_first_period():
s = date.today() - timedelta(days=1)
e = date.today() + timedelta(days=1)
x = date.today() - timedelta(days=2)
y = date.today() + timedelta(days=2)
assert periods_intersect(s, e, x, y)
|
Add tests for period_intersect logic
|
Add tests for period_intersect logic
|
Python
|
agpl-3.0
|
aptivate/alfie,daniell/kashana,aptivate/alfie,daniell/kashana,aptivate/kashana,aptivate/kashana,daniell/kashana,daniell/kashana,aptivate/kashana,aptivate/alfie,aptivate/alfie,aptivate/kashana
|
from datetime import date
from ..period_utils import get_month_shift, get_periods
def test_get_month_shift_handles_december():
new_month, _ = get_month_shift(12, 1)
assert 12 == new_month
def test_get_periods_when_end_date_before_period_end():
# This should produce eight periods, 2 for each of the years from
# 2015 to 2018 inclusive
periods = get_periods(date(2015, 01, 01), date(2018, 12, 31), 1, 2)
assert 8 == len(periods)
Add tests for period_intersect logic
|
from datetime import date, timedelta
from ..period_utils import get_month_shift, get_periods, periods_intersect
def test_get_month_shift_handles_december():
new_month, _ = get_month_shift(12, 1)
assert 12 == new_month
def test_get_periods_when_end_date_before_period_end():
# This should produce eight periods, 2 for each of the years from
# 2015 to 2018 inclusive
periods = get_periods(date(2015, 01, 01), date(2018, 12, 31), 1, 2)
assert 8 == len(periods)
def test_periods_instersect_for_no_second_period():
s = date.today() - timedelta(days=1)
e = date.today() + timedelta(days=1)
assert periods_intersect(s, e, None, None)
def test_periods_intersect_for_when_second_start_date_less_than_first_end_date():
s = date.today() - timedelta(days=1)
e = date.today() + timedelta(days=1)
x = date.today()
assert periods_intersect(s, e, x, None)
def test_periods_intersect_for_when_second_end_date_greater_than_first_start_date():
s = date.today() - timedelta(days=1)
e = date.today() + timedelta(days=1)
y = date.today()
assert periods_intersect(s, e, None, y)
def test_periods_intersect_for_when_second_start_date_in_first_period():
s = date.today() - timedelta(days=1)
e = date.today() + timedelta(days=1)
x = date.today()
y = date.today()
assert periods_intersect(s, e, x, y)
def test_periods_intersect_for_when_period_contains_first_period():
s = date.today() - timedelta(days=1)
e = date.today() + timedelta(days=1)
x = date.today() - timedelta(days=2)
y = date.today() + timedelta(days=2)
assert periods_intersect(s, e, x, y)
|
<commit_before>from datetime import date
from ..period_utils import get_month_shift, get_periods
def test_get_month_shift_handles_december():
new_month, _ = get_month_shift(12, 1)
assert 12 == new_month
def test_get_periods_when_end_date_before_period_end():
# This should produce eight periods, 2 for each of the years from
# 2015 to 2018 inclusive
periods = get_periods(date(2015, 01, 01), date(2018, 12, 31), 1, 2)
assert 8 == len(periods)
<commit_msg>Add tests for period_intersect logic<commit_after>
|
from datetime import date, timedelta
from ..period_utils import get_month_shift, get_periods, periods_intersect
def test_get_month_shift_handles_december():
new_month, _ = get_month_shift(12, 1)
assert 12 == new_month
def test_get_periods_when_end_date_before_period_end():
# This should produce eight periods, 2 for each of the years from
# 2015 to 2018 inclusive
periods = get_periods(date(2015, 01, 01), date(2018, 12, 31), 1, 2)
assert 8 == len(periods)
def test_periods_instersect_for_no_second_period():
s = date.today() - timedelta(days=1)
e = date.today() + timedelta(days=1)
assert periods_intersect(s, e, None, None)
def test_periods_intersect_for_when_second_start_date_less_than_first_end_date():
s = date.today() - timedelta(days=1)
e = date.today() + timedelta(days=1)
x = date.today()
assert periods_intersect(s, e, x, None)
def test_periods_intersect_for_when_second_end_date_greater_than_first_start_date():
s = date.today() - timedelta(days=1)
e = date.today() + timedelta(days=1)
y = date.today()
assert periods_intersect(s, e, None, y)
def test_periods_intersect_for_when_second_start_date_in_first_period():
s = date.today() - timedelta(days=1)
e = date.today() + timedelta(days=1)
x = date.today()
y = date.today()
assert periods_intersect(s, e, x, y)
def test_periods_intersect_for_when_period_contains_first_period():
s = date.today() - timedelta(days=1)
e = date.today() + timedelta(days=1)
x = date.today() - timedelta(days=2)
y = date.today() + timedelta(days=2)
assert periods_intersect(s, e, x, y)
|
from datetime import date
from ..period_utils import get_month_shift, get_periods
def test_get_month_shift_handles_december():
new_month, _ = get_month_shift(12, 1)
assert 12 == new_month
def test_get_periods_when_end_date_before_period_end():
# This should produce eight periods, 2 for each of the years from
# 2015 to 2018 inclusive
periods = get_periods(date(2015, 01, 01), date(2018, 12, 31), 1, 2)
assert 8 == len(periods)
Add tests for period_intersect logicfrom datetime import date, timedelta
from ..period_utils import get_month_shift, get_periods, periods_intersect
def test_get_month_shift_handles_december():
new_month, _ = get_month_shift(12, 1)
assert 12 == new_month
def test_get_periods_when_end_date_before_period_end():
# This should produce eight periods, 2 for each of the years from
# 2015 to 2018 inclusive
periods = get_periods(date(2015, 01, 01), date(2018, 12, 31), 1, 2)
assert 8 == len(periods)
def test_periods_instersect_for_no_second_period():
s = date.today() - timedelta(days=1)
e = date.today() + timedelta(days=1)
assert periods_intersect(s, e, None, None)
def test_periods_intersect_for_when_second_start_date_less_than_first_end_date():
s = date.today() - timedelta(days=1)
e = date.today() + timedelta(days=1)
x = date.today()
assert periods_intersect(s, e, x, None)
def test_periods_intersect_for_when_second_end_date_greater_than_first_start_date():
s = date.today() - timedelta(days=1)
e = date.today() + timedelta(days=1)
y = date.today()
assert periods_intersect(s, e, None, y)
def test_periods_intersect_for_when_second_start_date_in_first_period():
s = date.today() - timedelta(days=1)
e = date.today() + timedelta(days=1)
x = date.today()
y = date.today()
assert periods_intersect(s, e, x, y)
def test_periods_intersect_for_when_period_contains_first_period():
s = date.today() - timedelta(days=1)
e = date.today() + timedelta(days=1)
x = date.today() - timedelta(days=2)
y = date.today() + timedelta(days=2)
assert periods_intersect(s, e, x, y)
|
<commit_before>from datetime import date
from ..period_utils import get_month_shift, get_periods
def test_get_month_shift_handles_december():
new_month, _ = get_month_shift(12, 1)
assert 12 == new_month
def test_get_periods_when_end_date_before_period_end():
# This should produce eight periods, 2 for each of the years from
# 2015 to 2018 inclusive
periods = get_periods(date(2015, 01, 01), date(2018, 12, 31), 1, 2)
assert 8 == len(periods)
<commit_msg>Add tests for period_intersect logic<commit_after>from datetime import date, timedelta
from ..period_utils import get_month_shift, get_periods, periods_intersect
def test_get_month_shift_handles_december():
new_month, _ = get_month_shift(12, 1)
assert 12 == new_month
def test_get_periods_when_end_date_before_period_end():
# This should produce eight periods, 2 for each of the years from
# 2015 to 2018 inclusive
periods = get_periods(date(2015, 01, 01), date(2018, 12, 31), 1, 2)
assert 8 == len(periods)
def test_periods_instersect_for_no_second_period():
s = date.today() - timedelta(days=1)
e = date.today() + timedelta(days=1)
assert periods_intersect(s, e, None, None)
def test_periods_intersect_for_when_second_start_date_less_than_first_end_date():
s = date.today() - timedelta(days=1)
e = date.today() + timedelta(days=1)
x = date.today()
assert periods_intersect(s, e, x, None)
def test_periods_intersect_for_when_second_end_date_greater_than_first_start_date():
s = date.today() - timedelta(days=1)
e = date.today() + timedelta(days=1)
y = date.today()
assert periods_intersect(s, e, None, y)
def test_periods_intersect_for_when_second_start_date_in_first_period():
s = date.today() - timedelta(days=1)
e = date.today() + timedelta(days=1)
x = date.today()
y = date.today()
assert periods_intersect(s, e, x, y)
def test_periods_intersect_for_when_period_contains_first_period():
s = date.today() - timedelta(days=1)
e = date.today() + timedelta(days=1)
x = date.today() - timedelta(days=2)
y = date.today() + timedelta(days=2)
assert periods_intersect(s, e, x, y)
|
74ededafa70c7ec5548d86289c6dbfc5e4cff6f2
|
tests/integration/ssh/test_deploy.py
|
tests/integration/ssh/test_deploy.py
|
# -*- coding: utf-8 -*-
'''
salt-ssh testing
'''
# Import Python libs
from __future__ import absolute_import
# Import salt testing libs
from tests.support.case import SSHCase
class SSHTest(SSHCase):
'''
Test general salt-ssh functionality
'''
def test_ping(self):
'''
Test a simple ping
'''
ret = self.run_function('test.ping')
self.assertTrue(ret, 'Ping did not return true')
|
# -*- coding: utf-8 -*-
'''
salt-ssh testing
'''
# Import Python libs
from __future__ import absolute_import
import os
import shutil
# Import salt testing libs
from tests.support.case import SSHCase
class SSHTest(SSHCase):
'''
Test general salt-ssh functionality
'''
def test_ping(self):
'''
Test a simple ping
'''
ret = self.run_function('test.ping')
self.assertTrue(ret, 'Ping did not return true')
def test_thin_dir(self):
'''
test to make sure thin_dir is created
and salt-call file is included
'''
thin_dir = self.run_function('config.get', ['thin_dir'], wipe=False)
os.path.isdir(thin_dir)
os.path.exists(os.path.join(thin_dir, 'salt-call'))
os.path.exists(os.path.join(thin_dir, 'running_data'))
def tearDown(self):
'''
make sure to clean up any old ssh directories
'''
salt_dir = self.run_function('config.get', ['thin_dir'], wipe=False)
if os.path.exists(salt_dir):
shutil.rmtree(salt_dir)
|
Add ssh thin_dir integration test
|
Add ssh thin_dir integration test
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
# -*- coding: utf-8 -*-
'''
salt-ssh testing
'''
# Import Python libs
from __future__ import absolute_import
# Import salt testing libs
from tests.support.case import SSHCase
class SSHTest(SSHCase):
'''
Test general salt-ssh functionality
'''
def test_ping(self):
'''
Test a simple ping
'''
ret = self.run_function('test.ping')
self.assertTrue(ret, 'Ping did not return true')
Add ssh thin_dir integration test
|
# -*- coding: utf-8 -*-
'''
salt-ssh testing
'''
# Import Python libs
from __future__ import absolute_import
import os
import shutil
# Import salt testing libs
from tests.support.case import SSHCase
class SSHTest(SSHCase):
'''
Test general salt-ssh functionality
'''
def test_ping(self):
'''
Test a simple ping
'''
ret = self.run_function('test.ping')
self.assertTrue(ret, 'Ping did not return true')
def test_thin_dir(self):
'''
test to make sure thin_dir is created
and salt-call file is included
'''
thin_dir = self.run_function('config.get', ['thin_dir'], wipe=False)
os.path.isdir(thin_dir)
os.path.exists(os.path.join(thin_dir, 'salt-call'))
os.path.exists(os.path.join(thin_dir, 'running_data'))
def tearDown(self):
'''
make sure to clean up any old ssh directories
'''
salt_dir = self.run_function('config.get', ['thin_dir'], wipe=False)
if os.path.exists(salt_dir):
shutil.rmtree(salt_dir)
|
<commit_before># -*- coding: utf-8 -*-
'''
salt-ssh testing
'''
# Import Python libs
from __future__ import absolute_import
# Import salt testing libs
from tests.support.case import SSHCase
class SSHTest(SSHCase):
'''
Test general salt-ssh functionality
'''
def test_ping(self):
'''
Test a simple ping
'''
ret = self.run_function('test.ping')
self.assertTrue(ret, 'Ping did not return true')
<commit_msg>Add ssh thin_dir integration test<commit_after>
|
# -*- coding: utf-8 -*-
'''
salt-ssh testing
'''
# Import Python libs
from __future__ import absolute_import
import os
import shutil
# Import salt testing libs
from tests.support.case import SSHCase
class SSHTest(SSHCase):
'''
Test general salt-ssh functionality
'''
def test_ping(self):
'''
Test a simple ping
'''
ret = self.run_function('test.ping')
self.assertTrue(ret, 'Ping did not return true')
def test_thin_dir(self):
'''
test to make sure thin_dir is created
and salt-call file is included
'''
thin_dir = self.run_function('config.get', ['thin_dir'], wipe=False)
os.path.isdir(thin_dir)
os.path.exists(os.path.join(thin_dir, 'salt-call'))
os.path.exists(os.path.join(thin_dir, 'running_data'))
def tearDown(self):
'''
make sure to clean up any old ssh directories
'''
salt_dir = self.run_function('config.get', ['thin_dir'], wipe=False)
if os.path.exists(salt_dir):
shutil.rmtree(salt_dir)
|
# -*- coding: utf-8 -*-
'''
salt-ssh testing
'''
# Import Python libs
from __future__ import absolute_import
# Import salt testing libs
from tests.support.case import SSHCase
class SSHTest(SSHCase):
'''
Test general salt-ssh functionality
'''
def test_ping(self):
'''
Test a simple ping
'''
ret = self.run_function('test.ping')
self.assertTrue(ret, 'Ping did not return true')
Add ssh thin_dir integration test# -*- coding: utf-8 -*-
'''
salt-ssh testing
'''
# Import Python libs
from __future__ import absolute_import
import os
import shutil
# Import salt testing libs
from tests.support.case import SSHCase
class SSHTest(SSHCase):
'''
Test general salt-ssh functionality
'''
def test_ping(self):
'''
Test a simple ping
'''
ret = self.run_function('test.ping')
self.assertTrue(ret, 'Ping did not return true')
def test_thin_dir(self):
'''
test to make sure thin_dir is created
and salt-call file is included
'''
thin_dir = self.run_function('config.get', ['thin_dir'], wipe=False)
os.path.isdir(thin_dir)
os.path.exists(os.path.join(thin_dir, 'salt-call'))
os.path.exists(os.path.join(thin_dir, 'running_data'))
def tearDown(self):
'''
make sure to clean up any old ssh directories
'''
salt_dir = self.run_function('config.get', ['thin_dir'], wipe=False)
if os.path.exists(salt_dir):
shutil.rmtree(salt_dir)
|
<commit_before># -*- coding: utf-8 -*-
'''
salt-ssh testing
'''
# Import Python libs
from __future__ import absolute_import
# Import salt testing libs
from tests.support.case import SSHCase
class SSHTest(SSHCase):
'''
Test general salt-ssh functionality
'''
def test_ping(self):
'''
Test a simple ping
'''
ret = self.run_function('test.ping')
self.assertTrue(ret, 'Ping did not return true')
<commit_msg>Add ssh thin_dir integration test<commit_after># -*- coding: utf-8 -*-
'''
salt-ssh testing
'''
# Import Python libs
from __future__ import absolute_import
import os
import shutil
# Import salt testing libs
from tests.support.case import SSHCase
class SSHTest(SSHCase):
'''
Test general salt-ssh functionality
'''
def test_ping(self):
'''
Test a simple ping
'''
ret = self.run_function('test.ping')
self.assertTrue(ret, 'Ping did not return true')
def test_thin_dir(self):
'''
test to make sure thin_dir is created
and salt-call file is included
'''
thin_dir = self.run_function('config.get', ['thin_dir'], wipe=False)
os.path.isdir(thin_dir)
os.path.exists(os.path.join(thin_dir, 'salt-call'))
os.path.exists(os.path.join(thin_dir, 'running_data'))
def tearDown(self):
'''
make sure to clean up any old ssh directories
'''
salt_dir = self.run_function('config.get', ['thin_dir'], wipe=False)
if os.path.exists(salt_dir):
shutil.rmtree(salt_dir)
|
1ef311a2bef956acf09c8aae21f2e1e27c02e511
|
dsub/_dsub_version.py
|
dsub/_dsub_version.py
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
"""
DSUB_VERSION = '0.1.2'
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.1.3.dev0'
|
Update dsub version to 0.1.3.dev0
|
Update dsub version to 0.1.3.dev0
PiperOrigin-RevId: 173965107
|
Python
|
apache-2.0
|
DataBiosphere/dsub,DataBiosphere/dsub
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
"""
DSUB_VERSION = '0.1.2'
Update dsub version to 0.1.3.dev0
PiperOrigin-RevId: 173965107
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.1.3.dev0'
|
<commit_before># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
"""
DSUB_VERSION = '0.1.2'
<commit_msg>Update dsub version to 0.1.3.dev0
PiperOrigin-RevId: 173965107<commit_after>
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.1.3.dev0'
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
"""
DSUB_VERSION = '0.1.2'
Update dsub version to 0.1.3.dev0
PiperOrigin-RevId: 173965107# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.1.3.dev0'
|
<commit_before># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
"""
DSUB_VERSION = '0.1.2'
<commit_msg>Update dsub version to 0.1.3.dev0
PiperOrigin-RevId: 173965107<commit_after># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.1.3.dev0'
|
7a732c70fb5e07181aeb8f2386230fbecf0667e9
|
test/test_historynode.py
|
test/test_historynode.py
|
""" Tests for the HistoryNode module """
pass
|
""" Tests for the HistoryNode module """
from contextlib import contextmanager
from io import StringIO
import sys
import unittest
from src import historynode
@contextmanager
def captured_output():
""" Redirects stdout to StringIO so we can inspect Print statements """
new_out = StringIO()
old_out = sys.stdout
try:
sys.stdout = new_out
yield sys.stdout
finally:
sys.stdout = old_out
class TestHistoryNode(unittest.TestCase):
""" Tests for the historynode module, containing the HistoryNode class """
def test_print_board(self):
"""Check that print_board works"""
with captured_output() as out:
hn_obj = historynode.HistoryNode()
hn_obj.print_board()
actual_print = out.getvalue().strip()
expected_print = ("Player 1: None\n"
"Player 2: None\n"
"Result: None\n"
"Game Type: None\n"
"Fox Search: None\n"
"Goose Search: None\n"
"Half Move: None\n"
" -1 -1 -1 \n"
" -1 -1 -1 \n"
"-1 -1 -1 -1 -1 -1 -1\n"
"-1 -1 -1 -1 -1 -1 -1\n"
"-1 -1 -1 -1 -1 -1 -1\n"
" -1 -1 -1 \n"
" -1 -1 -1")
self.assertEqual(actual_print, expected_print)
|
Add unit test for print_board()
|
Add unit test for print_board()
|
Python
|
mit
|
blairck/jaeger
|
""" Tests for the HistoryNode module """
pass
Add unit test for print_board()
|
""" Tests for the HistoryNode module """
from contextlib import contextmanager
from io import StringIO
import sys
import unittest
from src import historynode
@contextmanager
def captured_output():
""" Redirects stdout to StringIO so we can inspect Print statements """
new_out = StringIO()
old_out = sys.stdout
try:
sys.stdout = new_out
yield sys.stdout
finally:
sys.stdout = old_out
class TestHistoryNode(unittest.TestCase):
""" Tests for the historynode module, containing the HistoryNode class """
def test_print_board(self):
"""Check that print_board works"""
with captured_output() as out:
hn_obj = historynode.HistoryNode()
hn_obj.print_board()
actual_print = out.getvalue().strip()
expected_print = ("Player 1: None\n"
"Player 2: None\n"
"Result: None\n"
"Game Type: None\n"
"Fox Search: None\n"
"Goose Search: None\n"
"Half Move: None\n"
" -1 -1 -1 \n"
" -1 -1 -1 \n"
"-1 -1 -1 -1 -1 -1 -1\n"
"-1 -1 -1 -1 -1 -1 -1\n"
"-1 -1 -1 -1 -1 -1 -1\n"
" -1 -1 -1 \n"
" -1 -1 -1")
self.assertEqual(actual_print, expected_print)
|
<commit_before>""" Tests for the HistoryNode module """
pass
<commit_msg>Add unit test for print_board()<commit_after>
|
""" Tests for the HistoryNode module """
from contextlib import contextmanager
from io import StringIO
import sys
import unittest
from src import historynode
@contextmanager
def captured_output():
""" Redirects stdout to StringIO so we can inspect Print statements """
new_out = StringIO()
old_out = sys.stdout
try:
sys.stdout = new_out
yield sys.stdout
finally:
sys.stdout = old_out
class TestHistoryNode(unittest.TestCase):
""" Tests for the historynode module, containing the HistoryNode class """
def test_print_board(self):
"""Check that print_board works"""
with captured_output() as out:
hn_obj = historynode.HistoryNode()
hn_obj.print_board()
actual_print = out.getvalue().strip()
expected_print = ("Player 1: None\n"
"Player 2: None\n"
"Result: None\n"
"Game Type: None\n"
"Fox Search: None\n"
"Goose Search: None\n"
"Half Move: None\n"
" -1 -1 -1 \n"
" -1 -1 -1 \n"
"-1 -1 -1 -1 -1 -1 -1\n"
"-1 -1 -1 -1 -1 -1 -1\n"
"-1 -1 -1 -1 -1 -1 -1\n"
" -1 -1 -1 \n"
" -1 -1 -1")
self.assertEqual(actual_print, expected_print)
|
""" Tests for the HistoryNode module """
pass
Add unit test for print_board()""" Tests for the HistoryNode module """
from contextlib import contextmanager
from io import StringIO
import sys
import unittest
from src import historynode
@contextmanager
def captured_output():
""" Redirects stdout to StringIO so we can inspect Print statements """
new_out = StringIO()
old_out = sys.stdout
try:
sys.stdout = new_out
yield sys.stdout
finally:
sys.stdout = old_out
class TestHistoryNode(unittest.TestCase):
""" Tests for the historynode module, containing the HistoryNode class """
def test_print_board(self):
"""Check that print_board works"""
with captured_output() as out:
hn_obj = historynode.HistoryNode()
hn_obj.print_board()
actual_print = out.getvalue().strip()
expected_print = ("Player 1: None\n"
"Player 2: None\n"
"Result: None\n"
"Game Type: None\n"
"Fox Search: None\n"
"Goose Search: None\n"
"Half Move: None\n"
" -1 -1 -1 \n"
" -1 -1 -1 \n"
"-1 -1 -1 -1 -1 -1 -1\n"
"-1 -1 -1 -1 -1 -1 -1\n"
"-1 -1 -1 -1 -1 -1 -1\n"
" -1 -1 -1 \n"
" -1 -1 -1")
self.assertEqual(actual_print, expected_print)
|
<commit_before>""" Tests for the HistoryNode module """
pass
<commit_msg>Add unit test for print_board()<commit_after>""" Tests for the HistoryNode module """
from contextlib import contextmanager
from io import StringIO
import sys
import unittest
from src import historynode
@contextmanager
def captured_output():
""" Redirects stdout to StringIO so we can inspect Print statements """
new_out = StringIO()
old_out = sys.stdout
try:
sys.stdout = new_out
yield sys.stdout
finally:
sys.stdout = old_out
class TestHistoryNode(unittest.TestCase):
""" Tests for the historynode module, containing the HistoryNode class """
def test_print_board(self):
"""Check that print_board works"""
with captured_output() as out:
hn_obj = historynode.HistoryNode()
hn_obj.print_board()
actual_print = out.getvalue().strip()
expected_print = ("Player 1: None\n"
"Player 2: None\n"
"Result: None\n"
"Game Type: None\n"
"Fox Search: None\n"
"Goose Search: None\n"
"Half Move: None\n"
" -1 -1 -1 \n"
" -1 -1 -1 \n"
"-1 -1 -1 -1 -1 -1 -1\n"
"-1 -1 -1 -1 -1 -1 -1\n"
"-1 -1 -1 -1 -1 -1 -1\n"
" -1 -1 -1 \n"
" -1 -1 -1")
self.assertEqual(actual_print, expected_print)
|
b5ca3dd7b5c743987223b42e302a4044367d4dc9
|
opps/core/admin/article.py
|
opps/core/admin/article.py
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from opps.core.models import Post, PostImage
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor()}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
inlines = [PostImageInline]
fieldsets = (
(None, {'fields': ('title', 'short_title', 'headline', 'channel',
'content',)}),
(None, {'fields': ('main_image', 'credit', 'slug',)})
)
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from opps.core.models import Post, PostImage, PostSource
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
class PostSourceInline(admin.TabularInline):
model = PostSource
fk_name = 'post'
raw_id_fields = ['source']
actions = None
extra = 1
fieldsets = [(None, {
'classes': ('collapse',),
'fields': ('source', 'order')})]
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor()}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
inlines = [PostImageInline, PostSourceInline]
fieldsets = (
(None, {'fields': ('title', 'short_title', 'headline', 'channel',
'content',)}),
(None, {'fields': ('main_image', 'slug',)})
)
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
|
Create post source inline (admin Tabular Inline) on core post
|
Create post source inline (admin Tabular Inline) on core post
|
Python
|
mit
|
YACOWS/opps,opps/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,YACOWS/opps,opps/opps,williamroot/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,opps/opps,opps/opps,williamroot/opps,jeanmask/opps,jeanmask/opps
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from opps.core.models import Post, PostImage
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor()}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
inlines = [PostImageInline]
fieldsets = (
(None, {'fields': ('title', 'short_title', 'headline', 'channel',
'content',)}),
(None, {'fields': ('main_image', 'credit', 'slug',)})
)
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
Create post source inline (admin Tabular Inline) on core post
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from opps.core.models import Post, PostImage, PostSource
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
class PostSourceInline(admin.TabularInline):
model = PostSource
fk_name = 'post'
raw_id_fields = ['source']
actions = None
extra = 1
fieldsets = [(None, {
'classes': ('collapse',),
'fields': ('source', 'order')})]
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor()}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
inlines = [PostImageInline, PostSourceInline]
fieldsets = (
(None, {'fields': ('title', 'short_title', 'headline', 'channel',
'content',)}),
(None, {'fields': ('main_image', 'slug',)})
)
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
|
<commit_before># -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from opps.core.models import Post, PostImage
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor()}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
inlines = [PostImageInline]
fieldsets = (
(None, {'fields': ('title', 'short_title', 'headline', 'channel',
'content',)}),
(None, {'fields': ('main_image', 'credit', 'slug',)})
)
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
<commit_msg>Create post source inline (admin Tabular Inline) on core post<commit_after>
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from opps.core.models import Post, PostImage, PostSource
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
class PostSourceInline(admin.TabularInline):
model = PostSource
fk_name = 'post'
raw_id_fields = ['source']
actions = None
extra = 1
fieldsets = [(None, {
'classes': ('collapse',),
'fields': ('source', 'order')})]
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor()}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
inlines = [PostImageInline, PostSourceInline]
fieldsets = (
(None, {'fields': ('title', 'short_title', 'headline', 'channel',
'content',)}),
(None, {'fields': ('main_image', 'slug',)})
)
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from opps.core.models import Post, PostImage
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor()}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
inlines = [PostImageInline]
fieldsets = (
(None, {'fields': ('title', 'short_title', 'headline', 'channel',
'content',)}),
(None, {'fields': ('main_image', 'credit', 'slug',)})
)
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
Create post source inline (admin Tabular Inline) on core post# -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from opps.core.models import Post, PostImage, PostSource
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
class PostSourceInline(admin.TabularInline):
model = PostSource
fk_name = 'post'
raw_id_fields = ['source']
actions = None
extra = 1
fieldsets = [(None, {
'classes': ('collapse',),
'fields': ('source', 'order')})]
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor()}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
inlines = [PostImageInline, PostSourceInline]
fieldsets = (
(None, {'fields': ('title', 'short_title', 'headline', 'channel',
'content',)}),
(None, {'fields': ('main_image', 'slug',)})
)
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
|
<commit_before># -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from opps.core.models import Post, PostImage
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor()}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
inlines = [PostImageInline]
fieldsets = (
(None, {'fields': ('title', 'short_title', 'headline', 'channel',
'content',)}),
(None, {'fields': ('main_image', 'credit', 'slug',)})
)
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
<commit_msg>Create post source inline (admin Tabular Inline) on core post<commit_after># -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from opps.core.models import Post, PostImage, PostSource
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
class PostSourceInline(admin.TabularInline):
model = PostSource
fk_name = 'post'
raw_id_fields = ['source']
actions = None
extra = 1
fieldsets = [(None, {
'classes': ('collapse',),
'fields': ('source', 'order')})]
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor()}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
inlines = [PostImageInline, PostSourceInline]
fieldsets = (
(None, {'fields': ('title', 'short_title', 'headline', 'channel',
'content',)}),
(None, {'fields': ('main_image', 'slug',)})
)
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
|
f627a76e8dac96282b0a9f76eeda8c7db70cc030
|
telemetry/telemetry/internal/actions/javascript_click.py
|
telemetry/telemetry/internal/actions/javascript_click.py
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.actions import page_action
class ClickElementAction(page_action.ElementPageAction):
def RunAction(self, tab):
code = '''
function(element, errorMsg) {
if (!element) {
throw Error('Cannot find element: ' + errorMsg);
}
element.click();
}'''
# Click handler that plays media or requests fullscreen may not take
# effects without user_gesture set to True.
self.EvaluateCallback(tab, code)
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.actions import page_action
class ClickElementAction(page_action.ElementPageAction):
def RunAction(self, tab):
code = '''
function(element, errorMsg) {
if (!element) {
throw Error('Cannot find element: ' + errorMsg);
}
element.click();
}'''
# Click handler that plays media or requests fullscreen may not take
# effects without user_gesture set to True.
self.EvaluateCallback(tab, code, user_gesture=True)
|
Fix a regression where the user_gesture bit isn't set for ClickElement.
|
Fix a regression where the user_gesture bit isn't set for ClickElement.
The regrssion was introduced in
https://chromium-review.googlesource.com/c/catapult/+/1335627
Once this rolls into Chromium, I'll add a chromium side test to prevent
it from regress again in the future.
Bug: chromium:885912
TEST=manual
R=58bdb6276c01e3ed53e660bf706aa902204feb0d@chromium.org,kbr@chromium.org
Change-Id: Ic1c7e83a3e7d7318baa81531925dab07db9450ca
Reviewed-on: https://chromium-review.googlesource.com/c/1476957
Reviewed-by: Caleb Rouleau <58bdb6276c01e3ed53e660bf706aa902204feb0d@chromium.org>
Commit-Queue: Zhenyao Mo <655bdf1d95ff956cd68886dd9e86ee5e987c9dbb@chromium.org>
|
Python
|
bsd-3-clause
|
catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.actions import page_action
class ClickElementAction(page_action.ElementPageAction):
def RunAction(self, tab):
code = '''
function(element, errorMsg) {
if (!element) {
throw Error('Cannot find element: ' + errorMsg);
}
element.click();
}'''
# Click handler that plays media or requests fullscreen may not take
# effects without user_gesture set to True.
self.EvaluateCallback(tab, code)
Fix a regression where the user_gesture bit isn't set for ClickElement.
The regrssion was introduced in
https://chromium-review.googlesource.com/c/catapult/+/1335627
Once this rolls into Chromium, I'll add a chromium side test to prevent
it from regress again in the future.
Bug: chromium:885912
TEST=manual
R=58bdb6276c01e3ed53e660bf706aa902204feb0d@chromium.org,kbr@chromium.org
Change-Id: Ic1c7e83a3e7d7318baa81531925dab07db9450ca
Reviewed-on: https://chromium-review.googlesource.com/c/1476957
Reviewed-by: Caleb Rouleau <58bdb6276c01e3ed53e660bf706aa902204feb0d@chromium.org>
Commit-Queue: Zhenyao Mo <655bdf1d95ff956cd68886dd9e86ee5e987c9dbb@chromium.org>
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.actions import page_action
class ClickElementAction(page_action.ElementPageAction):
def RunAction(self, tab):
code = '''
function(element, errorMsg) {
if (!element) {
throw Error('Cannot find element: ' + errorMsg);
}
element.click();
}'''
# Click handler that plays media or requests fullscreen may not take
# effects without user_gesture set to True.
self.EvaluateCallback(tab, code, user_gesture=True)
|
<commit_before># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.actions import page_action
class ClickElementAction(page_action.ElementPageAction):
def RunAction(self, tab):
code = '''
function(element, errorMsg) {
if (!element) {
throw Error('Cannot find element: ' + errorMsg);
}
element.click();
}'''
# Click handler that plays media or requests fullscreen may not take
# effects without user_gesture set to True.
self.EvaluateCallback(tab, code)
<commit_msg>Fix a regression where the user_gesture bit isn't set for ClickElement.
The regrssion was introduced in
https://chromium-review.googlesource.com/c/catapult/+/1335627
Once this rolls into Chromium, I'll add a chromium side test to prevent
it from regress again in the future.
Bug: chromium:885912
TEST=manual
R=58bdb6276c01e3ed53e660bf706aa902204feb0d@chromium.org,kbr@chromium.org
Change-Id: Ic1c7e83a3e7d7318baa81531925dab07db9450ca
Reviewed-on: https://chromium-review.googlesource.com/c/1476957
Reviewed-by: Caleb Rouleau <58bdb6276c01e3ed53e660bf706aa902204feb0d@chromium.org>
Commit-Queue: Zhenyao Mo <655bdf1d95ff956cd68886dd9e86ee5e987c9dbb@chromium.org><commit_after>
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.actions import page_action
class ClickElementAction(page_action.ElementPageAction):
def RunAction(self, tab):
code = '''
function(element, errorMsg) {
if (!element) {
throw Error('Cannot find element: ' + errorMsg);
}
element.click();
}'''
# Click handler that plays media or requests fullscreen may not take
# effects without user_gesture set to True.
self.EvaluateCallback(tab, code, user_gesture=True)
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.actions import page_action
class ClickElementAction(page_action.ElementPageAction):
def RunAction(self, tab):
code = '''
function(element, errorMsg) {
if (!element) {
throw Error('Cannot find element: ' + errorMsg);
}
element.click();
}'''
# Click handler that plays media or requests fullscreen may not take
# effects without user_gesture set to True.
self.EvaluateCallback(tab, code)
Fix a regression where the user_gesture bit isn't set for ClickElement.
The regrssion was introduced in
https://chromium-review.googlesource.com/c/catapult/+/1335627
Once this rolls into Chromium, I'll add a chromium side test to prevent
it from regress again in the future.
Bug: chromium:885912
TEST=manual
R=58bdb6276c01e3ed53e660bf706aa902204feb0d@chromium.org,kbr@chromium.org
Change-Id: Ic1c7e83a3e7d7318baa81531925dab07db9450ca
Reviewed-on: https://chromium-review.googlesource.com/c/1476957
Reviewed-by: Caleb Rouleau <58bdb6276c01e3ed53e660bf706aa902204feb0d@chromium.org>
Commit-Queue: Zhenyao Mo <655bdf1d95ff956cd68886dd9e86ee5e987c9dbb@chromium.org># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.actions import page_action
class ClickElementAction(page_action.ElementPageAction):
def RunAction(self, tab):
code = '''
function(element, errorMsg) {
if (!element) {
throw Error('Cannot find element: ' + errorMsg);
}
element.click();
}'''
# Click handler that plays media or requests fullscreen may not take
# effects without user_gesture set to True.
self.EvaluateCallback(tab, code, user_gesture=True)
|
<commit_before># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.actions import page_action
class ClickElementAction(page_action.ElementPageAction):
def RunAction(self, tab):
code = '''
function(element, errorMsg) {
if (!element) {
throw Error('Cannot find element: ' + errorMsg);
}
element.click();
}'''
# Click handler that plays media or requests fullscreen may not take
# effects without user_gesture set to True.
self.EvaluateCallback(tab, code)
<commit_msg>Fix a regression where the user_gesture bit isn't set for ClickElement.
The regrssion was introduced in
https://chromium-review.googlesource.com/c/catapult/+/1335627
Once this rolls into Chromium, I'll add a chromium side test to prevent
it from regress again in the future.
Bug: chromium:885912
TEST=manual
R=58bdb6276c01e3ed53e660bf706aa902204feb0d@chromium.org,kbr@chromium.org
Change-Id: Ic1c7e83a3e7d7318baa81531925dab07db9450ca
Reviewed-on: https://chromium-review.googlesource.com/c/1476957
Reviewed-by: Caleb Rouleau <58bdb6276c01e3ed53e660bf706aa902204feb0d@chromium.org>
Commit-Queue: Zhenyao Mo <655bdf1d95ff956cd68886dd9e86ee5e987c9dbb@chromium.org><commit_after># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.actions import page_action
class ClickElementAction(page_action.ElementPageAction):
def RunAction(self, tab):
code = '''
function(element, errorMsg) {
if (!element) {
throw Error('Cannot find element: ' + errorMsg);
}
element.click();
}'''
# Click handler that plays media or requests fullscreen may not take
# effects without user_gesture set to True.
self.EvaluateCallback(tab, code, user_gesture=True)
|
9e7dc537d09555d9c77ff5e1f16f5577721910f9
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
params = dict(
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'wagtailgeowidget': {
'handlers': ['console'],
'level': 'ERROR',
'propagate': True,
},
},
},
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
},
INSTALLED_APPS=[
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sites',
'wagtail.wagtailcore',
'wagtail.wagtailsites',
'wagtail.wagtailusers',
'wagtail.wagtailimages',
'taggit',
'wagtailgeowidget',
"tests",
],
MIDDLEWARE_CLASSES=[],
ROOT_URLCONF='tests.urls',
)
settings.configure(**params)
def runtests():
argv = sys.argv[:1] + ["test"] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == "__main__":
runtests()
|
#!/usr/bin/env python
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
params = dict(
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'wagtailgeowidget': {
'handlers': ['console'],
'level': 'ERROR',
'propagate': True,
},
},
},
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
},
INSTALLED_APPS=[
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sites',
'wagtail.core',
'wagtail.sites',
'wagtail.users',
'wagtail.images',
'taggit',
'wagtailgeowidget',
"tests",
],
MIDDLEWARE_CLASSES=[],
ROOT_URLCONF='tests.urls',
)
settings.configure(**params)
def runtests():
argv = sys.argv[:1] + ["test"] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == "__main__":
runtests()
|
Fix issue with old wagtail core paths
|
Fix issue with old wagtail core paths
|
Python
|
mit
|
Frojd/wagtail-geo-widget,Frojd/wagtail-geo-widget,Frojd/wagtail-geo-widget,Frojd/wagtail-geo-widget
|
#!/usr/bin/env python
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
params = dict(
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'wagtailgeowidget': {
'handlers': ['console'],
'level': 'ERROR',
'propagate': True,
},
},
},
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
},
INSTALLED_APPS=[
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sites',
'wagtail.wagtailcore',
'wagtail.wagtailsites',
'wagtail.wagtailusers',
'wagtail.wagtailimages',
'taggit',
'wagtailgeowidget',
"tests",
],
MIDDLEWARE_CLASSES=[],
ROOT_URLCONF='tests.urls',
)
settings.configure(**params)
def runtests():
argv = sys.argv[:1] + ["test"] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == "__main__":
runtests()
Fix issue with old wagtail core paths
|
#!/usr/bin/env python
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
params = dict(
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'wagtailgeowidget': {
'handlers': ['console'],
'level': 'ERROR',
'propagate': True,
},
},
},
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
},
INSTALLED_APPS=[
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sites',
'wagtail.core',
'wagtail.sites',
'wagtail.users',
'wagtail.images',
'taggit',
'wagtailgeowidget',
"tests",
],
MIDDLEWARE_CLASSES=[],
ROOT_URLCONF='tests.urls',
)
settings.configure(**params)
def runtests():
argv = sys.argv[:1] + ["test"] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == "__main__":
runtests()
|
<commit_before>#!/usr/bin/env python
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
params = dict(
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'wagtailgeowidget': {
'handlers': ['console'],
'level': 'ERROR',
'propagate': True,
},
},
},
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
},
INSTALLED_APPS=[
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sites',
'wagtail.wagtailcore',
'wagtail.wagtailsites',
'wagtail.wagtailusers',
'wagtail.wagtailimages',
'taggit',
'wagtailgeowidget',
"tests",
],
MIDDLEWARE_CLASSES=[],
ROOT_URLCONF='tests.urls',
)
settings.configure(**params)
def runtests():
argv = sys.argv[:1] + ["test"] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == "__main__":
runtests()
<commit_msg>Fix issue with old wagtail core paths<commit_after>
|
#!/usr/bin/env python
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
params = dict(
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'wagtailgeowidget': {
'handlers': ['console'],
'level': 'ERROR',
'propagate': True,
},
},
},
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
},
INSTALLED_APPS=[
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sites',
'wagtail.core',
'wagtail.sites',
'wagtail.users',
'wagtail.images',
'taggit',
'wagtailgeowidget',
"tests",
],
MIDDLEWARE_CLASSES=[],
ROOT_URLCONF='tests.urls',
)
settings.configure(**params)
def runtests():
argv = sys.argv[:1] + ["test"] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == "__main__":
runtests()
|
#!/usr/bin/env python
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
params = dict(
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'wagtailgeowidget': {
'handlers': ['console'],
'level': 'ERROR',
'propagate': True,
},
},
},
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
},
INSTALLED_APPS=[
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sites',
'wagtail.wagtailcore',
'wagtail.wagtailsites',
'wagtail.wagtailusers',
'wagtail.wagtailimages',
'taggit',
'wagtailgeowidget',
"tests",
],
MIDDLEWARE_CLASSES=[],
ROOT_URLCONF='tests.urls',
)
settings.configure(**params)
def runtests():
argv = sys.argv[:1] + ["test"] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == "__main__":
runtests()
Fix issue with old wagtail core paths#!/usr/bin/env python
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
params = dict(
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'wagtailgeowidget': {
'handlers': ['console'],
'level': 'ERROR',
'propagate': True,
},
},
},
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
},
INSTALLED_APPS=[
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sites',
'wagtail.core',
'wagtail.sites',
'wagtail.users',
'wagtail.images',
'taggit',
'wagtailgeowidget',
"tests",
],
MIDDLEWARE_CLASSES=[],
ROOT_URLCONF='tests.urls',
)
settings.configure(**params)
def runtests():
argv = sys.argv[:1] + ["test"] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == "__main__":
runtests()
|
<commit_before>#!/usr/bin/env python
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
params = dict(
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'wagtailgeowidget': {
'handlers': ['console'],
'level': 'ERROR',
'propagate': True,
},
},
},
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
},
INSTALLED_APPS=[
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sites',
'wagtail.wagtailcore',
'wagtail.wagtailsites',
'wagtail.wagtailusers',
'wagtail.wagtailimages',
'taggit',
'wagtailgeowidget',
"tests",
],
MIDDLEWARE_CLASSES=[],
ROOT_URLCONF='tests.urls',
)
settings.configure(**params)
def runtests():
argv = sys.argv[:1] + ["test"] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == "__main__":
runtests()
<commit_msg>Fix issue with old wagtail core paths<commit_after>#!/usr/bin/env python
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
params = dict(
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'wagtailgeowidget': {
'handlers': ['console'],
'level': 'ERROR',
'propagate': True,
},
},
},
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
},
INSTALLED_APPS=[
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sites',
'wagtail.core',
'wagtail.sites',
'wagtail.users',
'wagtail.images',
'taggit',
'wagtailgeowidget',
"tests",
],
MIDDLEWARE_CLASSES=[],
ROOT_URLCONF='tests.urls',
)
settings.configure(**params)
def runtests():
argv = sys.argv[:1] + ["test"] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == "__main__":
runtests()
|
6f5784e516f2f523ce83ab3fe4e7dda9d7f6b602
|
examples/demo/demo.py
|
examples/demo/demo.py
|
# coding=utf8
"""
A mini-demo of what wsme can do.
To run it::
python setup.py develop
Then::
paster serve demo.cfg
"""
from wsme import *
from wsme.wsgi import adapt
import logging
class Person(object):
id = int
firstname = unicode
lastname = unicode
class DemoRoot(WSRoot):
@expose(int)
@validate(int, int)
def multiply(self, a, b):
return a * b
@expose(unicode)
def helloworld(self):
return u"こんにちは世界 (<- Hello World in Japanese !)"
@expose(Person)
def getperson(self):
p = Person()
p.id = 12
p.firstname = u'Ross'
p.lastname = u'Geler'
return p
def app_factory(global_config, **local_conf):
root = DemoRoot()
root.addprotocol('soap',
tns='http://example.com/demo',
typenamespace='http://example.com/demo/types',
baseURL='http://127.0.0.1:8989/',
)
root.addprotocol('restjson')
return adapt(root)
logging.basicConfig(level=logging.DEBUG)
|
# coding=utf8
"""
A mini-demo of what wsme can do.
To run it::
python setup.py develop
Then::
paster serve demo.cfg
"""
from wsme import WSRoot, expose, validate
from wsme.wsgi import adapt
import logging
class Person(object):
id = int
firstname = unicode
lastname = unicode
class DemoRoot(WSRoot):
@expose(int)
@validate(int, int)
def multiply(self, a, b):
return a * b
@expose(unicode)
def helloworld(self):
return u"こんにちは世界 (<- Hello World in Japanese !)"
@expose(Person)
def getperson(self):
p = Person()
p.id = 12
p.firstname = u'Ross'
p.lastname = u'Geler'
return p
@expose(Person)
@validate(Person)
def setperson(self, person):
return person
def app_factory(global_config, **local_conf):
root = DemoRoot()
root.addprotocol('soap',
tns='http://example.com/demo',
typenamespace='http://example.com/demo/types',
baseURL='http://127.0.0.1:8989/',
)
root.addprotocol('restjson')
return adapt(root)
logging.basicConfig(level=logging.DEBUG)
|
Add a setperson function to test complex function arguments
|
Add a setperson function to test complex function arguments
|
Python
|
mit
|
stackforge/wsme
|
# coding=utf8
"""
A mini-demo of what wsme can do.
To run it::
python setup.py develop
Then::
paster serve demo.cfg
"""
from wsme import *
from wsme.wsgi import adapt
import logging
class Person(object):
id = int
firstname = unicode
lastname = unicode
class DemoRoot(WSRoot):
@expose(int)
@validate(int, int)
def multiply(self, a, b):
return a * b
@expose(unicode)
def helloworld(self):
return u"こんにちは世界 (<- Hello World in Japanese !)"
@expose(Person)
def getperson(self):
p = Person()
p.id = 12
p.firstname = u'Ross'
p.lastname = u'Geler'
return p
def app_factory(global_config, **local_conf):
root = DemoRoot()
root.addprotocol('soap',
tns='http://example.com/demo',
typenamespace='http://example.com/demo/types',
baseURL='http://127.0.0.1:8989/',
)
root.addprotocol('restjson')
return adapt(root)
logging.basicConfig(level=logging.DEBUG)
Add a setperson function to test complex function arguments
|
# coding=utf8
"""
A mini-demo of what wsme can do.
To run it::
python setup.py develop
Then::
paster serve demo.cfg
"""
from wsme import WSRoot, expose, validate
from wsme.wsgi import adapt
import logging
class Person(object):
id = int
firstname = unicode
lastname = unicode
class DemoRoot(WSRoot):
@expose(int)
@validate(int, int)
def multiply(self, a, b):
return a * b
@expose(unicode)
def helloworld(self):
return u"こんにちは世界 (<- Hello World in Japanese !)"
@expose(Person)
def getperson(self):
p = Person()
p.id = 12
p.firstname = u'Ross'
p.lastname = u'Geler'
return p
@expose(Person)
@validate(Person)
def setperson(self, person):
return person
def app_factory(global_config, **local_conf):
root = DemoRoot()
root.addprotocol('soap',
tns='http://example.com/demo',
typenamespace='http://example.com/demo/types',
baseURL='http://127.0.0.1:8989/',
)
root.addprotocol('restjson')
return adapt(root)
logging.basicConfig(level=logging.DEBUG)
|
<commit_before># coding=utf8
"""
A mini-demo of what wsme can do.
To run it::
python setup.py develop
Then::
paster serve demo.cfg
"""
from wsme import *
from wsme.wsgi import adapt
import logging
class Person(object):
id = int
firstname = unicode
lastname = unicode
class DemoRoot(WSRoot):
@expose(int)
@validate(int, int)
def multiply(self, a, b):
return a * b
@expose(unicode)
def helloworld(self):
return u"こんにちは世界 (<- Hello World in Japanese !)"
@expose(Person)
def getperson(self):
p = Person()
p.id = 12
p.firstname = u'Ross'
p.lastname = u'Geler'
return p
def app_factory(global_config, **local_conf):
root = DemoRoot()
root.addprotocol('soap',
tns='http://example.com/demo',
typenamespace='http://example.com/demo/types',
baseURL='http://127.0.0.1:8989/',
)
root.addprotocol('restjson')
return adapt(root)
logging.basicConfig(level=logging.DEBUG)
<commit_msg>Add a setperson function to test complex function arguments<commit_after>
|
# coding=utf8
"""
A mini-demo of what wsme can do.
To run it::
python setup.py develop
Then::
paster serve demo.cfg
"""
from wsme import WSRoot, expose, validate
from wsme.wsgi import adapt
import logging
class Person(object):
id = int
firstname = unicode
lastname = unicode
class DemoRoot(WSRoot):
@expose(int)
@validate(int, int)
def multiply(self, a, b):
return a * b
@expose(unicode)
def helloworld(self):
return u"こんにちは世界 (<- Hello World in Japanese !)"
@expose(Person)
def getperson(self):
p = Person()
p.id = 12
p.firstname = u'Ross'
p.lastname = u'Geler'
return p
@expose(Person)
@validate(Person)
def setperson(self, person):
return person
def app_factory(global_config, **local_conf):
root = DemoRoot()
root.addprotocol('soap',
tns='http://example.com/demo',
typenamespace='http://example.com/demo/types',
baseURL='http://127.0.0.1:8989/',
)
root.addprotocol('restjson')
return adapt(root)
logging.basicConfig(level=logging.DEBUG)
|
# coding=utf8
"""
A mini-demo of what wsme can do.
To run it::
python setup.py develop
Then::
paster serve demo.cfg
"""
from wsme import *
from wsme.wsgi import adapt
import logging
class Person(object):
id = int
firstname = unicode
lastname = unicode
class DemoRoot(WSRoot):
@expose(int)
@validate(int, int)
def multiply(self, a, b):
return a * b
@expose(unicode)
def helloworld(self):
return u"こんにちは世界 (<- Hello World in Japanese !)"
@expose(Person)
def getperson(self):
p = Person()
p.id = 12
p.firstname = u'Ross'
p.lastname = u'Geler'
return p
def app_factory(global_config, **local_conf):
root = DemoRoot()
root.addprotocol('soap',
tns='http://example.com/demo',
typenamespace='http://example.com/demo/types',
baseURL='http://127.0.0.1:8989/',
)
root.addprotocol('restjson')
return adapt(root)
logging.basicConfig(level=logging.DEBUG)
Add a setperson function to test complex function arguments# coding=utf8
"""
A mini-demo of what wsme can do.
To run it::
python setup.py develop
Then::
paster serve demo.cfg
"""
from wsme import WSRoot, expose, validate
from wsme.wsgi import adapt
import logging
class Person(object):
id = int
firstname = unicode
lastname = unicode
class DemoRoot(WSRoot):
@expose(int)
@validate(int, int)
def multiply(self, a, b):
return a * b
@expose(unicode)
def helloworld(self):
return u"こんにちは世界 (<- Hello World in Japanese !)"
@expose(Person)
def getperson(self):
p = Person()
p.id = 12
p.firstname = u'Ross'
p.lastname = u'Geler'
return p
@expose(Person)
@validate(Person)
def setperson(self, person):
return person
def app_factory(global_config, **local_conf):
root = DemoRoot()
root.addprotocol('soap',
tns='http://example.com/demo',
typenamespace='http://example.com/demo/types',
baseURL='http://127.0.0.1:8989/',
)
root.addprotocol('restjson')
return adapt(root)
logging.basicConfig(level=logging.DEBUG)
|
<commit_before># coding=utf8
"""
A mini-demo of what wsme can do.
To run it::
python setup.py develop
Then::
paster serve demo.cfg
"""
from wsme import *
from wsme.wsgi import adapt
import logging
class Person(object):
id = int
firstname = unicode
lastname = unicode
class DemoRoot(WSRoot):
@expose(int)
@validate(int, int)
def multiply(self, a, b):
return a * b
@expose(unicode)
def helloworld(self):
return u"こんにちは世界 (<- Hello World in Japanese !)"
@expose(Person)
def getperson(self):
p = Person()
p.id = 12
p.firstname = u'Ross'
p.lastname = u'Geler'
return p
def app_factory(global_config, **local_conf):
root = DemoRoot()
root.addprotocol('soap',
tns='http://example.com/demo',
typenamespace='http://example.com/demo/types',
baseURL='http://127.0.0.1:8989/',
)
root.addprotocol('restjson')
return adapt(root)
logging.basicConfig(level=logging.DEBUG)
<commit_msg>Add a setperson function to test complex function arguments<commit_after># coding=utf8
"""
A mini-demo of what wsme can do.
To run it::
python setup.py develop
Then::
paster serve demo.cfg
"""
from wsme import WSRoot, expose, validate
from wsme.wsgi import adapt
import logging
class Person(object):
id = int
firstname = unicode
lastname = unicode
class DemoRoot(WSRoot):
@expose(int)
@validate(int, int)
def multiply(self, a, b):
return a * b
@expose(unicode)
def helloworld(self):
return u"こんにちは世界 (<- Hello World in Japanese !)"
@expose(Person)
def getperson(self):
p = Person()
p.id = 12
p.firstname = u'Ross'
p.lastname = u'Geler'
return p
@expose(Person)
@validate(Person)
def setperson(self, person):
return person
def app_factory(global_config, **local_conf):
root = DemoRoot()
root.addprotocol('soap',
tns='http://example.com/demo',
typenamespace='http://example.com/demo/types',
baseURL='http://127.0.0.1:8989/',
)
root.addprotocol('restjson')
return adapt(root)
logging.basicConfig(level=logging.DEBUG)
|
145158b5a1693a831d2d198473d24b9d4ef6e24e
|
sherlock.py
|
sherlock.py
|
# -*- coding: utf-8 -*-
import sys, datetime, getopt
from reddit_user import RedditUser
longopts, shortopts = getopt.getopt(sys.argv[2:], shortopts="", longopts=["file="])
args = dict(longopts)
file_mode = ""
if len(sys.argv) < 2:
sys.exit("Usage: python sherlock.py <username> --file=(read|write)")
if args.has_key("--file") and args["--file"] == "write":
file_mode = "write"
elif args.has_key("--file") and args["--file"] == "read":
file_mode = "read"
start = datetime.datetime.now()
u = RedditUser(sys.argv[1])
if file_mode == "write":
u.save_comments_to_file()
u.process_comments_from_file()
elif file_mode == "read":
u.process_comments_from_file()
else:
u.process_all_comments()
print u
print "Processing complete... %s" % (datetime.datetime.now()-start)
|
# -*- coding: utf-8 -*-
import sys, datetime, getopt
from reddit_user import RedditUser
longopts, shortopts = getopt.getopt(sys.argv[2:], shortopts="", longopts=["file="])
args = dict(longopts)
file_mode = ""
if len(sys.argv) < 2:
sys.exit("Usage: python sherlock.py <username> --file=(read|write)")
if args.has_key("--file") and args["--file"] == "write":
file_mode = "write"
elif args.has_key("--file") and args["--file"] == "read":
file_mode = "read"
start = datetime.datetime.now()
u = RedditUser(sys.argv[1])
print "Processing user %s" % u.username
if file_mode == "write":
u.save_comments_to_file()
u.process_comments_from_file()
elif file_mode == "read":
u.process_comments_from_file()
else:
u.process_all_submissions()
u.process_all_comments()
with open("results/%s.txt" % u.username,"w") as o:
o.write(str(u))
print
print u
print "\nProcessing complete... %s" % (datetime.datetime.now()-start)
|
Write output to text file
|
Write output to text file
|
Python
|
mit
|
orionmelt/sherlock
|
# -*- coding: utf-8 -*-
import sys, datetime, getopt
from reddit_user import RedditUser
longopts, shortopts = getopt.getopt(sys.argv[2:], shortopts="", longopts=["file="])
args = dict(longopts)
file_mode = ""
if len(sys.argv) < 2:
sys.exit("Usage: python sherlock.py <username> --file=(read|write)")
if args.has_key("--file") and args["--file"] == "write":
file_mode = "write"
elif args.has_key("--file") and args["--file"] == "read":
file_mode = "read"
start = datetime.datetime.now()
u = RedditUser(sys.argv[1])
if file_mode == "write":
u.save_comments_to_file()
u.process_comments_from_file()
elif file_mode == "read":
u.process_comments_from_file()
else:
u.process_all_comments()
print u
print "Processing complete... %s" % (datetime.datetime.now()-start)Write output to text file
|
# -*- coding: utf-8 -*-
import sys, datetime, getopt
from reddit_user import RedditUser
longopts, shortopts = getopt.getopt(sys.argv[2:], shortopts="", longopts=["file="])
args = dict(longopts)
file_mode = ""
if len(sys.argv) < 2:
sys.exit("Usage: python sherlock.py <username> --file=(read|write)")
if args.has_key("--file") and args["--file"] == "write":
file_mode = "write"
elif args.has_key("--file") and args["--file"] == "read":
file_mode = "read"
start = datetime.datetime.now()
u = RedditUser(sys.argv[1])
print "Processing user %s" % u.username
if file_mode == "write":
u.save_comments_to_file()
u.process_comments_from_file()
elif file_mode == "read":
u.process_comments_from_file()
else:
u.process_all_submissions()
u.process_all_comments()
with open("results/%s.txt" % u.username,"w") as o:
o.write(str(u))
print
print u
print "\nProcessing complete... %s" % (datetime.datetime.now()-start)
|
<commit_before># -*- coding: utf-8 -*-
import sys, datetime, getopt
from reddit_user import RedditUser
longopts, shortopts = getopt.getopt(sys.argv[2:], shortopts="", longopts=["file="])
args = dict(longopts)
file_mode = ""
if len(sys.argv) < 2:
sys.exit("Usage: python sherlock.py <username> --file=(read|write)")
if args.has_key("--file") and args["--file"] == "write":
file_mode = "write"
elif args.has_key("--file") and args["--file"] == "read":
file_mode = "read"
start = datetime.datetime.now()
u = RedditUser(sys.argv[1])
if file_mode == "write":
u.save_comments_to_file()
u.process_comments_from_file()
elif file_mode == "read":
u.process_comments_from_file()
else:
u.process_all_comments()
print u
print "Processing complete... %s" % (datetime.datetime.now()-start)<commit_msg>Write output to text file<commit_after>
|
# -*- coding: utf-8 -*-
import sys, datetime, getopt
from reddit_user import RedditUser
longopts, shortopts = getopt.getopt(sys.argv[2:], shortopts="", longopts=["file="])
args = dict(longopts)
file_mode = ""
if len(sys.argv) < 2:
sys.exit("Usage: python sherlock.py <username> --file=(read|write)")
if args.has_key("--file") and args["--file"] == "write":
file_mode = "write"
elif args.has_key("--file") and args["--file"] == "read":
file_mode = "read"
start = datetime.datetime.now()
u = RedditUser(sys.argv[1])
print "Processing user %s" % u.username
if file_mode == "write":
u.save_comments_to_file()
u.process_comments_from_file()
elif file_mode == "read":
u.process_comments_from_file()
else:
u.process_all_submissions()
u.process_all_comments()
with open("results/%s.txt" % u.username,"w") as o:
o.write(str(u))
print
print u
print "\nProcessing complete... %s" % (datetime.datetime.now()-start)
|
# -*- coding: utf-8 -*-
import sys, datetime, getopt
from reddit_user import RedditUser
longopts, shortopts = getopt.getopt(sys.argv[2:], shortopts="", longopts=["file="])
args = dict(longopts)
file_mode = ""
if len(sys.argv) < 2:
sys.exit("Usage: python sherlock.py <username> --file=(read|write)")
if args.has_key("--file") and args["--file"] == "write":
file_mode = "write"
elif args.has_key("--file") and args["--file"] == "read":
file_mode = "read"
start = datetime.datetime.now()
u = RedditUser(sys.argv[1])
if file_mode == "write":
u.save_comments_to_file()
u.process_comments_from_file()
elif file_mode == "read":
u.process_comments_from_file()
else:
u.process_all_comments()
print u
print "Processing complete... %s" % (datetime.datetime.now()-start)Write output to text file# -*- coding: utf-8 -*-
import sys, datetime, getopt
from reddit_user import RedditUser
longopts, shortopts = getopt.getopt(sys.argv[2:], shortopts="", longopts=["file="])
args = dict(longopts)
file_mode = ""
if len(sys.argv) < 2:
sys.exit("Usage: python sherlock.py <username> --file=(read|write)")
if args.has_key("--file") and args["--file"] == "write":
file_mode = "write"
elif args.has_key("--file") and args["--file"] == "read":
file_mode = "read"
start = datetime.datetime.now()
u = RedditUser(sys.argv[1])
print "Processing user %s" % u.username
if file_mode == "write":
u.save_comments_to_file()
u.process_comments_from_file()
elif file_mode == "read":
u.process_comments_from_file()
else:
u.process_all_submissions()
u.process_all_comments()
with open("results/%s.txt" % u.username,"w") as o:
o.write(str(u))
print
print u
print "\nProcessing complete... %s" % (datetime.datetime.now()-start)
|
<commit_before># -*- coding: utf-8 -*-
import sys, datetime, getopt
from reddit_user import RedditUser
longopts, shortopts = getopt.getopt(sys.argv[2:], shortopts="", longopts=["file="])
args = dict(longopts)
file_mode = ""
if len(sys.argv) < 2:
sys.exit("Usage: python sherlock.py <username> --file=(read|write)")
if args.has_key("--file") and args["--file"] == "write":
file_mode = "write"
elif args.has_key("--file") and args["--file"] == "read":
file_mode = "read"
start = datetime.datetime.now()
u = RedditUser(sys.argv[1])
if file_mode == "write":
u.save_comments_to_file()
u.process_comments_from_file()
elif file_mode == "read":
u.process_comments_from_file()
else:
u.process_all_comments()
print u
print "Processing complete... %s" % (datetime.datetime.now()-start)<commit_msg>Write output to text file<commit_after># -*- coding: utf-8 -*-
import sys, datetime, getopt
from reddit_user import RedditUser
longopts, shortopts = getopt.getopt(sys.argv[2:], shortopts="", longopts=["file="])
args = dict(longopts)
file_mode = ""
if len(sys.argv) < 2:
sys.exit("Usage: python sherlock.py <username> --file=(read|write)")
if args.has_key("--file") and args["--file"] == "write":
file_mode = "write"
elif args.has_key("--file") and args["--file"] == "read":
file_mode = "read"
start = datetime.datetime.now()
u = RedditUser(sys.argv[1])
print "Processing user %s" % u.username
if file_mode == "write":
u.save_comments_to_file()
u.process_comments_from_file()
elif file_mode == "read":
u.process_comments_from_file()
else:
u.process_all_submissions()
u.process_all_comments()
with open("results/%s.txt" % u.username,"w") as o:
o.write(str(u))
print
print u
print "\nProcessing complete... %s" % (datetime.datetime.now()-start)
|
4c6784bd17113261b95178deadd037ef3c8ea830
|
normandy/recipes/tests/__init__.py
|
normandy/recipes/tests/__init__.py
|
import factory
from normandy.base.tests import FuzzyUnicode
from normandy.recipes.models import Action, Recipe, RecipeAction
class RecipeFactory(factory.DjangoModelFactory):
class Meta:
model = Recipe
name = FuzzyUnicode()
enabled = True
class ActionFactory(factory.DjangoModelFactory):
class Meta:
model = Action
name = FuzzyUnicode()
implementation = FuzzyUnicode(prefix='// ')
class RecipeActionFactory(factory.DjangoModelFactory):
class Meta:
model = RecipeAction
action = factory.SubFactory(ActionFactory)
recipe = factory.SubFactory(RecipeFactory)
|
import factory
from normandy.base.tests import FuzzyUnicode
from normandy.recipes.models import Action, Locale, Recipe, RecipeAction
class RecipeFactory(factory.DjangoModelFactory):
class Meta:
model = Recipe
name = FuzzyUnicode()
enabled = True
@factory.post_generation
def locale(self, create, extracted, **kwargs):
if not create:
return
if extracted and isinstance(extracted, str):
self.locale, _ = Locale.objects.get_or_create(code=extracted)
class ActionFactory(factory.DjangoModelFactory):
class Meta:
model = Action
name = FuzzyUnicode()
implementation = FuzzyUnicode(prefix='// ')
class RecipeActionFactory(factory.DjangoModelFactory):
class Meta:
model = RecipeAction
action = factory.SubFactory(ActionFactory)
recipe = factory.SubFactory(RecipeFactory)
class LocaleFactory(factory.DjangoModelFactory):
class Meta:
model = Locale
|
Fix tests to handle Locale as a foreign key.
|
Fix tests to handle Locale as a foreign key.
|
Python
|
mpl-2.0
|
mozilla/normandy,Osmose/normandy,Osmose/normandy,Osmose/normandy,mozilla/normandy,mozilla/normandy,mozilla/normandy,Osmose/normandy
|
import factory
from normandy.base.tests import FuzzyUnicode
from normandy.recipes.models import Action, Recipe, RecipeAction
class RecipeFactory(factory.DjangoModelFactory):
class Meta:
model = Recipe
name = FuzzyUnicode()
enabled = True
class ActionFactory(factory.DjangoModelFactory):
class Meta:
model = Action
name = FuzzyUnicode()
implementation = FuzzyUnicode(prefix='// ')
class RecipeActionFactory(factory.DjangoModelFactory):
class Meta:
model = RecipeAction
action = factory.SubFactory(ActionFactory)
recipe = factory.SubFactory(RecipeFactory)
Fix tests to handle Locale as a foreign key.
|
import factory
from normandy.base.tests import FuzzyUnicode
from normandy.recipes.models import Action, Locale, Recipe, RecipeAction
class RecipeFactory(factory.DjangoModelFactory):
class Meta:
model = Recipe
name = FuzzyUnicode()
enabled = True
@factory.post_generation
def locale(self, create, extracted, **kwargs):
if not create:
return
if extracted and isinstance(extracted, str):
self.locale, _ = Locale.objects.get_or_create(code=extracted)
class ActionFactory(factory.DjangoModelFactory):
class Meta:
model = Action
name = FuzzyUnicode()
implementation = FuzzyUnicode(prefix='// ')
class RecipeActionFactory(factory.DjangoModelFactory):
class Meta:
model = RecipeAction
action = factory.SubFactory(ActionFactory)
recipe = factory.SubFactory(RecipeFactory)
class LocaleFactory(factory.DjangoModelFactory):
class Meta:
model = Locale
|
<commit_before>import factory
from normandy.base.tests import FuzzyUnicode
from normandy.recipes.models import Action, Recipe, RecipeAction
class RecipeFactory(factory.DjangoModelFactory):
class Meta:
model = Recipe
name = FuzzyUnicode()
enabled = True
class ActionFactory(factory.DjangoModelFactory):
class Meta:
model = Action
name = FuzzyUnicode()
implementation = FuzzyUnicode(prefix='// ')
class RecipeActionFactory(factory.DjangoModelFactory):
class Meta:
model = RecipeAction
action = factory.SubFactory(ActionFactory)
recipe = factory.SubFactory(RecipeFactory)
<commit_msg>Fix tests to handle Locale as a foreign key.<commit_after>
|
import factory
from normandy.base.tests import FuzzyUnicode
from normandy.recipes.models import Action, Locale, Recipe, RecipeAction
class RecipeFactory(factory.DjangoModelFactory):
class Meta:
model = Recipe
name = FuzzyUnicode()
enabled = True
@factory.post_generation
def locale(self, create, extracted, **kwargs):
if not create:
return
if extracted and isinstance(extracted, str):
self.locale, _ = Locale.objects.get_or_create(code=extracted)
class ActionFactory(factory.DjangoModelFactory):
class Meta:
model = Action
name = FuzzyUnicode()
implementation = FuzzyUnicode(prefix='// ')
class RecipeActionFactory(factory.DjangoModelFactory):
class Meta:
model = RecipeAction
action = factory.SubFactory(ActionFactory)
recipe = factory.SubFactory(RecipeFactory)
class LocaleFactory(factory.DjangoModelFactory):
class Meta:
model = Locale
|
import factory
from normandy.base.tests import FuzzyUnicode
from normandy.recipes.models import Action, Recipe, RecipeAction
class RecipeFactory(factory.DjangoModelFactory):
class Meta:
model = Recipe
name = FuzzyUnicode()
enabled = True
class ActionFactory(factory.DjangoModelFactory):
class Meta:
model = Action
name = FuzzyUnicode()
implementation = FuzzyUnicode(prefix='// ')
class RecipeActionFactory(factory.DjangoModelFactory):
class Meta:
model = RecipeAction
action = factory.SubFactory(ActionFactory)
recipe = factory.SubFactory(RecipeFactory)
Fix tests to handle Locale as a foreign key.import factory
from normandy.base.tests import FuzzyUnicode
from normandy.recipes.models import Action, Locale, Recipe, RecipeAction
class RecipeFactory(factory.DjangoModelFactory):
class Meta:
model = Recipe
name = FuzzyUnicode()
enabled = True
@factory.post_generation
def locale(self, create, extracted, **kwargs):
if not create:
return
if extracted and isinstance(extracted, str):
self.locale, _ = Locale.objects.get_or_create(code=extracted)
class ActionFactory(factory.DjangoModelFactory):
class Meta:
model = Action
name = FuzzyUnicode()
implementation = FuzzyUnicode(prefix='// ')
class RecipeActionFactory(factory.DjangoModelFactory):
class Meta:
model = RecipeAction
action = factory.SubFactory(ActionFactory)
recipe = factory.SubFactory(RecipeFactory)
class LocaleFactory(factory.DjangoModelFactory):
class Meta:
model = Locale
|
<commit_before>import factory
from normandy.base.tests import FuzzyUnicode
from normandy.recipes.models import Action, Recipe, RecipeAction
class RecipeFactory(factory.DjangoModelFactory):
class Meta:
model = Recipe
name = FuzzyUnicode()
enabled = True
class ActionFactory(factory.DjangoModelFactory):
class Meta:
model = Action
name = FuzzyUnicode()
implementation = FuzzyUnicode(prefix='// ')
class RecipeActionFactory(factory.DjangoModelFactory):
class Meta:
model = RecipeAction
action = factory.SubFactory(ActionFactory)
recipe = factory.SubFactory(RecipeFactory)
<commit_msg>Fix tests to handle Locale as a foreign key.<commit_after>import factory
from normandy.base.tests import FuzzyUnicode
from normandy.recipes.models import Action, Locale, Recipe, RecipeAction
class RecipeFactory(factory.DjangoModelFactory):
class Meta:
model = Recipe
name = FuzzyUnicode()
enabled = True
@factory.post_generation
def locale(self, create, extracted, **kwargs):
if not create:
return
if extracted and isinstance(extracted, str):
self.locale, _ = Locale.objects.get_or_create(code=extracted)
class ActionFactory(factory.DjangoModelFactory):
class Meta:
model = Action
name = FuzzyUnicode()
implementation = FuzzyUnicode(prefix='// ')
class RecipeActionFactory(factory.DjangoModelFactory):
class Meta:
model = RecipeAction
action = factory.SubFactory(ActionFactory)
recipe = factory.SubFactory(RecipeFactory)
class LocaleFactory(factory.DjangoModelFactory):
class Meta:
model = Locale
|
618dcb272a2b19e0cd3b973e65d74085775cf4dd
|
api/base/exceptions.py
|
api/base/exceptions.py
|
from rest_framework import status
from rest_framework.exceptions import APIException, ParseError
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in top_level_error_keys:
errors.append({key: value})
else:
if isinstance(value, str):
value = [value]
errors.extend([{'source': {'pointer': '/data/attributes/' + key}, 'detail': reason} for reason in value])
else:
if isinstance(message, str):
message = [message]
errors.extend([{'detail': error} for error in message])
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
class InvalidFilterError(ParseError):
"""Raised when client passes an invalid filter in the querystring."""
default_detail = 'Querystring contains an invalid filter.'
|
from rest_framework import status
from rest_framework.exceptions import APIException, ParseError
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response:
message = response.data
if isinstance(message, dict):
for error_key, error_description in message.iteritems():
if error_key in top_level_error_keys:
errors.append({error_key: error_description})
else:
if isinstance(error_description, str):
error_description = [error_description]
errors.extend([{'source': {'pointer': '/data/attributes/' + error_key}, 'detail': reason}
for reason in error_description])
else:
if isinstance(message, str):
message = [message]
errors.extend([{'detail': error} for error in message])
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
class InvalidFilterError(ParseError):
"""Raised when client passes an invalid filter in the querystring."""
default_detail = 'Querystring contains an invalid filter.'
|
Change key and value to more descriptive names
|
Change key and value to more descriptive names
|
Python
|
apache-2.0
|
billyhunt/osf.io,CenterForOpenScience/osf.io,GageGaskins/osf.io,RomanZWang/osf.io,zachjanicki/osf.io,jnayak1/osf.io,alexschiller/osf.io,samchrisinger/osf.io,monikagrabowska/osf.io,binoculars/osf.io,Ghalko/osf.io,doublebits/osf.io,leb2dg/osf.io,TomHeatwole/osf.io,laurenrevere/osf.io,adlius/osf.io,cosenal/osf.io,samanehsan/osf.io,acshi/osf.io,saradbowman/osf.io,chrisseto/osf.io,asanfilippo7/osf.io,adlius/osf.io,sloria/osf.io,erinspace/osf.io,laurenrevere/osf.io,GageGaskins/osf.io,caseyrygt/osf.io,emetsger/osf.io,brianjgeiger/osf.io,alexschiller/osf.io,alexschiller/osf.io,wearpants/osf.io,monikagrabowska/osf.io,arpitar/osf.io,cslzchen/osf.io,wearpants/osf.io,alexschiller/osf.io,arpitar/osf.io,caseyrollins/osf.io,haoyuchen1992/osf.io,rdhyee/osf.io,icereval/osf.io,petermalcolm/osf.io,leb2dg/osf.io,Johnetordoff/osf.io,danielneis/osf.io,arpitar/osf.io,haoyuchen1992/osf.io,icereval/osf.io,mluo613/osf.io,cslzchen/osf.io,mluo613/osf.io,cslzchen/osf.io,brandonPurvis/osf.io,ticklemepierce/osf.io,chrisseto/osf.io,wearpants/osf.io,kch8qx/osf.io,DanielSBrown/osf.io,TomHeatwole/osf.io,RomanZWang/osf.io,samchrisinger/osf.io,chrisseto/osf.io,amyshi188/osf.io,rdhyee/osf.io,doublebits/osf.io,HalcyonChimera/osf.io,samanehsan/osf.io,zamattiac/osf.io,kch8qx/osf.io,ZobairAlijan/osf.io,chennan47/osf.io,felliott/osf.io,zamattiac/osf.io,caseyrollins/osf.io,cwisecarver/osf.io,kwierman/osf.io,cwisecarver/osf.io,Ghalko/osf.io,DanielSBrown/osf.io,brandonPurvis/osf.io,Johnetordoff/osf.io,petermalcolm/osf.io,RomanZWang/osf.io,brianjgeiger/osf.io,caseyrollins/osf.io,Ghalko/osf.io,amyshi188/osf.io,ZobairAlijan/osf.io,mfraezz/osf.io,hmoco/osf.io,mfraezz/osf.io,felliott/osf.io,asanfilippo7/osf.io,saradbowman/osf.io,kch8qx/osf.io,mluke93/osf.io,abought/osf.io,njantrania/osf.io,mluo613/osf.io,cosenal/osf.io,cwisecarver/osf.io,kwierman/osf.io,Johnetordoff/osf.io,SSJohns/osf.io,mluke93/osf.io,Nesiehr/osf.io,abought/osf.io,petermalcolm/osf.io,erinspace/osf.io,caneruguz/osf.io,danielneis/osf.io,KAsante95/osf
.io,mfraezz/osf.io,Nesiehr/osf.io,KAsante95/osf.io,billyhunt/osf.io,mattclark/osf.io,amyshi188/osf.io,petermalcolm/osf.io,samanehsan/osf.io,asanfilippo7/osf.io,pattisdr/osf.io,Ghalko/osf.io,mluke93/osf.io,adlius/osf.io,pattisdr/osf.io,ZobairAlijan/osf.io,caneruguz/osf.io,sloria/osf.io,pattisdr/osf.io,samchrisinger/osf.io,aaxelb/osf.io,CenterForOpenScience/osf.io,TomHeatwole/osf.io,caseyrygt/osf.io,SSJohns/osf.io,emetsger/osf.io,hmoco/osf.io,acshi/osf.io,Nesiehr/osf.io,haoyuchen1992/osf.io,abought/osf.io,cosenal/osf.io,brandonPurvis/osf.io,mfraezz/osf.io,cwisecarver/osf.io,doublebits/osf.io,mluo613/osf.io,crcresearch/osf.io,HalcyonChimera/osf.io,ticklemepierce/osf.io,erinspace/osf.io,KAsante95/osf.io,SSJohns/osf.io,acshi/osf.io,DanielSBrown/osf.io,kwierman/osf.io,zachjanicki/osf.io,felliott/osf.io,HalcyonChimera/osf.io,laurenrevere/osf.io,kch8qx/osf.io,aaxelb/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,caseyrygt/osf.io,TomBaxter/osf.io,ticklemepierce/osf.io,GageGaskins/osf.io,ZobairAlijan/osf.io,GageGaskins/osf.io,haoyuchen1992/osf.io,acshi/osf.io,doublebits/osf.io,mattclark/osf.io,billyhunt/osf.io,jnayak1/osf.io,arpitar/osf.io,sloria/osf.io,danielneis/osf.io,billyhunt/osf.io,Nesiehr/osf.io,caneruguz/osf.io,HalcyonChimera/osf.io,kwierman/osf.io,acshi/osf.io,hmoco/osf.io,zachjanicki/osf.io,brianjgeiger/osf.io,danielneis/osf.io,wearpants/osf.io,chennan47/osf.io,monikagrabowska/osf.io,zachjanicki/osf.io,binoculars/osf.io,TomBaxter/osf.io,amyshi188/osf.io,emetsger/osf.io,jnayak1/osf.io,jnayak1/osf.io,monikagrabowska/osf.io,baylee-d/osf.io,SSJohns/osf.io,Johnetordoff/osf.io,emetsger/osf.io,aaxelb/osf.io,brandonPurvis/osf.io,CenterForOpenScience/osf.io,chrisseto/osf.io,kch8qx/osf.io,cosenal/osf.io,brandonPurvis/osf.io,asanfilippo7/osf.io,mluke93/osf.io,hmoco/osf.io,zamattiac/osf.io,zamattiac/osf.io,njantrania/osf.io,caseyrygt/osf.io,binoculars/osf.io,RomanZWang/osf.io,rdhyee/osf.io,GageGaskins/osf.io,ticklemepierce/osf.io,crcresearch/osf.io,leb2dg/osf.io,DanielSBrow
n/osf.io,TomBaxter/osf.io,brianjgeiger/osf.io,caneruguz/osf.io,icereval/osf.io,samchrisinger/osf.io,KAsante95/osf.io,leb2dg/osf.io,TomHeatwole/osf.io,felliott/osf.io,rdhyee/osf.io,aaxelb/osf.io,mattclark/osf.io,alexschiller/osf.io,njantrania/osf.io,doublebits/osf.io,RomanZWang/osf.io,njantrania/osf.io,mluo613/osf.io,crcresearch/osf.io,baylee-d/osf.io,adlius/osf.io,samanehsan/osf.io,abought/osf.io,monikagrabowska/osf.io,chennan47/osf.io,billyhunt/osf.io,baylee-d/osf.io,KAsante95/osf.io
|
from rest_framework import status
from rest_framework.exceptions import APIException, ParseError
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in top_level_error_keys:
errors.append({key: value})
else:
if isinstance(value, str):
value = [value]
errors.extend([{'source': {'pointer': '/data/attributes/' + key}, 'detail': reason} for reason in value])
else:
if isinstance(message, str):
message = [message]
errors.extend([{'detail': error} for error in message])
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
class InvalidFilterError(ParseError):
"""Raised when client passes an invalid filter in the querystring."""
default_detail = 'Querystring contains an invalid filter.'
Change key and value to more descriptive names
|
from rest_framework import status
from rest_framework.exceptions import APIException, ParseError
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response:
message = response.data
if isinstance(message, dict):
for error_key, error_description in message.iteritems():
if error_key in top_level_error_keys:
errors.append({error_key: error_description})
else:
if isinstance(error_description, str):
error_description = [error_description]
errors.extend([{'source': {'pointer': '/data/attributes/' + error_key}, 'detail': reason}
for reason in error_description])
else:
if isinstance(message, str):
message = [message]
errors.extend([{'detail': error} for error in message])
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
class InvalidFilterError(ParseError):
"""Raised when client passes an invalid filter in the querystring."""
default_detail = 'Querystring contains an invalid filter.'
|
<commit_before>
from rest_framework import status
from rest_framework.exceptions import APIException, ParseError
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in top_level_error_keys:
errors.append({key: value})
else:
if isinstance(value, str):
value = [value]
errors.extend([{'source': {'pointer': '/data/attributes/' + key}, 'detail': reason} for reason in value])
else:
if isinstance(message, str):
message = [message]
errors.extend([{'detail': error} for error in message])
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
class InvalidFilterError(ParseError):
"""Raised when client passes an invalid filter in the querystring."""
default_detail = 'Querystring contains an invalid filter.'
<commit_msg>Change key and value to more descriptive names<commit_after>
|
from rest_framework import status
from rest_framework.exceptions import APIException, ParseError
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response:
message = response.data
if isinstance(message, dict):
for error_key, error_description in message.iteritems():
if error_key in top_level_error_keys:
errors.append({error_key: error_description})
else:
if isinstance(error_description, str):
error_description = [error_description]
errors.extend([{'source': {'pointer': '/data/attributes/' + error_key}, 'detail': reason}
for reason in error_description])
else:
if isinstance(message, str):
message = [message]
errors.extend([{'detail': error} for error in message])
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
class InvalidFilterError(ParseError):
"""Raised when client passes an invalid filter in the querystring."""
default_detail = 'Querystring contains an invalid filter.'
|
from rest_framework import status
from rest_framework.exceptions import APIException, ParseError
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in top_level_error_keys:
errors.append({key: value})
else:
if isinstance(value, str):
value = [value]
errors.extend([{'source': {'pointer': '/data/attributes/' + key}, 'detail': reason} for reason in value])
else:
if isinstance(message, str):
message = [message]
errors.extend([{'detail': error} for error in message])
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
class InvalidFilterError(ParseError):
"""Raised when client passes an invalid filter in the querystring."""
default_detail = 'Querystring contains an invalid filter.'
Change key and value to more descriptive names
from rest_framework import status
from rest_framework.exceptions import APIException, ParseError
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response:
message = response.data
if isinstance(message, dict):
for error_key, error_description in message.iteritems():
if error_key in top_level_error_keys:
errors.append({error_key: error_description})
else:
if isinstance(error_description, str):
error_description = [error_description]
errors.extend([{'source': {'pointer': '/data/attributes/' + error_key}, 'detail': reason}
for reason in error_description])
else:
if isinstance(message, str):
message = [message]
errors.extend([{'detail': error} for error in message])
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
class InvalidFilterError(ParseError):
"""Raised when client passes an invalid filter in the querystring."""
default_detail = 'Querystring contains an invalid filter.'
|
<commit_before>
from rest_framework import status
from rest_framework.exceptions import APIException, ParseError
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in top_level_error_keys:
errors.append({key: value})
else:
if isinstance(value, str):
value = [value]
errors.extend([{'source': {'pointer': '/data/attributes/' + key}, 'detail': reason} for reason in value])
else:
if isinstance(message, str):
message = [message]
errors.extend([{'detail': error} for error in message])
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
class InvalidFilterError(ParseError):
"""Raised when client passes an invalid filter in the querystring."""
default_detail = 'Querystring contains an invalid filter.'
<commit_msg>Change key and value to more descriptive names<commit_after>
from rest_framework import status
from rest_framework.exceptions import APIException, ParseError
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response:
message = response.data
if isinstance(message, dict):
for error_key, error_description in message.iteritems():
if error_key in top_level_error_keys:
errors.append({error_key: error_description})
else:
if isinstance(error_description, str):
error_description = [error_description]
errors.extend([{'source': {'pointer': '/data/attributes/' + error_key}, 'detail': reason}
for reason in error_description])
else:
if isinstance(message, str):
message = [message]
errors.extend([{'detail': error} for error in message])
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
class InvalidFilterError(ParseError):
"""Raised when client passes an invalid filter in the querystring."""
default_detail = 'Querystring contains an invalid filter.'
|
a1e1f0661331f5bf8faa81210eae2cad0c2ad7b3
|
calico_containers/tests/st/__init__.py
|
calico_containers/tests/st/__init__.py
|
import os
import sh
from sh import docker
def setup_package():
"""
Sets up docker images and host containers for running the STs.
"""
# Pull and save each image, so we can use them inside the host containers.
print sh.bash("./build_node.sh").stdout
docker.save("--output", "calico_containers/calico-node.tar", "calico/node")
if not os.path.isfile("busybox.tar"):
docker.pull("busybox:latest")
docker.save("--output", "calico_containers/busybox.tar", "busybox:latest")
# Create the calicoctl binary here so it will be in the volume mounted on the hosts.
print sh.bash("./create_binary.sh")
def teardown_package():
pass
|
import os
import sh
from sh import docker
def setup_package():
"""
Sets up docker images and host containers for running the STs.
"""
# Pull and save each image, so we can use them inside the host containers.
print sh.bash("./build_node.sh").stdout
docker.save("--output", "calico_containers/calico-node.tar", "calico/node")
if not os.path.isfile("calico_containers/busybox.tar"):
docker.pull("busybox:latest")
docker.save("--output", "calico_containers/busybox.tar", "busybox:latest")
# Create the calicoctl binary here so it will be in the volume mounted on the hosts.
print sh.bash("./create_binary.sh")
def teardown_package():
pass
|
Fix bug in file path.
|
Fix bug in file path.
|
Python
|
apache-2.0
|
dalanlan/calico-docker,projectcalico/calico-docker,L-MA/calico-docker,robbrockbank/calicoctl,insequent/calico-docker,TeaBough/calico-docker,Metaswitch/calico-docker,CiscoCloud/calico-docker,frostynova/calico-docker,L-MA/calico-docker,insequent/calico-docker,webwurst/calico-docker,Symmetric/calico-docker,TrimBiggs/calico-containers,projectcalico/calico-docker,alexhersh/calico-docker,robbrockbank/calico-docker,webwurst/calico-docker,quater/calico-containers,tomdee/calico-containers,tomdee/calico-docker,fasaxc/calicoctl,CiscoCloud/calico-docker,TeaBough/calico-docker,robbrockbank/calicoctl,robbrockbank/calico-containers,fasaxc/calicoctl,fasaxc/calico-docker,projectcalico/calico-containers,tomdee/calico-containers,robbrockbank/calico-containers,TrimBiggs/calico-docker,projectcalico/calico-containers,johscheuer/calico-docker,caseydavenport/calico-docker,caseydavenport/calico-containers,TrimBiggs/calico-docker,johscheuer/calico-docker,frostynova/calico-docker,alexhersh/calico-docker,quater/calico-containers,Symmetric/calico-docker,fasaxc/calico-docker,caseydavenport/calico-docker,caseydavenport/calico-containers,tomdee/calico-docker,Metaswitch/calico-docker,dalanlan/calico-docker,caseydavenport/calico-containers,robbrockbank/calico-docker,projectcalico/calico-containers,TrimBiggs/calico-containers
|
import os
import sh
from sh import docker
def setup_package():
"""
Sets up docker images and host containers for running the STs.
"""
# Pull and save each image, so we can use them inside the host containers.
print sh.bash("./build_node.sh").stdout
docker.save("--output", "calico_containers/calico-node.tar", "calico/node")
if not os.path.isfile("busybox.tar"):
docker.pull("busybox:latest")
docker.save("--output", "calico_containers/busybox.tar", "busybox:latest")
# Create the calicoctl binary here so it will be in the volume mounted on the hosts.
print sh.bash("./create_binary.sh")
def teardown_package():
pass
Fix bug in file path.
|
import os
import sh
from sh import docker
def setup_package():
    """
    Sets up docker images and host containers for running the STs.
    """
    # Pull and save each image, so we can use them inside the host containers.
    # build_node.sh builds the calico/node image locally.
    print sh.bash("./build_node.sh").stdout
    docker.save("--output", "calico_containers/calico-node.tar", "calico/node")
    # Only pull/save busybox when the archive is absent; the path checked
    # matches the path written by the save below, so the cache is effective.
    if not os.path.isfile("calico_containers/busybox.tar"):
        docker.pull("busybox:latest")
        docker.save("--output", "calico_containers/busybox.tar", "busybox:latest")
    # Create the calicoctl binary here so it will be in the volume mounted on the hosts.
    print sh.bash("./create_binary.sh")


def teardown_package():
    # No teardown required.
    pass
|
<commit_before>import os
import sh
from sh import docker
def setup_package():
    """
    Sets up docker images and host containers for running the STs.
    """
    # Pull and save each image, so we can use them inside the host containers.
    print sh.bash("./build_node.sh").stdout
    docker.save("--output", "calico_containers/calico-node.tar", "calico/node")
    # NOTE(review): this checks "busybox.tar" in the CWD, but the save below
    # writes "calico_containers/busybox.tar" — the cached archive is never
    # detected, so busybox is re-pulled on every run.
    if not os.path.isfile("busybox.tar"):
        docker.pull("busybox:latest")
        docker.save("--output", "calico_containers/busybox.tar", "busybox:latest")
    # Create the calicoctl binary here so it will be in the volume mounted on the hosts.
    print sh.bash("./create_binary.sh")


def teardown_package():
    pass
<commit_msg>Fix bug in file path.<commit_after>
|
import os
import sh
from sh import docker
def setup_package():
"""
Sets up docker images and host containers for running the STs.
"""
# Pull and save each image, so we can use them inside the host containers.
print sh.bash("./build_node.sh").stdout
docker.save("--output", "calico_containers/calico-node.tar", "calico/node")
if not os.path.isfile("calico_containers/busybox.tar"):
docker.pull("busybox:latest")
docker.save("--output", "calico_containers/busybox.tar", "busybox:latest")
# Create the calicoctl binary here so it will be in the volume mounted on the hosts.
print sh.bash("./create_binary.sh")
def teardown_package():
pass
|
import os
import sh
from sh import docker
def setup_package():
"""
Sets up docker images and host containers for running the STs.
"""
# Pull and save each image, so we can use them inside the host containers.
print sh.bash("./build_node.sh").stdout
docker.save("--output", "calico_containers/calico-node.tar", "calico/node")
if not os.path.isfile("busybox.tar"):
docker.pull("busybox:latest")
docker.save("--output", "calico_containers/busybox.tar", "busybox:latest")
# Create the calicoctl binary here so it will be in the volume mounted on the hosts.
print sh.bash("./create_binary.sh")
def teardown_package():
pass
Fix bug in file path.import os
import sh
from sh import docker
def setup_package():
"""
Sets up docker images and host containers for running the STs.
"""
# Pull and save each image, so we can use them inside the host containers.
print sh.bash("./build_node.sh").stdout
docker.save("--output", "calico_containers/calico-node.tar", "calico/node")
if not os.path.isfile("calico_containers/busybox.tar"):
docker.pull("busybox:latest")
docker.save("--output", "calico_containers/busybox.tar", "busybox:latest")
# Create the calicoctl binary here so it will be in the volume mounted on the hosts.
print sh.bash("./create_binary.sh")
def teardown_package():
pass
|
<commit_before>import os
import sh
from sh import docker
def setup_package():
"""
Sets up docker images and host containers for running the STs.
"""
# Pull and save each image, so we can use them inside the host containers.
print sh.bash("./build_node.sh").stdout
docker.save("--output", "calico_containers/calico-node.tar", "calico/node")
if not os.path.isfile("busybox.tar"):
docker.pull("busybox:latest")
docker.save("--output", "calico_containers/busybox.tar", "busybox:latest")
# Create the calicoctl binary here so it will be in the volume mounted on the hosts.
print sh.bash("./create_binary.sh")
def teardown_package():
pass
<commit_msg>Fix bug in file path.<commit_after>import os
import sh
from sh import docker
def setup_package():
"""
Sets up docker images and host containers for running the STs.
"""
# Pull and save each image, so we can use them inside the host containers.
print sh.bash("./build_node.sh").stdout
docker.save("--output", "calico_containers/calico-node.tar", "calico/node")
if not os.path.isfile("calico_containers/busybox.tar"):
docker.pull("busybox:latest")
docker.save("--output", "calico_containers/busybox.tar", "busybox:latest")
# Create the calicoctl binary here so it will be in the volume mounted on the hosts.
print sh.bash("./create_binary.sh")
def teardown_package():
pass
|
934a2f1da43cd0fbcc6a074c70c73406dfc2ad14
|
gsensors/databases.py
|
gsensors/databases.py
|
#-*- coding:utf-8 -*-
import sys
import logging
from influxdb import InfluxDBClient
class InfluxDBPublish(object):
    """Callable action that writes a measured value to an InfluxDB series."""

    def __init__(self, influxdb, measurement, tags):
        """Bind the client, the target measurement name and the static tags."""
        assert isinstance(influxdb, InfluxDBClient)
        self.influxdb = influxdb
        self.measurement = measurement
        self.tags = tags
        self._logger = logging.getLogger("gsensors.InfluxDBPublish")

    def __call__(self, source, value):
        #TODO what when error ?
        # One point per call: the static tags plus a single "value" field.
        point = {
            "measurement": self.measurement,
            "tags": self.tags,
            "fields": {"value": value},
        }
        self.influxdb.write_points([point])
        self._logger.debug("Write for measurement '%s'" % self.measurement)
|
#-*- coding:utf-8 -*-
import sys
import logging
from influxdb import InfluxDBClient as OriginalInfluxDBClient
class InfluxDBClient(OriginalInfluxDBClient):
    """Drop-in InfluxDB client with a factory for publish actions."""

    def Publish(self, measurement, tags):
        # Build a callable publish action bound to this client.
        return InfluxDBPublish(self, measurement, tags)
class InfluxDBPublish(object):
    """Callable that writes a (source, value) update as an InfluxDB point."""

    def __init__(self, influxdb, measurement, tags):
        # Only the InfluxDBClient subclass defined above is accepted.
        assert(isinstance(influxdb, InfluxDBClient))
        self.influxdb = influxdb
        self.tags = tags
        self.measurement = measurement
        self._logger = logging.getLogger("gsensors.InfluxDBPublish")

    def __call__(self, source, value):
        #TODO what when error ?
        # One point per call: static tags plus the single "value" field.
        json_body = [
            {
                "measurement": self.measurement,
                "tags": self.tags,
                "fields": {
                    "value": value
                }
            }
        ]
        self.influxdb.write_points(json_body)
        self._logger.debug("Write for measurement '%s'" % self.measurement)
|
Add publish action helper for influxdb
|
Add publish action helper for influxdb
|
Python
|
agpl-3.0
|
enavarro222/gsensors
|
#-*- coding:utf-8 -*-
import sys
import logging
from influxdb import InfluxDBClient
class InfluxDBPublish(object):
def __init__(self, influxdb, measurement, tags):
assert(isinstance(influxdb, InfluxDBClient))
self.influxdb = influxdb
self.tags = tags
self.measurement = measurement
self._logger = logging.getLogger("gsensors.InfluxDBPublish")
def __call__(self, source, value):
#TODO what when error ?
json_body = [
{
"measurement": self.measurement,
"tags": self.tags,
"fields": {
"value": value
}
}
]
self.influxdb.write_points(json_body)
self._logger.debug("Write for measurement '%s'" % self.measurement)
Add pubish action helper for influxdb
|
#-*- coding:utf-8 -*-
import sys
import logging
from influxdb import InfluxDBClient as OriginalInfluxDBClient
class InfluxDBClient(OriginalInfluxDBClient):
def Publish(self, measurement, tags):
return InfluxDBPublish(self, measurement, tags)
class InfluxDBPublish(object):
def __init__(self, influxdb, measurement, tags):
assert(isinstance(influxdb, InfluxDBClient))
self.influxdb = influxdb
self.tags = tags
self.measurement = measurement
self._logger = logging.getLogger("gsensors.InfluxDBPublish")
def __call__(self, source, value):
#TODO what when error ?
json_body = [
{
"measurement": self.measurement,
"tags": self.tags,
"fields": {
"value": value
}
}
]
self.influxdb.write_points(json_body)
self._logger.debug("Write for measurement '%s'" % self.measurement)
|
<commit_before>#-*- coding:utf-8 -*-
import sys
import logging
from influxdb import InfluxDBClient
class InfluxDBPublish(object):
def __init__(self, influxdb, measurement, tags):
assert(isinstance(influxdb, InfluxDBClient))
self.influxdb = influxdb
self.tags = tags
self.measurement = measurement
self._logger = logging.getLogger("gsensors.InfluxDBPublish")
def __call__(self, source, value):
#TODO what when error ?
json_body = [
{
"measurement": self.measurement,
"tags": self.tags,
"fields": {
"value": value
}
}
]
self.influxdb.write_points(json_body)
self._logger.debug("Write for measurement '%s'" % self.measurement)
<commit_msg>Add pubish action helper for influxdb<commit_after>
|
#-*- coding:utf-8 -*-
import sys
import logging
from influxdb import InfluxDBClient as OriginalInfluxDBClient
class InfluxDBClient(OriginalInfluxDBClient):
def Publish(self, measurement, tags):
return InfluxDBPublish(self, measurement, tags)
class InfluxDBPublish(object):
def __init__(self, influxdb, measurement, tags):
assert(isinstance(influxdb, InfluxDBClient))
self.influxdb = influxdb
self.tags = tags
self.measurement = measurement
self._logger = logging.getLogger("gsensors.InfluxDBPublish")
def __call__(self, source, value):
#TODO what when error ?
json_body = [
{
"measurement": self.measurement,
"tags": self.tags,
"fields": {
"value": value
}
}
]
self.influxdb.write_points(json_body)
self._logger.debug("Write for measurement '%s'" % self.measurement)
|
#-*- coding:utf-8 -*-
import sys
import logging
from influxdb import InfluxDBClient
class InfluxDBPublish(object):
def __init__(self, influxdb, measurement, tags):
assert(isinstance(influxdb, InfluxDBClient))
self.influxdb = influxdb
self.tags = tags
self.measurement = measurement
self._logger = logging.getLogger("gsensors.InfluxDBPublish")
def __call__(self, source, value):
#TODO what when error ?
json_body = [
{
"measurement": self.measurement,
"tags": self.tags,
"fields": {
"value": value
}
}
]
self.influxdb.write_points(json_body)
self._logger.debug("Write for measurement '%s'" % self.measurement)
Add pubish action helper for influxdb#-*- coding:utf-8 -*-
import sys
import logging
from influxdb import InfluxDBClient as OriginalInfluxDBClient
class InfluxDBClient(OriginalInfluxDBClient):
def Publish(self, measurement, tags):
return InfluxDBPublish(self, measurement, tags)
class InfluxDBPublish(object):
def __init__(self, influxdb, measurement, tags):
assert(isinstance(influxdb, InfluxDBClient))
self.influxdb = influxdb
self.tags = tags
self.measurement = measurement
self._logger = logging.getLogger("gsensors.InfluxDBPublish")
def __call__(self, source, value):
#TODO what when error ?
json_body = [
{
"measurement": self.measurement,
"tags": self.tags,
"fields": {
"value": value
}
}
]
self.influxdb.write_points(json_body)
self._logger.debug("Write for measurement '%s'" % self.measurement)
|
<commit_before>#-*- coding:utf-8 -*-
import sys
import logging
from influxdb import InfluxDBClient
class InfluxDBPublish(object):
def __init__(self, influxdb, measurement, tags):
assert(isinstance(influxdb, InfluxDBClient))
self.influxdb = influxdb
self.tags = tags
self.measurement = measurement
self._logger = logging.getLogger("gsensors.InfluxDBPublish")
def __call__(self, source, value):
#TODO what when error ?
json_body = [
{
"measurement": self.measurement,
"tags": self.tags,
"fields": {
"value": value
}
}
]
self.influxdb.write_points(json_body)
self._logger.debug("Write for measurement '%s'" % self.measurement)
<commit_msg>Add pubish action helper for influxdb<commit_after>#-*- coding:utf-8 -*-
import sys
import logging
from influxdb import InfluxDBClient as OriginalInfluxDBClient
class InfluxDBClient(OriginalInfluxDBClient):
def Publish(self, measurement, tags):
return InfluxDBPublish(self, measurement, tags)
class InfluxDBPublish(object):
def __init__(self, influxdb, measurement, tags):
assert(isinstance(influxdb, InfluxDBClient))
self.influxdb = influxdb
self.tags = tags
self.measurement = measurement
self._logger = logging.getLogger("gsensors.InfluxDBPublish")
def __call__(self, source, value):
#TODO what when error ?
json_body = [
{
"measurement": self.measurement,
"tags": self.tags,
"fields": {
"value": value
}
}
]
self.influxdb.write_points(json_body)
self._logger.debug("Write for measurement '%s'" % self.measurement)
|
4aeb2496eb02e130b7dbc37baef787669f8dd1e7
|
typesetter/typesetter.py
|
typesetter/typesetter.py
|
from flask import Flask, render_template, jsonify
app = Flask(__name__)
app.config.update(
JSONIFY_PRETTYPRINT_REGULAR=False,
)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/api/search/<fragment>')
def search(fragment):
    """Return a JSON list of words containing *fragment*, each with a size category."""
    results = []
    # Scans the word list file on every request; one word per line.
    with open('typesetter/data/words.txt') as words:
        for word in words:
            word = word.strip('\n')
            # Substring match (case-sensitive).
            if fragment in word:
                results.append({
                    'word': word,
                    'category': classify(word),
                })
    return jsonify(results)
def classify(word):
    """Bucket *word* by length: <7 is 'short', 7-9 is 'medium', 10+ is 'long'."""
    n = len(word)
    if n >= 10:
        return 'long'
    return 'short' if n < 7 else 'medium'
|
from flask import Flask, render_template, jsonify
app = Flask(__name__)
app.config.update(
JSONIFY_PRETTYPRINT_REGULAR=False,
)
# Read in the entire wordlist at startup and keep it in memory.
# Optimization for improving search response time.
with open('typesetter/data/words.txt') as f:
WORDS = f.read().split('\n')
@app.route('/')
def index():
return render_template('index.html')
@app.route('/api/search/<fragment>')
def search(fragment):
results = []
for word in WORDS:
if fragment in word:
results.append({
'word': word,
'category': classify(word),
})
return jsonify(results)
def classify(word):
length = len(word)
if length < 7:
return 'short'
elif length < 10:
return 'medium'
else:
return 'long'
|
Reduce search response time by keeping wordlist in memory
|
Reduce search response time by keeping wordlist in memory
|
Python
|
mit
|
rlucioni/typesetter,rlucioni/typesetter,rlucioni/typesetter
|
from flask import Flask, render_template, jsonify
app = Flask(__name__)
app.config.update(
JSONIFY_PRETTYPRINT_REGULAR=False,
)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/api/search/<fragment>')
def search(fragment):
results = []
with open('typesetter/data/words.txt') as words:
for word in words:
word = word.strip('\n')
if fragment in word:
results.append({
'word': word,
'category': classify(word),
})
return jsonify(results)
def classify(word):
length = len(word)
if length < 7:
return 'short'
elif length < 10:
return 'medium'
else:
return 'long'
Reduce search response time by keeping wordlist in memory
|
from flask import Flask, render_template, jsonify
app = Flask(__name__)
app.config.update(
JSONIFY_PRETTYPRINT_REGULAR=False,
)
# Read in the entire wordlist at startup and keep it in memory.
# Optimization for improving search response time.
with open('typesetter/data/words.txt') as f:
WORDS = f.read().split('\n')
@app.route('/')
def index():
return render_template('index.html')
@app.route('/api/search/<fragment>')
def search(fragment):
results = []
for word in WORDS:
if fragment in word:
results.append({
'word': word,
'category': classify(word),
})
return jsonify(results)
def classify(word):
length = len(word)
if length < 7:
return 'short'
elif length < 10:
return 'medium'
else:
return 'long'
|
<commit_before>from flask import Flask, render_template, jsonify
app = Flask(__name__)
app.config.update(
JSONIFY_PRETTYPRINT_REGULAR=False,
)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/api/search/<fragment>')
def search(fragment):
results = []
with open('typesetter/data/words.txt') as words:
for word in words:
word = word.strip('\n')
if fragment in word:
results.append({
'word': word,
'category': classify(word),
})
return jsonify(results)
def classify(word):
length = len(word)
if length < 7:
return 'short'
elif length < 10:
return 'medium'
else:
return 'long'
<commit_msg>Reduce search response time by keeping wordlist in memory<commit_after>
|
from flask import Flask, render_template, jsonify
app = Flask(__name__)
app.config.update(
JSONIFY_PRETTYPRINT_REGULAR=False,
)
# Read in the entire wordlist at startup and keep it in memory.
# Optimization for improving search response time.
with open('typesetter/data/words.txt') as f:
WORDS = f.read().split('\n')
@app.route('/')
def index():
return render_template('index.html')
@app.route('/api/search/<fragment>')
def search(fragment):
results = []
for word in WORDS:
if fragment in word:
results.append({
'word': word,
'category': classify(word),
})
return jsonify(results)
def classify(word):
length = len(word)
if length < 7:
return 'short'
elif length < 10:
return 'medium'
else:
return 'long'
|
from flask import Flask, render_template, jsonify
app = Flask(__name__)
app.config.update(
JSONIFY_PRETTYPRINT_REGULAR=False,
)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/api/search/<fragment>')
def search(fragment):
results = []
with open('typesetter/data/words.txt') as words:
for word in words:
word = word.strip('\n')
if fragment in word:
results.append({
'word': word,
'category': classify(word),
})
return jsonify(results)
def classify(word):
length = len(word)
if length < 7:
return 'short'
elif length < 10:
return 'medium'
else:
return 'long'
Reduce search response time by keeping wordlist in memoryfrom flask import Flask, render_template, jsonify
app = Flask(__name__)
app.config.update(
JSONIFY_PRETTYPRINT_REGULAR=False,
)
# Read in the entire wordlist at startup and keep it in memory.
# Optimization for improving search response time.
with open('typesetter/data/words.txt') as f:
WORDS = f.read().split('\n')
@app.route('/')
def index():
return render_template('index.html')
@app.route('/api/search/<fragment>')
def search(fragment):
results = []
for word in WORDS:
if fragment in word:
results.append({
'word': word,
'category': classify(word),
})
return jsonify(results)
def classify(word):
length = len(word)
if length < 7:
return 'short'
elif length < 10:
return 'medium'
else:
return 'long'
|
<commit_before>from flask import Flask, render_template, jsonify
app = Flask(__name__)
app.config.update(
JSONIFY_PRETTYPRINT_REGULAR=False,
)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/api/search/<fragment>')
def search(fragment):
results = []
with open('typesetter/data/words.txt') as words:
for word in words:
word = word.strip('\n')
if fragment in word:
results.append({
'word': word,
'category': classify(word),
})
return jsonify(results)
def classify(word):
length = len(word)
if length < 7:
return 'short'
elif length < 10:
return 'medium'
else:
return 'long'
<commit_msg>Reduce search response time by keeping wordlist in memory<commit_after>from flask import Flask, render_template, jsonify
app = Flask(__name__)
app.config.update(
JSONIFY_PRETTYPRINT_REGULAR=False,
)
# Read in the entire wordlist at startup and keep it in memory.
# Optimization for improving search response time.
with open('typesetter/data/words.txt') as f:
WORDS = f.read().split('\n')
@app.route('/')
def index():
return render_template('index.html')
@app.route('/api/search/<fragment>')
def search(fragment):
results = []
for word in WORDS:
if fragment in word:
results.append({
'word': word,
'category': classify(word),
})
return jsonify(results)
def classify(word):
length = len(word)
if length < 7:
return 'short'
elif length < 10:
return 'medium'
else:
return 'long'
|
dac4ef0e30fb5dd26ef41eb74854919cf5295450
|
subprocrunner/error.py
|
subprocrunner/error.py
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
class CommandError(Exception):
    """Error raised when an executed command fails.

    Accepts an optional ``errno`` keyword argument; every other
    positional/keyword argument is forwarded to ``Exception``.
    """

    def __init__(self, *args, **kwargs):
        error_code = kwargs.pop("errno", None)
        self.__errno = error_code
        super(CommandError, self).__init__(*args, **kwargs)

    @property
    def errno(self):
        """Numeric error code supplied at construction, or None."""
        return self.__errno
class InvalidCommandError(CommandError):
    # Raised for malformed/invalid commands.
    # Deprecate in the future
    pass


class CommandNotFoundError(CommandError):
    # Raised when the executable cannot be located.
    # Deprecate in the future
    pass
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
class CommandError(Exception):
@property
def cmd(self):
return self.__cmd
@property
def errno(self):
return self.__errno
def __init__(self, *args, **kwargs):
self.__cmd = kwargs.pop("cmd", None)
self.__errno = kwargs.pop("errno", None)
super(CommandError, self).__init__(*args, **kwargs)
class InvalidCommandError(CommandError):
# Deprecate in the future
pass
class CommandNotFoundError(CommandError):
# Deprecate in the future
pass
|
Add a property to an exception class
|
Add a property to an exception class
|
Python
|
mit
|
thombashi/subprocrunner,thombashi/subprocrunner
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
class CommandError(Exception):
@property
def errno(self):
return self.__errno
def __init__(self, *args, **kwargs):
self.__errno = kwargs.pop("errno", None)
super(CommandError, self).__init__(*args, **kwargs)
class InvalidCommandError(CommandError):
# Deprecate in the future
pass
class CommandNotFoundError(CommandError):
# Deprecate in the future
pass
Add a property to an exception class
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
class CommandError(Exception):
    """Base command error carrying the failed command string and error code."""

    @property
    def cmd(self):
        # Command string supplied at construction, or None.
        return self.__cmd

    @property
    def errno(self):
        # Numeric error code supplied at construction, or None.
        return self.__errno

    def __init__(self, *args, **kwargs):
        # "cmd" and "errno" are extracted; all other args go to Exception.
        self.__cmd = kwargs.pop("cmd", None)
        self.__errno = kwargs.pop("errno", None)
        super(CommandError, self).__init__(*args, **kwargs)
class InvalidCommandError(CommandError):
# Deprecate in the future
pass
class CommandNotFoundError(CommandError):
# Deprecate in the future
pass
|
<commit_before># encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
class CommandError(Exception):
@property
def errno(self):
return self.__errno
def __init__(self, *args, **kwargs):
self.__errno = kwargs.pop("errno", None)
super(CommandError, self).__init__(*args, **kwargs)
class InvalidCommandError(CommandError):
# Deprecate in the future
pass
class CommandNotFoundError(CommandError):
# Deprecate in the future
pass
<commit_msg>Add a property to an exception class<commit_after>
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
class CommandError(Exception):
@property
def cmd(self):
return self.__cmd
@property
def errno(self):
return self.__errno
def __init__(self, *args, **kwargs):
self.__cmd = kwargs.pop("cmd", None)
self.__errno = kwargs.pop("errno", None)
super(CommandError, self).__init__(*args, **kwargs)
class InvalidCommandError(CommandError):
# Deprecate in the future
pass
class CommandNotFoundError(CommandError):
# Deprecate in the future
pass
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
class CommandError(Exception):
@property
def errno(self):
return self.__errno
def __init__(self, *args, **kwargs):
self.__errno = kwargs.pop("errno", None)
super(CommandError, self).__init__(*args, **kwargs)
class InvalidCommandError(CommandError):
# Deprecate in the future
pass
class CommandNotFoundError(CommandError):
# Deprecate in the future
pass
Add a property to an exception class# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
class CommandError(Exception):
@property
def cmd(self):
return self.__cmd
@property
def errno(self):
return self.__errno
def __init__(self, *args, **kwargs):
self.__cmd = kwargs.pop("cmd", None)
self.__errno = kwargs.pop("errno", None)
super(CommandError, self).__init__(*args, **kwargs)
class InvalidCommandError(CommandError):
# Deprecate in the future
pass
class CommandNotFoundError(CommandError):
# Deprecate in the future
pass
|
<commit_before># encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
class CommandError(Exception):
@property
def errno(self):
return self.__errno
def __init__(self, *args, **kwargs):
self.__errno = kwargs.pop("errno", None)
super(CommandError, self).__init__(*args, **kwargs)
class InvalidCommandError(CommandError):
# Deprecate in the future
pass
class CommandNotFoundError(CommandError):
# Deprecate in the future
pass
<commit_msg>Add a property to an exception class<commit_after># encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
class CommandError(Exception):
@property
def cmd(self):
return self.__cmd
@property
def errno(self):
return self.__errno
def __init__(self, *args, **kwargs):
self.__cmd = kwargs.pop("cmd", None)
self.__errno = kwargs.pop("errno", None)
super(CommandError, self).__init__(*args, **kwargs)
class InvalidCommandError(CommandError):
# Deprecate in the future
pass
class CommandNotFoundError(CommandError):
# Deprecate in the future
pass
|
c17b8e6141d2832b9920eb143de2937993fb8865
|
linguist/models/base.py
|
linguist/models/base.py
|
# -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from .. import settings
@python_2_unicode_compatible
class Translation(models.Model):
    """
    A Translation.

    One row per (identifier, object, locale, field) combination, holding
    the translated content for that field.
    """

    # Registered model identifier this translation belongs to.
    identifier = models.CharField(
        max_length=100,
        verbose_name=_('identifier'),
        help_text=_('The registered model identifier.'))

    # PK of the translated object (no FK: model is resolved via identifier).
    object_id = models.IntegerField(
        verbose_name=_('The object ID'),
        help_text=_('The object ID of this translation'))

    locale = models.CharField(
        max_length=10,
        verbose_name=_('locale'),
        choices=settings.SUPPORTED_LOCALES,
        default=settings.DEFAULT_LOCALE,
        help_text=_('The locale for this translation'))

    field_name = models.CharField(
        max_length=100,
        verbose_name=_('field name'),
        help_text=_('The model field name for this translation.'))

    content = models.TextField(
        verbose_name=_('content'),
        null=True,
        help_text=_('The translated content for the field.'))

    class Meta:
        abstract = True
        app_label = 'linguist'
        verbose_name = _('translation')
        verbose_name_plural = _('translations')
        # One translation per (model, object, locale, field).
        unique_together = (('identifier', 'object_id', 'locale', 'field_name'),)

    def __str__(self):
        return '%s:%d:%s:%s' % (
            self.identifier,
            self.object_id,
            self.field_name,
            self.locale)
|
# -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from .. import settings
@python_2_unicode_compatible
class Translation(models.Model):
    """
    A Translation.

    One row per (identifier, object, language, field) combination, holding
    the translated content for that field.
    """

    # Registered model identifier this translation belongs to.
    identifier = models.CharField(
        max_length=100,
        verbose_name=_('identifier'),
        help_text=_('The registered model identifier.'))

    # PK of the translated object (no FK: model is resolved via identifier).
    object_id = models.IntegerField(
        verbose_name=_('The object ID'),
        help_text=_('The object ID of this translation'))

    language = models.CharField(
        max_length=10,
        # Fixed: the field was renamed from "locale" to "language" but the
        # verbose_name was left saying "locale".
        verbose_name=_('language'),
        choices=settings.SUPPORTED_LANGUAGES,
        default=settings.DEFAULT_LANGUAGE,
        help_text=_('The language for this translation'))

    field_name = models.CharField(
        max_length=100,
        verbose_name=_('field name'),
        help_text=_('The model field name for this translation.'))

    content = models.TextField(
        verbose_name=_('content'),
        null=True,
        help_text=_('The translated content for the field.'))

    class Meta:
        abstract = True
        app_label = 'linguist'
        verbose_name = _('translation')
        verbose_name_plural = _('translations')
        # One translation per (model, object, language, field).
        unique_together = (('identifier', 'object_id', 'language', 'field_name'),)

    def __str__(self):
        return '%s:%d:%s:%s' % (
            self.identifier,
            self.object_id,
            self.field_name,
            self.language)
|
Rename locale field to language.
|
Rename locale field to language.
|
Python
|
mit
|
ulule/django-linguist
|
# -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from .. import settings
@python_2_unicode_compatible
class Translation(models.Model):
"""
A Translation.
"""
identifier = models.CharField(
max_length=100,
verbose_name=_('identifier'),
help_text=_('The registered model identifier.'))
object_id = models.IntegerField(
verbose_name=_('The object ID'),
help_text=_('The object ID of this translation'))
locale = models.CharField(
max_length=10,
verbose_name=_('locale'),
choices=settings.SUPPORTED_LOCALES,
default=settings.DEFAULT_LOCALE,
help_text=_('The locale for this translation'))
field_name = models.CharField(
max_length=100,
verbose_name=_('field name'),
help_text=_('The model field name for this translation.'))
content = models.TextField(
verbose_name=_('content'),
null=True,
help_text=_('The translated content for the field.'))
class Meta:
abstract = True
app_label = 'linguist'
verbose_name = _('translation')
verbose_name_plural = _('translations')
unique_together = (('identifier', 'object_id', 'locale', 'field_name'),)
def __str__(self):
return '%s:%d:%s:%s' % (
self.identifier,
self.object_id,
self.field_name,
self.locale)
Rename locale field to language.
|
# -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from .. import settings
@python_2_unicode_compatible
class Translation(models.Model):
"""
A Translation.
"""
identifier = models.CharField(
max_length=100,
verbose_name=_('identifier'),
help_text=_('The registered model identifier.'))
object_id = models.IntegerField(
verbose_name=_('The object ID'),
help_text=_('The object ID of this translation'))
language = models.CharField(
max_length=10,
verbose_name=_('locale'),
choices=settings.SUPPORTED_LANGUAGES,
default=settings.DEFAULT_LANGUAGE,
help_text=_('The language for this translation'))
field_name = models.CharField(
max_length=100,
verbose_name=_('field name'),
help_text=_('The model field name for this translation.'))
content = models.TextField(
verbose_name=_('content'),
null=True,
help_text=_('The translated content for the field.'))
class Meta:
abstract = True
app_label = 'linguist'
verbose_name = _('translation')
verbose_name_plural = _('translations')
unique_together = (('identifier', 'object_id', 'language', 'field_name'),)
def __str__(self):
return '%s:%d:%s:%s' % (
self.identifier,
self.object_id,
self.field_name,
self.language)
|
<commit_before># -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from .. import settings
@python_2_unicode_compatible
class Translation(models.Model):
"""
A Translation.
"""
identifier = models.CharField(
max_length=100,
verbose_name=_('identifier'),
help_text=_('The registered model identifier.'))
object_id = models.IntegerField(
verbose_name=_('The object ID'),
help_text=_('The object ID of this translation'))
locale = models.CharField(
max_length=10,
verbose_name=_('locale'),
choices=settings.SUPPORTED_LOCALES,
default=settings.DEFAULT_LOCALE,
help_text=_('The locale for this translation'))
field_name = models.CharField(
max_length=100,
verbose_name=_('field name'),
help_text=_('The model field name for this translation.'))
content = models.TextField(
verbose_name=_('content'),
null=True,
help_text=_('The translated content for the field.'))
class Meta:
abstract = True
app_label = 'linguist'
verbose_name = _('translation')
verbose_name_plural = _('translations')
unique_together = (('identifier', 'object_id', 'locale', 'field_name'),)
def __str__(self):
return '%s:%d:%s:%s' % (
self.identifier,
self.object_id,
self.field_name,
self.locale)
<commit_msg>Rename locale field to language.<commit_after>
|
# -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from .. import settings
@python_2_unicode_compatible
class Translation(models.Model):
"""
A Translation.
"""
identifier = models.CharField(
max_length=100,
verbose_name=_('identifier'),
help_text=_('The registered model identifier.'))
object_id = models.IntegerField(
verbose_name=_('The object ID'),
help_text=_('The object ID of this translation'))
language = models.CharField(
max_length=10,
verbose_name=_('locale'),
choices=settings.SUPPORTED_LANGUAGES,
default=settings.DEFAULT_LANGUAGE,
help_text=_('The language for this translation'))
field_name = models.CharField(
max_length=100,
verbose_name=_('field name'),
help_text=_('The model field name for this translation.'))
content = models.TextField(
verbose_name=_('content'),
null=True,
help_text=_('The translated content for the field.'))
class Meta:
abstract = True
app_label = 'linguist'
verbose_name = _('translation')
verbose_name_plural = _('translations')
unique_together = (('identifier', 'object_id', 'language', 'field_name'),)
def __str__(self):
return '%s:%d:%s:%s' % (
self.identifier,
self.object_id,
self.field_name,
self.language)
|
# -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from .. import settings
@python_2_unicode_compatible
class Translation(models.Model):
"""
A Translation.
"""
identifier = models.CharField(
max_length=100,
verbose_name=_('identifier'),
help_text=_('The registered model identifier.'))
object_id = models.IntegerField(
verbose_name=_('The object ID'),
help_text=_('The object ID of this translation'))
locale = models.CharField(
max_length=10,
verbose_name=_('locale'),
choices=settings.SUPPORTED_LOCALES,
default=settings.DEFAULT_LOCALE,
help_text=_('The locale for this translation'))
field_name = models.CharField(
max_length=100,
verbose_name=_('field name'),
help_text=_('The model field name for this translation.'))
content = models.TextField(
verbose_name=_('content'),
null=True,
help_text=_('The translated content for the field.'))
class Meta:
abstract = True
app_label = 'linguist'
verbose_name = _('translation')
verbose_name_plural = _('translations')
unique_together = (('identifier', 'object_id', 'locale', 'field_name'),)
def __str__(self):
return '%s:%d:%s:%s' % (
self.identifier,
self.object_id,
self.field_name,
self.locale)
Rename locale field to language.# -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from .. import settings
@python_2_unicode_compatible
class Translation(models.Model):
"""
A Translation.
"""
identifier = models.CharField(
max_length=100,
verbose_name=_('identifier'),
help_text=_('The registered model identifier.'))
object_id = models.IntegerField(
verbose_name=_('The object ID'),
help_text=_('The object ID of this translation'))
language = models.CharField(
max_length=10,
verbose_name=_('locale'),
choices=settings.SUPPORTED_LANGUAGES,
default=settings.DEFAULT_LANGUAGE,
help_text=_('The language for this translation'))
field_name = models.CharField(
max_length=100,
verbose_name=_('field name'),
help_text=_('The model field name for this translation.'))
content = models.TextField(
verbose_name=_('content'),
null=True,
help_text=_('The translated content for the field.'))
class Meta:
abstract = True
app_label = 'linguist'
verbose_name = _('translation')
verbose_name_plural = _('translations')
unique_together = (('identifier', 'object_id', 'language', 'field_name'),)
def __str__(self):
return '%s:%d:%s:%s' % (
self.identifier,
self.object_id,
self.field_name,
self.language)
|
<commit_before># -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from .. import settings
@python_2_unicode_compatible
class Translation(models.Model):
"""
A Translation.
"""
identifier = models.CharField(
max_length=100,
verbose_name=_('identifier'),
help_text=_('The registered model identifier.'))
object_id = models.IntegerField(
verbose_name=_('The object ID'),
help_text=_('The object ID of this translation'))
locale = models.CharField(
max_length=10,
verbose_name=_('locale'),
choices=settings.SUPPORTED_LOCALES,
default=settings.DEFAULT_LOCALE,
help_text=_('The locale for this translation'))
field_name = models.CharField(
max_length=100,
verbose_name=_('field name'),
help_text=_('The model field name for this translation.'))
content = models.TextField(
verbose_name=_('content'),
null=True,
help_text=_('The translated content for the field.'))
class Meta:
abstract = True
app_label = 'linguist'
verbose_name = _('translation')
verbose_name_plural = _('translations')
unique_together = (('identifier', 'object_id', 'locale', 'field_name'),)
def __str__(self):
return '%s:%d:%s:%s' % (
self.identifier,
self.object_id,
self.field_name,
self.locale)
<commit_msg>Rename locale field to language.<commit_after># -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from .. import settings
@python_2_unicode_compatible
class Translation(models.Model):
"""
A Translation.
"""
identifier = models.CharField(
max_length=100,
verbose_name=_('identifier'),
help_text=_('The registered model identifier.'))
object_id = models.IntegerField(
verbose_name=_('The object ID'),
help_text=_('The object ID of this translation'))
language = models.CharField(
max_length=10,
verbose_name=_('locale'),
choices=settings.SUPPORTED_LANGUAGES,
default=settings.DEFAULT_LANGUAGE,
help_text=_('The language for this translation'))
field_name = models.CharField(
max_length=100,
verbose_name=_('field name'),
help_text=_('The model field name for this translation.'))
content = models.TextField(
verbose_name=_('content'),
null=True,
help_text=_('The translated content for the field.'))
class Meta:
abstract = True
app_label = 'linguist'
verbose_name = _('translation')
verbose_name_plural = _('translations')
unique_together = (('identifier', 'object_id', 'language', 'field_name'),)
def __str__(self):
return '%s:%d:%s:%s' % (
self.identifier,
self.object_id,
self.field_name,
self.language)
|
caf90dce76e361531077840f570602a625c22ccb
|
argus/backends/base.py
|
argus/backends/base.py
|
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class BaseBackend(object):
@abc.abstractmethod
def setup_instance(self):
"""Called by setUpClass to setup an instance"""
@abc.abstractmethod
def cleanup(self):
"""Needs to cleanup the resources created in ``setup_instance``"""
|
# Copyright 2015 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class BaseBackend(object):
@abc.abstractmethod
def setup_instance(self):
"""Called by setUpClass to setup an instance"""
@abc.abstractmethod
def cleanup(self):
"""Needs to cleanup the resources created in ``setup_instance``"""
|
Add the license header where it's missing.
|
Add the license header where it's missing.
|
Python
|
apache-2.0
|
PCManticore/argus-ci,cmin764/argus-ci,AlexandruTudose/cloudbase-init-ci,micumatei/cloudbase-init-ci,stefan-caraiman/cloudbase-init-ci,cloudbase/cloudbase-init-ci
|
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class BaseBackend(object):
@abc.abstractmethod
def setup_instance(self):
"""Called by setUpClass to setup an instance"""
@abc.abstractmethod
def cleanup(self):
"""Needs to cleanup the resources created in ``setup_instance``"""
Add the license header where it's missing.
|
# Copyright 2015 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class BaseBackend(object):
@abc.abstractmethod
def setup_instance(self):
"""Called by setUpClass to setup an instance"""
@abc.abstractmethod
def cleanup(self):
"""Needs to cleanup the resources created in ``setup_instance``"""
|
<commit_before>import abc
import six
@six.add_metaclass(abc.ABCMeta)
class BaseBackend(object):
@abc.abstractmethod
def setup_instance(self):
"""Called by setUpClass to setup an instance"""
@abc.abstractmethod
def cleanup(self):
"""Needs to cleanup the resources created in ``setup_instance``"""
<commit_msg>Add the license header where it's missing.<commit_after>
|
# Copyright 2015 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class BaseBackend(object):
@abc.abstractmethod
def setup_instance(self):
"""Called by setUpClass to setup an instance"""
@abc.abstractmethod
def cleanup(self):
"""Needs to cleanup the resources created in ``setup_instance``"""
|
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class BaseBackend(object):
@abc.abstractmethod
def setup_instance(self):
"""Called by setUpClass to setup an instance"""
@abc.abstractmethod
def cleanup(self):
"""Needs to cleanup the resources created in ``setup_instance``"""
Add the license header where it's missing.# Copyright 2015 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class BaseBackend(object):
@abc.abstractmethod
def setup_instance(self):
"""Called by setUpClass to setup an instance"""
@abc.abstractmethod
def cleanup(self):
"""Needs to cleanup the resources created in ``setup_instance``"""
|
<commit_before>import abc
import six
@six.add_metaclass(abc.ABCMeta)
class BaseBackend(object):
@abc.abstractmethod
def setup_instance(self):
"""Called by setUpClass to setup an instance"""
@abc.abstractmethod
def cleanup(self):
"""Needs to cleanup the resources created in ``setup_instance``"""
<commit_msg>Add the license header where it's missing.<commit_after># Copyright 2015 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class BaseBackend(object):
@abc.abstractmethod
def setup_instance(self):
"""Called by setUpClass to setup an instance"""
@abc.abstractmethod
def cleanup(self):
"""Needs to cleanup the resources created in ``setup_instance``"""
|
73d7377d0ba6c5ac768d547aaa957b48a6b1d46a
|
menu_generator/utils.py
|
menu_generator/utils.py
|
from importlib import import_module
from django.apps import apps
from django.core.exceptions import ImproperlyConfigured
def get_callable(func_or_path):
"""
Receives a dotted path or a callable, Returns a callable or None
"""
if callable(func_or_path):
return func_or_path
module_name = '.'.join(func_or_path.split('.')[:-1])
function_name = func_or_path.split('.')[-1]
_module = import_module(module_name)
func = getattr(_module, function_name)
return func
def clean_app_config(app_path):
"""
Removes the AppConfig path for this app and returns the new string
"""
apps_names = [app.name for app in apps.get_app_configs()]
if app_path in apps_names:
return app_path
else:
app_split = app_path.split('.')
new_app = '.'.join(app_split[:-2])
if new_app in apps_names:
return new_app
else: # pragma: no cover
raise ImproperlyConfigured(
"The application {0} is not in the configured apps or does" +
"not have the pattern app.apps.AppConfig".format(app_path)
)
|
from importlib import import_module
from django.apps import apps
from django.core.exceptions import ImproperlyConfigured
def get_callable(func_or_path):
"""
Receives a dotted path or a callable, Returns a callable or None
"""
if callable(func_or_path):
return func_or_path
module_name = '.'.join(func_or_path.split('.')[:-1])
function_name = func_or_path.split('.')[-1]
_module = import_module(module_name)
func = getattr(_module, function_name)
return func
def clean_app_config(app_path):
"""
Removes the AppConfig path for this app and returns the new string
"""
apps_names = [app.name for app in apps.get_app_configs()]
if app_path in apps_names:
return app_path
else:
app_split = app_path.split('.')
new_app = '.'.join(app_split[:-2])
if new_app in apps_names:
return new_app
else: # pragma: no cover
raise ImproperlyConfigured(
"The application {0} is not in the configured apps or does".format(app_path) +
"not have the pattern app.apps.AppConfig"
)
|
Fix exception message if app path is invalid
|
Fix exception message if app path is invalid
|
Python
|
mit
|
yamijuan/django-menu-generator
|
from importlib import import_module
from django.apps import apps
from django.core.exceptions import ImproperlyConfigured
def get_callable(func_or_path):
"""
Receives a dotted path or a callable, Returns a callable or None
"""
if callable(func_or_path):
return func_or_path
module_name = '.'.join(func_or_path.split('.')[:-1])
function_name = func_or_path.split('.')[-1]
_module = import_module(module_name)
func = getattr(_module, function_name)
return func
def clean_app_config(app_path):
"""
Removes the AppConfig path for this app and returns the new string
"""
apps_names = [app.name for app in apps.get_app_configs()]
if app_path in apps_names:
return app_path
else:
app_split = app_path.split('.')
new_app = '.'.join(app_split[:-2])
if new_app in apps_names:
return new_app
else: # pragma: no cover
raise ImproperlyConfigured(
"The application {0} is not in the configured apps or does" +
"not have the pattern app.apps.AppConfig".format(app_path)
)
Fix exception message if app path is invalid
|
from importlib import import_module
from django.apps import apps
from django.core.exceptions import ImproperlyConfigured
def get_callable(func_or_path):
"""
Receives a dotted path or a callable, Returns a callable or None
"""
if callable(func_or_path):
return func_or_path
module_name = '.'.join(func_or_path.split('.')[:-1])
function_name = func_or_path.split('.')[-1]
_module = import_module(module_name)
func = getattr(_module, function_name)
return func
def clean_app_config(app_path):
"""
Removes the AppConfig path for this app and returns the new string
"""
apps_names = [app.name for app in apps.get_app_configs()]
if app_path in apps_names:
return app_path
else:
app_split = app_path.split('.')
new_app = '.'.join(app_split[:-2])
if new_app in apps_names:
return new_app
else: # pragma: no cover
raise ImproperlyConfigured(
"The application {0} is not in the configured apps or does".format(app_path) +
"not have the pattern app.apps.AppConfig"
)
|
<commit_before>from importlib import import_module
from django.apps import apps
from django.core.exceptions import ImproperlyConfigured
def get_callable(func_or_path):
"""
Receives a dotted path or a callable, Returns a callable or None
"""
if callable(func_or_path):
return func_or_path
module_name = '.'.join(func_or_path.split('.')[:-1])
function_name = func_or_path.split('.')[-1]
_module = import_module(module_name)
func = getattr(_module, function_name)
return func
def clean_app_config(app_path):
"""
Removes the AppConfig path for this app and returns the new string
"""
apps_names = [app.name for app in apps.get_app_configs()]
if app_path in apps_names:
return app_path
else:
app_split = app_path.split('.')
new_app = '.'.join(app_split[:-2])
if new_app in apps_names:
return new_app
else: # pragma: no cover
raise ImproperlyConfigured(
"The application {0} is not in the configured apps or does" +
"not have the pattern app.apps.AppConfig".format(app_path)
)
<commit_msg>Fix exception message if app path is invalid<commit_after>
|
from importlib import import_module
from django.apps import apps
from django.core.exceptions import ImproperlyConfigured
def get_callable(func_or_path):
"""
Receives a dotted path or a callable, Returns a callable or None
"""
if callable(func_or_path):
return func_or_path
module_name = '.'.join(func_or_path.split('.')[:-1])
function_name = func_or_path.split('.')[-1]
_module = import_module(module_name)
func = getattr(_module, function_name)
return func
def clean_app_config(app_path):
"""
Removes the AppConfig path for this app and returns the new string
"""
apps_names = [app.name for app in apps.get_app_configs()]
if app_path in apps_names:
return app_path
else:
app_split = app_path.split('.')
new_app = '.'.join(app_split[:-2])
if new_app in apps_names:
return new_app
else: # pragma: no cover
raise ImproperlyConfigured(
"The application {0} is not in the configured apps or does".format(app_path) +
"not have the pattern app.apps.AppConfig"
)
|
from importlib import import_module
from django.apps import apps
from django.core.exceptions import ImproperlyConfigured
def get_callable(func_or_path):
"""
Receives a dotted path or a callable, Returns a callable or None
"""
if callable(func_or_path):
return func_or_path
module_name = '.'.join(func_or_path.split('.')[:-1])
function_name = func_or_path.split('.')[-1]
_module = import_module(module_name)
func = getattr(_module, function_name)
return func
def clean_app_config(app_path):
"""
Removes the AppConfig path for this app and returns the new string
"""
apps_names = [app.name for app in apps.get_app_configs()]
if app_path in apps_names:
return app_path
else:
app_split = app_path.split('.')
new_app = '.'.join(app_split[:-2])
if new_app in apps_names:
return new_app
else: # pragma: no cover
raise ImproperlyConfigured(
"The application {0} is not in the configured apps or does" +
"not have the pattern app.apps.AppConfig".format(app_path)
)
Fix exception message if app path is invalidfrom importlib import import_module
from django.apps import apps
from django.core.exceptions import ImproperlyConfigured
def get_callable(func_or_path):
"""
Receives a dotted path or a callable, Returns a callable or None
"""
if callable(func_or_path):
return func_or_path
module_name = '.'.join(func_or_path.split('.')[:-1])
function_name = func_or_path.split('.')[-1]
_module = import_module(module_name)
func = getattr(_module, function_name)
return func
def clean_app_config(app_path):
"""
Removes the AppConfig path for this app and returns the new string
"""
apps_names = [app.name for app in apps.get_app_configs()]
if app_path in apps_names:
return app_path
else:
app_split = app_path.split('.')
new_app = '.'.join(app_split[:-2])
if new_app in apps_names:
return new_app
else: # pragma: no cover
raise ImproperlyConfigured(
"The application {0} is not in the configured apps or does".format(app_path) +
"not have the pattern app.apps.AppConfig"
)
|
<commit_before>from importlib import import_module
from django.apps import apps
from django.core.exceptions import ImproperlyConfigured
def get_callable(func_or_path):
"""
Receives a dotted path or a callable, Returns a callable or None
"""
if callable(func_or_path):
return func_or_path
module_name = '.'.join(func_or_path.split('.')[:-1])
function_name = func_or_path.split('.')[-1]
_module = import_module(module_name)
func = getattr(_module, function_name)
return func
def clean_app_config(app_path):
"""
Removes the AppConfig path for this app and returns the new string
"""
apps_names = [app.name for app in apps.get_app_configs()]
if app_path in apps_names:
return app_path
else:
app_split = app_path.split('.')
new_app = '.'.join(app_split[:-2])
if new_app in apps_names:
return new_app
else: # pragma: no cover
raise ImproperlyConfigured(
"The application {0} is not in the configured apps or does" +
"not have the pattern app.apps.AppConfig".format(app_path)
)
<commit_msg>Fix exception message if app path is invalid<commit_after>from importlib import import_module
from django.apps import apps
from django.core.exceptions import ImproperlyConfigured
def get_callable(func_or_path):
"""
Receives a dotted path or a callable, Returns a callable or None
"""
if callable(func_or_path):
return func_or_path
module_name = '.'.join(func_or_path.split('.')[:-1])
function_name = func_or_path.split('.')[-1]
_module = import_module(module_name)
func = getattr(_module, function_name)
return func
def clean_app_config(app_path):
"""
Removes the AppConfig path for this app and returns the new string
"""
apps_names = [app.name for app in apps.get_app_configs()]
if app_path in apps_names:
return app_path
else:
app_split = app_path.split('.')
new_app = '.'.join(app_split[:-2])
if new_app in apps_names:
return new_app
else: # pragma: no cover
raise ImproperlyConfigured(
"The application {0} is not in the configured apps or does".format(app_path) +
"not have the pattern app.apps.AppConfig"
)
|
05f65ad42967f1499fb1ec37d37c76674e4d413a
|
biosys/apps/main/api/urls.py
|
biosys/apps/main/api/urls.py
|
from __future__ import absolute_import, unicode_literals, print_function, division
from django.conf.urls import url
from rest_framework import routers
from main.api import views as main_views
router = routers.DefaultRouter()
router.register(r'projects?', main_views.ProjectViewSet, 'project')
router.register(r'sites?', main_views.SiteViewSet, 'site')
router.register(r'datasets?', main_views.DatasetViewSet, 'dataset')
router.register(r'genericRecords?', main_views.GenericRecordViewSet, 'genericRecord')
router.register(r'observations?', main_views.ObservationViewSet, 'observation')
router.register(r'speciesObservations?', main_views.SpeciesObservationViewSet, 'speciesObservation')
url_patterns = [
url(r'dataset/(?P<pk>\d+)/data?', main_views.DatasetDataView.as_view(), name='dataset-data')
]
urls = router.urls + url_patterns
|
from __future__ import absolute_import, unicode_literals, print_function, division
from django.conf.urls import url
from rest_framework import routers
from main.api import views as main_views
router = routers.DefaultRouter()
router.register(r'projects?', main_views.ProjectViewSet, 'project')
router.register(r'sites?', main_views.SiteViewSet, 'site')
router.register(r'datasets?', main_views.DatasetViewSet, 'dataset')
router.register(r'generic_records?', main_views.GenericRecordViewSet, 'genericRecord')
router.register(r'observations?', main_views.ObservationViewSet, 'observation')
router.register(r'species_observations?', main_views.SpeciesObservationViewSet, 'speciesObservation')
url_patterns = [
url(r'dataset/(?P<pk>\d+)/data?', main_views.DatasetDataView.as_view(), name='dataset-data')
]
urls = router.urls + url_patterns
|
Use snake case for API's URL: generic_records instead of genericRecord and species_observation instead of speciesObservation.
|
Use snake case for API's URL: generic_records instead of genericRecord and species_observation instead of speciesObservation.
|
Python
|
apache-2.0
|
gaiaresources/biosys,parksandwildlife/biosys,serge-gaia/biosys,ropable/biosys,parksandwildlife/biosys,parksandwildlife/biosys,serge-gaia/biosys,serge-gaia/biosys,gaiaresources/biosys,ropable/biosys,ropable/biosys,gaiaresources/biosys
|
from __future__ import absolute_import, unicode_literals, print_function, division
from django.conf.urls import url
from rest_framework import routers
from main.api import views as main_views
router = routers.DefaultRouter()
router.register(r'projects?', main_views.ProjectViewSet, 'project')
router.register(r'sites?', main_views.SiteViewSet, 'site')
router.register(r'datasets?', main_views.DatasetViewSet, 'dataset')
router.register(r'genericRecords?', main_views.GenericRecordViewSet, 'genericRecord')
router.register(r'observations?', main_views.ObservationViewSet, 'observation')
router.register(r'speciesObservations?', main_views.SpeciesObservationViewSet, 'speciesObservation')
url_patterns = [
url(r'dataset/(?P<pk>\d+)/data?', main_views.DatasetDataView.as_view(), name='dataset-data')
]
urls = router.urls + url_patterns
Use snake case for API's URL: generic_records instead of genericRecord and species_observation instead of speciesObservation.
|
from __future__ import absolute_import, unicode_literals, print_function, division
from django.conf.urls import url
from rest_framework import routers
from main.api import views as main_views
router = routers.DefaultRouter()
router.register(r'projects?', main_views.ProjectViewSet, 'project')
router.register(r'sites?', main_views.SiteViewSet, 'site')
router.register(r'datasets?', main_views.DatasetViewSet, 'dataset')
router.register(r'generic_records?', main_views.GenericRecordViewSet, 'genericRecord')
router.register(r'observations?', main_views.ObservationViewSet, 'observation')
router.register(r'species_observations?', main_views.SpeciesObservationViewSet, 'speciesObservation')
url_patterns = [
url(r'dataset/(?P<pk>\d+)/data?', main_views.DatasetDataView.as_view(), name='dataset-data')
]
urls = router.urls + url_patterns
|
<commit_before>from __future__ import absolute_import, unicode_literals, print_function, division
from django.conf.urls import url
from rest_framework import routers
from main.api import views as main_views
router = routers.DefaultRouter()
router.register(r'projects?', main_views.ProjectViewSet, 'project')
router.register(r'sites?', main_views.SiteViewSet, 'site')
router.register(r'datasets?', main_views.DatasetViewSet, 'dataset')
router.register(r'genericRecords?', main_views.GenericRecordViewSet, 'genericRecord')
router.register(r'observations?', main_views.ObservationViewSet, 'observation')
router.register(r'speciesObservations?', main_views.SpeciesObservationViewSet, 'speciesObservation')
url_patterns = [
url(r'dataset/(?P<pk>\d+)/data?', main_views.DatasetDataView.as_view(), name='dataset-data')
]
urls = router.urls + url_patterns
<commit_msg>Use snake case for API's URL: generic_records instead of genericRecord and species_observation instead of speciesObservation.<commit_after>
|
from __future__ import absolute_import, unicode_literals, print_function, division
from django.conf.urls import url
from rest_framework import routers
from main.api import views as main_views
router = routers.DefaultRouter()
router.register(r'projects?', main_views.ProjectViewSet, 'project')
router.register(r'sites?', main_views.SiteViewSet, 'site')
router.register(r'datasets?', main_views.DatasetViewSet, 'dataset')
router.register(r'generic_records?', main_views.GenericRecordViewSet, 'genericRecord')
router.register(r'observations?', main_views.ObservationViewSet, 'observation')
router.register(r'species_observations?', main_views.SpeciesObservationViewSet, 'speciesObservation')
url_patterns = [
url(r'dataset/(?P<pk>\d+)/data?', main_views.DatasetDataView.as_view(), name='dataset-data')
]
urls = router.urls + url_patterns
|
from __future__ import absolute_import, unicode_literals, print_function, division
from django.conf.urls import url
from rest_framework import routers
from main.api import views as main_views
router = routers.DefaultRouter()
router.register(r'projects?', main_views.ProjectViewSet, 'project')
router.register(r'sites?', main_views.SiteViewSet, 'site')
router.register(r'datasets?', main_views.DatasetViewSet, 'dataset')
router.register(r'genericRecords?', main_views.GenericRecordViewSet, 'genericRecord')
router.register(r'observations?', main_views.ObservationViewSet, 'observation')
router.register(r'speciesObservations?', main_views.SpeciesObservationViewSet, 'speciesObservation')
url_patterns = [
url(r'dataset/(?P<pk>\d+)/data?', main_views.DatasetDataView.as_view(), name='dataset-data')
]
urls = router.urls + url_patterns
Use snake case for API's URL: generic_records instead of genericRecord and species_observation instead of speciesObservation.from __future__ import absolute_import, unicode_literals, print_function, division
from django.conf.urls import url
from rest_framework import routers
from main.api import views as main_views
router = routers.DefaultRouter()
router.register(r'projects?', main_views.ProjectViewSet, 'project')
router.register(r'sites?', main_views.SiteViewSet, 'site')
router.register(r'datasets?', main_views.DatasetViewSet, 'dataset')
router.register(r'generic_records?', main_views.GenericRecordViewSet, 'genericRecord')
router.register(r'observations?', main_views.ObservationViewSet, 'observation')
router.register(r'species_observations?', main_views.SpeciesObservationViewSet, 'speciesObservation')
url_patterns = [
url(r'dataset/(?P<pk>\d+)/data?', main_views.DatasetDataView.as_view(), name='dataset-data')
]
urls = router.urls + url_patterns
|
<commit_before>from __future__ import absolute_import, unicode_literals, print_function, division
from django.conf.urls import url
from rest_framework import routers
from main.api import views as main_views
router = routers.DefaultRouter()
router.register(r'projects?', main_views.ProjectViewSet, 'project')
router.register(r'sites?', main_views.SiteViewSet, 'site')
router.register(r'datasets?', main_views.DatasetViewSet, 'dataset')
router.register(r'genericRecords?', main_views.GenericRecordViewSet, 'genericRecord')
router.register(r'observations?', main_views.ObservationViewSet, 'observation')
router.register(r'speciesObservations?', main_views.SpeciesObservationViewSet, 'speciesObservation')
url_patterns = [
url(r'dataset/(?P<pk>\d+)/data?', main_views.DatasetDataView.as_view(), name='dataset-data')
]
urls = router.urls + url_patterns
<commit_msg>Use snake case for API's URL: generic_records instead of genericRecord and species_observation instead of speciesObservation.<commit_after>from __future__ import absolute_import, unicode_literals, print_function, division
from django.conf.urls import url
from rest_framework import routers
from main.api import views as main_views
router = routers.DefaultRouter()
router.register(r'projects?', main_views.ProjectViewSet, 'project')
router.register(r'sites?', main_views.SiteViewSet, 'site')
router.register(r'datasets?', main_views.DatasetViewSet, 'dataset')
router.register(r'generic_records?', main_views.GenericRecordViewSet, 'genericRecord')
router.register(r'observations?', main_views.ObservationViewSet, 'observation')
router.register(r'species_observations?', main_views.SpeciesObservationViewSet, 'speciesObservation')
url_patterns = [
url(r'dataset/(?P<pk>\d+)/data?', main_views.DatasetDataView.as_view(), name='dataset-data')
]
urls = router.urls + url_patterns
|
e73bb8cecf516f4379dd7d90282ef2412d348ac8
|
autotranslate/utils.py
|
autotranslate/utils.py
|
import six
from autotranslate.compat import importlib
from django.conf import settings
def perform_import(val, setting_name):
"""
If the given setting is a string import notation,
then perform the necessary import or imports.
Credits: https://github.com/tomchristie/django-rest-framework/blob/master/rest_framework/settings.py#L138
"""
if val is None:
return None
elif isinstance(val, six.string_types):
return import_from_string(val, setting_name)
elif isinstance(val, (list, tuple)):
return [import_from_string(item, setting_name) for item in val]
return val
def import_from_string(val, setting_name):
"""
Attempt to import a class from a string representation.
"""
try:
parts = val.split('.')
module_path, class_name = '.'.join(parts[:-1]), parts[-1]
module = importlib.import_module(module_path)
return getattr(module, class_name)
except (ImportError, AttributeError) as e:
raise ImportError('Could not import {} for API setting {}. {}: {}.'
.format(val, setting_name, e.__class__.__name__, e))
TranslatorService = getattr(settings, 'AUTOTRANSLATE_TRANSLATOR_SERVICE',
'autotranslate.services.GoSlateTranslatorService')
translator = perform_import(TranslatorService, 'AUTOTRANSLATE_TRANSLATOR_SERVICE')()
translate_string = translator.translate_string
translate_strings = translator.translate_strings
|
import six
from autotranslate.compat import importlib
from django.conf import settings
def perform_import(val, setting_name):
"""
If the given setting is a string import notation,
then perform the necessary import or imports.
Credits: https://github.com/tomchristie/django-rest-framework/blob/master/rest_framework/settings.py#L138
"""
if val is None:
return None
elif isinstance(val, six.string_types):
return import_from_string(val, setting_name)
elif isinstance(val, (list, tuple)):
return [import_from_string(item, setting_name) for item in val]
return val
def import_from_string(val, setting_name):
"""
Attempt to import a class from a string representation.
"""
try:
parts = val.split('.')
module_path, class_name = '.'.join(parts[:-1]), parts[-1]
module = importlib.import_module(module_path)
return getattr(module, class_name)
except (ImportError, AttributeError) as e:
raise ImportError('Could not import {} for API setting {}. {}: {}.'
.format(val, setting_name, e.__class__.__name__, e))
def get_translator():
"""
Returns the default translator.
"""
TranslatorService = getattr(settings, 'AUTOTRANSLATE_TRANSLATOR_SERVICE',
'autotranslate.services.GoSlateTranslatorService')
translator = perform_import(TranslatorService, 'AUTOTRANSLATE_TRANSLATOR_SERVICE')()
return translator
|
Make sure we don't expose translator as global
|
Make sure we don't expose translator as global
|
Python
|
mit
|
ankitpopli1891/django-autotranslate
|
import six
from autotranslate.compat import importlib
from django.conf import settings
def perform_import(val, setting_name):
"""
If the given setting is a string import notation,
then perform the necessary import or imports.
Credits: https://github.com/tomchristie/django-rest-framework/blob/master/rest_framework/settings.py#L138
"""
if val is None:
return None
elif isinstance(val, six.string_types):
return import_from_string(val, setting_name)
elif isinstance(val, (list, tuple)):
return [import_from_string(item, setting_name) for item in val]
return val
def import_from_string(val, setting_name):
"""
Attempt to import a class from a string representation.
"""
try:
parts = val.split('.')
module_path, class_name = '.'.join(parts[:-1]), parts[-1]
module = importlib.import_module(module_path)
return getattr(module, class_name)
except (ImportError, AttributeError) as e:
raise ImportError('Could not import {} for API setting {}. {}: {}.'
.format(val, setting_name, e.__class__.__name__, e))
TranslatorService = getattr(settings, 'AUTOTRANSLATE_TRANSLATOR_SERVICE',
'autotranslate.services.GoSlateTranslatorService')
translator = perform_import(TranslatorService, 'AUTOTRANSLATE_TRANSLATOR_SERVICE')()
translate_string = translator.translate_string
translate_strings = translator.translate_strings
Make sure we don't expose translator as global
|
import six
from autotranslate.compat import importlib
from django.conf import settings
def perform_import(val, setting_name):
"""
If the given setting is a string import notation,
then perform the necessary import or imports.
Credits: https://github.com/tomchristie/django-rest-framework/blob/master/rest_framework/settings.py#L138
"""
if val is None:
return None
elif isinstance(val, six.string_types):
return import_from_string(val, setting_name)
elif isinstance(val, (list, tuple)):
return [import_from_string(item, setting_name) for item in val]
return val
def import_from_string(val, setting_name):
"""
Attempt to import a class from a string representation.
"""
try:
parts = val.split('.')
module_path, class_name = '.'.join(parts[:-1]), parts[-1]
module = importlib.import_module(module_path)
return getattr(module, class_name)
except (ImportError, AttributeError) as e:
raise ImportError('Could not import {} for API setting {}. {}: {}.'
.format(val, setting_name, e.__class__.__name__, e))
def get_translator():
"""
Returns the default translator.
"""
TranslatorService = getattr(settings, 'AUTOTRANSLATE_TRANSLATOR_SERVICE',
'autotranslate.services.GoSlateTranslatorService')
translator = perform_import(TranslatorService, 'AUTOTRANSLATE_TRANSLATOR_SERVICE')()
return translator
|
<commit_before>import six
from autotranslate.compat import importlib
from django.conf import settings
def perform_import(val, setting_name):
"""
If the given setting is a string import notation,
then perform the necessary import or imports.
Credits: https://github.com/tomchristie/django-rest-framework/blob/master/rest_framework/settings.py#L138
"""
if val is None:
return None
elif isinstance(val, six.string_types):
return import_from_string(val, setting_name)
elif isinstance(val, (list, tuple)):
return [import_from_string(item, setting_name) for item in val]
return val
def import_from_string(val, setting_name):
"""
Attempt to import a class from a string representation.
"""
try:
parts = val.split('.')
module_path, class_name = '.'.join(parts[:-1]), parts[-1]
module = importlib.import_module(module_path)
return getattr(module, class_name)
except (ImportError, AttributeError) as e:
raise ImportError('Could not import {} for API setting {}. {}: {}.'
.format(val, setting_name, e.__class__.__name__, e))
TranslatorService = getattr(settings, 'AUTOTRANSLATE_TRANSLATOR_SERVICE',
'autotranslate.services.GoSlateTranslatorService')
translator = perform_import(TranslatorService, 'AUTOTRANSLATE_TRANSLATOR_SERVICE')()
translate_string = translator.translate_string
translate_strings = translator.translate_strings
<commit_msg>Make sure we don't expose translator as global<commit_after>
|
import six
from autotranslate.compat import importlib
from django.conf import settings
def perform_import(val, setting_name):
"""
If the given setting is a string import notation,
then perform the necessary import or imports.
Credits: https://github.com/tomchristie/django-rest-framework/blob/master/rest_framework/settings.py#L138
"""
if val is None:
return None
elif isinstance(val, six.string_types):
return import_from_string(val, setting_name)
elif isinstance(val, (list, tuple)):
return [import_from_string(item, setting_name) for item in val]
return val
def import_from_string(val, setting_name):
"""
Attempt to import a class from a string representation.
"""
try:
parts = val.split('.')
module_path, class_name = '.'.join(parts[:-1]), parts[-1]
module = importlib.import_module(module_path)
return getattr(module, class_name)
except (ImportError, AttributeError) as e:
raise ImportError('Could not import {} for API setting {}. {}: {}.'
.format(val, setting_name, e.__class__.__name__, e))
def get_translator():
"""
Returns the default translator.
"""
TranslatorService = getattr(settings, 'AUTOTRANSLATE_TRANSLATOR_SERVICE',
'autotranslate.services.GoSlateTranslatorService')
translator = perform_import(TranslatorService, 'AUTOTRANSLATE_TRANSLATOR_SERVICE')()
return translator
|
import six
from autotranslate.compat import importlib
from django.conf import settings
def perform_import(val, setting_name):
"""
If the given setting is a string import notation,
then perform the necessary import or imports.
Credits: https://github.com/tomchristie/django-rest-framework/blob/master/rest_framework/settings.py#L138
"""
if val is None:
return None
elif isinstance(val, six.string_types):
return import_from_string(val, setting_name)
elif isinstance(val, (list, tuple)):
return [import_from_string(item, setting_name) for item in val]
return val
def import_from_string(val, setting_name):
"""
Attempt to import a class from a string representation.
"""
try:
parts = val.split('.')
module_path, class_name = '.'.join(parts[:-1]), parts[-1]
module = importlib.import_module(module_path)
return getattr(module, class_name)
except (ImportError, AttributeError) as e:
raise ImportError('Could not import {} for API setting {}. {}: {}.'
.format(val, setting_name, e.__class__.__name__, e))
TranslatorService = getattr(settings, 'AUTOTRANSLATE_TRANSLATOR_SERVICE',
'autotranslate.services.GoSlateTranslatorService')
translator = perform_import(TranslatorService, 'AUTOTRANSLATE_TRANSLATOR_SERVICE')()
translate_string = translator.translate_string
translate_strings = translator.translate_strings
Make sure we don't expose translator as globalimport six
from autotranslate.compat import importlib
from django.conf import settings
def perform_import(val, setting_name):
"""
If the given setting is a string import notation,
then perform the necessary import or imports.
Credits: https://github.com/tomchristie/django-rest-framework/blob/master/rest_framework/settings.py#L138
"""
if val is None:
return None
elif isinstance(val, six.string_types):
return import_from_string(val, setting_name)
elif isinstance(val, (list, tuple)):
return [import_from_string(item, setting_name) for item in val]
return val
def import_from_string(val, setting_name):
"""
Attempt to import a class from a string representation.
"""
try:
parts = val.split('.')
module_path, class_name = '.'.join(parts[:-1]), parts[-1]
module = importlib.import_module(module_path)
return getattr(module, class_name)
except (ImportError, AttributeError) as e:
raise ImportError('Could not import {} for API setting {}. {}: {}.'
.format(val, setting_name, e.__class__.__name__, e))
def get_translator():
"""
Returns the default translator.
"""
TranslatorService = getattr(settings, 'AUTOTRANSLATE_TRANSLATOR_SERVICE',
'autotranslate.services.GoSlateTranslatorService')
translator = perform_import(TranslatorService, 'AUTOTRANSLATE_TRANSLATOR_SERVICE')()
return translator
|
<commit_before>import six
from autotranslate.compat import importlib
from django.conf import settings
def perform_import(val, setting_name):
"""
If the given setting is a string import notation,
then perform the necessary import or imports.
Credits: https://github.com/tomchristie/django-rest-framework/blob/master/rest_framework/settings.py#L138
"""
if val is None:
return None
elif isinstance(val, six.string_types):
return import_from_string(val, setting_name)
elif isinstance(val, (list, tuple)):
return [import_from_string(item, setting_name) for item in val]
return val
def import_from_string(val, setting_name):
"""
Attempt to import a class from a string representation.
"""
try:
parts = val.split('.')
module_path, class_name = '.'.join(parts[:-1]), parts[-1]
module = importlib.import_module(module_path)
return getattr(module, class_name)
except (ImportError, AttributeError) as e:
raise ImportError('Could not import {} for API setting {}. {}: {}.'
.format(val, setting_name, e.__class__.__name__, e))
TranslatorService = getattr(settings, 'AUTOTRANSLATE_TRANSLATOR_SERVICE',
'autotranslate.services.GoSlateTranslatorService')
translator = perform_import(TranslatorService, 'AUTOTRANSLATE_TRANSLATOR_SERVICE')()
translate_string = translator.translate_string
translate_strings = translator.translate_strings
<commit_msg>Make sure we don't expose translator as global<commit_after>import six
from autotranslate.compat import importlib
from django.conf import settings
def perform_import(val, setting_name):
"""
If the given setting is a string import notation,
then perform the necessary import or imports.
Credits: https://github.com/tomchristie/django-rest-framework/blob/master/rest_framework/settings.py#L138
"""
if val is None:
return None
elif isinstance(val, six.string_types):
return import_from_string(val, setting_name)
elif isinstance(val, (list, tuple)):
return [import_from_string(item, setting_name) for item in val]
return val
def import_from_string(val, setting_name):
"""
Attempt to import a class from a string representation.
"""
try:
parts = val.split('.')
module_path, class_name = '.'.join(parts[:-1]), parts[-1]
module = importlib.import_module(module_path)
return getattr(module, class_name)
except (ImportError, AttributeError) as e:
raise ImportError('Could not import {} for API setting {}. {}: {}.'
.format(val, setting_name, e.__class__.__name__, e))
def get_translator():
"""
Returns the default translator.
"""
TranslatorService = getattr(settings, 'AUTOTRANSLATE_TRANSLATOR_SERVICE',
'autotranslate.services.GoSlateTranslatorService')
translator = perform_import(TranslatorService, 'AUTOTRANSLATE_TRANSLATOR_SERVICE')()
return translator
|
79edc5861e37de0970d2af46ba45e07b47d30837
|
test/test_retriever.py
|
test/test_retriever.py
|
"""Tests for the EcoData Retriever"""
from StringIO import StringIO
from engine import Engine
def test_escape_single_quotes():
"""Test escaping of single quotes"""
test_engine = Engine()
assert test_engine.escape_single_quotes("1,2,3,'a'") == "1,2,3,\\'a\\'"
def test_escape_double_quotes():
"""Test escaping of double quotes"""
test_engine = Engine()
assert test_engine.escape_double_quotes('"a",1,2,3') == '\\"a\\",1,2,3'
def test_drop_statement():
"Test the creation of drop statements"
test_engine = Engine()
assert test_engine.drop_statement('TABLE', 'tablename') == "DROP TABLE IF EXISTS tablename"
|
"""Tests for the EcoData Retriever"""
from StringIO import StringIO
from engine import Engine
from table import Table
def test_escape_single_quotes():
"""Test escaping of single quotes"""
test_engine = Engine()
assert test_engine.escape_single_quotes("1,2,3,'a'") == "1,2,3,\\'a\\'"
def test_escape_double_quotes():
"""Test escaping of double quotes"""
test_engine = Engine()
assert test_engine.escape_double_quotes('"a",1,2,3') == '\\"a\\",1,2,3'
def test_drop_statement():
"Test the creation of drop statements"
test_engine = Engine()
assert test_engine.drop_statement('TABLE', 'tablename') == "DROP TABLE IF EXISTS tablename"
def test_auto_get_delimiter_comma():
"""Test if commas are properly detected as delimiter"""
test_engine = Engine()
test_engine.table = Table("test")
test_engine.auto_get_delimiter("a,b,c;,d")
assert test_engine.table.delimiter == ","
def test_auto_get_delimiter_tab():
"""Test if commas are properly detected as delimiter"""
test_engine = Engine()
test_engine.table = Table("test")
test_engine.auto_get_delimiter("a\tb\tc\td,")
assert test_engine.table.delimiter == "\t"
def test_auto_get_delimiter_semicolon():
"""Test if commas are properly detected as delimiter"""
test_engine = Engine()
test_engine.table = Table("test")
test_engine.auto_get_delimiter("a;b;c;,d")
assert test_engine.table.delimiter == ";"
|
Add tests of automated identification of the delimiter
|
Add tests of automated identification of the delimiter
|
Python
|
mit
|
bendmorris/retriever,embaldridge/retriever,davharris/retriever,goelakash/retriever,embaldridge/retriever,henrykironde/deletedret,davharris/retriever,bendmorris/retriever,davharris/retriever,bendmorris/retriever,embaldridge/retriever,goelakash/retriever,henrykironde/deletedret
|
"""Tests for the EcoData Retriever"""
from StringIO import StringIO
from engine import Engine
def test_escape_single_quotes():
"""Test escaping of single quotes"""
test_engine = Engine()
assert test_engine.escape_single_quotes("1,2,3,'a'") == "1,2,3,\\'a\\'"
def test_escape_double_quotes():
"""Test escaping of double quotes"""
test_engine = Engine()
assert test_engine.escape_double_quotes('"a",1,2,3') == '\\"a\\",1,2,3'
def test_drop_statement():
"Test the creation of drop statements"
test_engine = Engine()
assert test_engine.drop_statement('TABLE', 'tablename') == "DROP TABLE IF EXISTS tablename"
Add tests of automated identification of the delimiter
|
"""Tests for the EcoData Retriever"""
from StringIO import StringIO
from engine import Engine
from table import Table
def test_escape_single_quotes():
"""Test escaping of single quotes"""
test_engine = Engine()
assert test_engine.escape_single_quotes("1,2,3,'a'") == "1,2,3,\\'a\\'"
def test_escape_double_quotes():
"""Test escaping of double quotes"""
test_engine = Engine()
assert test_engine.escape_double_quotes('"a",1,2,3') == '\\"a\\",1,2,3'
def test_drop_statement():
"Test the creation of drop statements"
test_engine = Engine()
assert test_engine.drop_statement('TABLE', 'tablename') == "DROP TABLE IF EXISTS tablename"
def test_auto_get_delimiter_comma():
"""Test if commas are properly detected as delimiter"""
test_engine = Engine()
test_engine.table = Table("test")
test_engine.auto_get_delimiter("a,b,c;,d")
assert test_engine.table.delimiter == ","
def test_auto_get_delimiter_tab():
"""Test if commas are properly detected as delimiter"""
test_engine = Engine()
test_engine.table = Table("test")
test_engine.auto_get_delimiter("a\tb\tc\td,")
assert test_engine.table.delimiter == "\t"
def test_auto_get_delimiter_semicolon():
"""Test if commas are properly detected as delimiter"""
test_engine = Engine()
test_engine.table = Table("test")
test_engine.auto_get_delimiter("a;b;c;,d")
assert test_engine.table.delimiter == ";"
|
<commit_before>"""Tests for the EcoData Retriever"""
from StringIO import StringIO
from engine import Engine
def test_escape_single_quotes():
"""Test escaping of single quotes"""
test_engine = Engine()
assert test_engine.escape_single_quotes("1,2,3,'a'") == "1,2,3,\\'a\\'"
def test_escape_double_quotes():
"""Test escaping of double quotes"""
test_engine = Engine()
assert test_engine.escape_double_quotes('"a",1,2,3') == '\\"a\\",1,2,3'
def test_drop_statement():
"Test the creation of drop statements"
test_engine = Engine()
assert test_engine.drop_statement('TABLE', 'tablename') == "DROP TABLE IF EXISTS tablename"
<commit_msg>Add tests of automated identification of the delimiter<commit_after>
|
"""Tests for the EcoData Retriever"""
from StringIO import StringIO
from engine import Engine
from table import Table
def test_escape_single_quotes():
"""Test escaping of single quotes"""
test_engine = Engine()
assert test_engine.escape_single_quotes("1,2,3,'a'") == "1,2,3,\\'a\\'"
def test_escape_double_quotes():
"""Test escaping of double quotes"""
test_engine = Engine()
assert test_engine.escape_double_quotes('"a",1,2,3') == '\\"a\\",1,2,3'
def test_drop_statement():
"Test the creation of drop statements"
test_engine = Engine()
assert test_engine.drop_statement('TABLE', 'tablename') == "DROP TABLE IF EXISTS tablename"
def test_auto_get_delimiter_comma():
"""Test if commas are properly detected as delimiter"""
test_engine = Engine()
test_engine.table = Table("test")
test_engine.auto_get_delimiter("a,b,c;,d")
assert test_engine.table.delimiter == ","
def test_auto_get_delimiter_tab():
"""Test if commas are properly detected as delimiter"""
test_engine = Engine()
test_engine.table = Table("test")
test_engine.auto_get_delimiter("a\tb\tc\td,")
assert test_engine.table.delimiter == "\t"
def test_auto_get_delimiter_semicolon():
"""Test if commas are properly detected as delimiter"""
test_engine = Engine()
test_engine.table = Table("test")
test_engine.auto_get_delimiter("a;b;c;,d")
assert test_engine.table.delimiter == ";"
|
"""Tests for the EcoData Retriever"""
from StringIO import StringIO
from engine import Engine
def test_escape_single_quotes():
"""Test escaping of single quotes"""
test_engine = Engine()
assert test_engine.escape_single_quotes("1,2,3,'a'") == "1,2,3,\\'a\\'"
def test_escape_double_quotes():
"""Test escaping of double quotes"""
test_engine = Engine()
assert test_engine.escape_double_quotes('"a",1,2,3') == '\\"a\\",1,2,3'
def test_drop_statement():
"Test the creation of drop statements"
test_engine = Engine()
assert test_engine.drop_statement('TABLE', 'tablename') == "DROP TABLE IF EXISTS tablename"
Add tests of automated identification of the delimiter"""Tests for the EcoData Retriever"""
from StringIO import StringIO
from engine import Engine
from table import Table
def test_escape_single_quotes():
"""Test escaping of single quotes"""
test_engine = Engine()
assert test_engine.escape_single_quotes("1,2,3,'a'") == "1,2,3,\\'a\\'"
def test_escape_double_quotes():
"""Test escaping of double quotes"""
test_engine = Engine()
assert test_engine.escape_double_quotes('"a",1,2,3') == '\\"a\\",1,2,3'
def test_drop_statement():
"Test the creation of drop statements"
test_engine = Engine()
assert test_engine.drop_statement('TABLE', 'tablename') == "DROP TABLE IF EXISTS tablename"
def test_auto_get_delimiter_comma():
"""Test if commas are properly detected as delimiter"""
test_engine = Engine()
test_engine.table = Table("test")
test_engine.auto_get_delimiter("a,b,c;,d")
assert test_engine.table.delimiter == ","
def test_auto_get_delimiter_tab():
"""Test if commas are properly detected as delimiter"""
test_engine = Engine()
test_engine.table = Table("test")
test_engine.auto_get_delimiter("a\tb\tc\td,")
assert test_engine.table.delimiter == "\t"
def test_auto_get_delimiter_semicolon():
"""Test if commas are properly detected as delimiter"""
test_engine = Engine()
test_engine.table = Table("test")
test_engine.auto_get_delimiter("a;b;c;,d")
assert test_engine.table.delimiter == ";"
|
<commit_before>"""Tests for the EcoData Retriever"""
from StringIO import StringIO
from engine import Engine
def test_escape_single_quotes():
"""Test escaping of single quotes"""
test_engine = Engine()
assert test_engine.escape_single_quotes("1,2,3,'a'") == "1,2,3,\\'a\\'"
def test_escape_double_quotes():
"""Test escaping of double quotes"""
test_engine = Engine()
assert test_engine.escape_double_quotes('"a",1,2,3') == '\\"a\\",1,2,3'
def test_drop_statement():
"Test the creation of drop statements"
test_engine = Engine()
assert test_engine.drop_statement('TABLE', 'tablename') == "DROP TABLE IF EXISTS tablename"
<commit_msg>Add tests of automated identification of the delimiter<commit_after>"""Tests for the EcoData Retriever"""
from StringIO import StringIO
from engine import Engine
from table import Table
def test_escape_single_quotes():
"""Test escaping of single quotes"""
test_engine = Engine()
assert test_engine.escape_single_quotes("1,2,3,'a'") == "1,2,3,\\'a\\'"
def test_escape_double_quotes():
"""Test escaping of double quotes"""
test_engine = Engine()
assert test_engine.escape_double_quotes('"a",1,2,3') == '\\"a\\",1,2,3'
def test_drop_statement():
"Test the creation of drop statements"
test_engine = Engine()
assert test_engine.drop_statement('TABLE', 'tablename') == "DROP TABLE IF EXISTS tablename"
def test_auto_get_delimiter_comma():
"""Test if commas are properly detected as delimiter"""
test_engine = Engine()
test_engine.table = Table("test")
test_engine.auto_get_delimiter("a,b,c;,d")
assert test_engine.table.delimiter == ","
def test_auto_get_delimiter_tab():
"""Test if commas are properly detected as delimiter"""
test_engine = Engine()
test_engine.table = Table("test")
test_engine.auto_get_delimiter("a\tb\tc\td,")
assert test_engine.table.delimiter == "\t"
def test_auto_get_delimiter_semicolon():
"""Test if commas are properly detected as delimiter"""
test_engine = Engine()
test_engine.table = Table("test")
test_engine.auto_get_delimiter("a;b;c;,d")
assert test_engine.table.delimiter == ";"
|
719777a0b2e3eed4f14355974c6673d20904ac83
|
models/shopping_item.py
|
models/shopping_item.py
|
"""
This is the sqlalchemy class for communicating with the shopping item table
"""
from sqlalchemy import Column, Integer, Unicode, ForeignKey
import base
class ShoppingItem(base.Base):
"""Sqlalchemy deals model"""
__tablename__ = "shopping_item"
catId = 'shopping_category.id'
visitId = 'visits.id'
id = Column(Integer, primary_key=True)
name = Column('name', Unicode)
category_id = Column('category', Integer, ForeignKey(catId))
visit_id = Column('visit', Integer, ForeignKey(visitId))
|
"""
This is the sqlalchemy class for communicating with the shopping item table
"""
from sqlalchemy import Column, Integer, Unicode, ForeignKey
import base
class ShoppingItem(base.Base):
"""Sqlalchemy deals model"""
__tablename__ = "shopping_item"
catId = 'shopping_category.id'
visitId = 'visits.id'
id = Column(Integer, primary_key=True)
name = Column('name', Unicode)
quantity = Column('name', Integer)
category_id = Column('category', Integer, ForeignKey(catId))
visit_id = Column('visit', Integer, ForeignKey(visitId))
|
Add quantity to shopping item model
|
Add quantity to shopping item model
|
Python
|
mit
|
jlutz777/FreeStore,jlutz777/FreeStore,jlutz777/FreeStore
|
"""
This is the sqlalchemy class for communicating with the shopping item table
"""
from sqlalchemy import Column, Integer, Unicode, ForeignKey
import base
class ShoppingItem(base.Base):
"""Sqlalchemy deals model"""
__tablename__ = "shopping_item"
catId = 'shopping_category.id'
visitId = 'visits.id'
id = Column(Integer, primary_key=True)
name = Column('name', Unicode)
category_id = Column('category', Integer, ForeignKey(catId))
visit_id = Column('visit', Integer, ForeignKey(visitId))
Add quantity to shopping item model
|
"""
This is the sqlalchemy class for communicating with the shopping item table
"""
from sqlalchemy import Column, Integer, Unicode, ForeignKey
import base
class ShoppingItem(base.Base):
"""Sqlalchemy deals model"""
__tablename__ = "shopping_item"
catId = 'shopping_category.id'
visitId = 'visits.id'
id = Column(Integer, primary_key=True)
name = Column('name', Unicode)
quantity = Column('name', Integer)
category_id = Column('category', Integer, ForeignKey(catId))
visit_id = Column('visit', Integer, ForeignKey(visitId))
|
<commit_before>"""
This is the sqlalchemy class for communicating with the shopping item table
"""
from sqlalchemy import Column, Integer, Unicode, ForeignKey
import base
class ShoppingItem(base.Base):
"""Sqlalchemy deals model"""
__tablename__ = "shopping_item"
catId = 'shopping_category.id'
visitId = 'visits.id'
id = Column(Integer, primary_key=True)
name = Column('name', Unicode)
category_id = Column('category', Integer, ForeignKey(catId))
visit_id = Column('visit', Integer, ForeignKey(visitId))
<commit_msg>Add quantity to shopping item model<commit_after>
|
"""
This is the sqlalchemy class for communicating with the shopping item table
"""
from sqlalchemy import Column, Integer, Unicode, ForeignKey
import base
class ShoppingItem(base.Base):
"""Sqlalchemy deals model"""
__tablename__ = "shopping_item"
catId = 'shopping_category.id'
visitId = 'visits.id'
id = Column(Integer, primary_key=True)
name = Column('name', Unicode)
quantity = Column('name', Integer)
category_id = Column('category', Integer, ForeignKey(catId))
visit_id = Column('visit', Integer, ForeignKey(visitId))
|
"""
This is the sqlalchemy class for communicating with the shopping item table
"""
from sqlalchemy import Column, Integer, Unicode, ForeignKey
import base
class ShoppingItem(base.Base):
"""Sqlalchemy deals model"""
__tablename__ = "shopping_item"
catId = 'shopping_category.id'
visitId = 'visits.id'
id = Column(Integer, primary_key=True)
name = Column('name', Unicode)
category_id = Column('category', Integer, ForeignKey(catId))
visit_id = Column('visit', Integer, ForeignKey(visitId))
Add quantity to shopping item model"""
This is the sqlalchemy class for communicating with the shopping item table
"""
from sqlalchemy import Column, Integer, Unicode, ForeignKey
import base
class ShoppingItem(base.Base):
"""Sqlalchemy deals model"""
__tablename__ = "shopping_item"
catId = 'shopping_category.id'
visitId = 'visits.id'
id = Column(Integer, primary_key=True)
name = Column('name', Unicode)
quantity = Column('name', Integer)
category_id = Column('category', Integer, ForeignKey(catId))
visit_id = Column('visit', Integer, ForeignKey(visitId))
|
<commit_before>"""
This is the sqlalchemy class for communicating with the shopping item table
"""
from sqlalchemy import Column, Integer, Unicode, ForeignKey
import base
class ShoppingItem(base.Base):
"""Sqlalchemy deals model"""
__tablename__ = "shopping_item"
catId = 'shopping_category.id'
visitId = 'visits.id'
id = Column(Integer, primary_key=True)
name = Column('name', Unicode)
category_id = Column('category', Integer, ForeignKey(catId))
visit_id = Column('visit', Integer, ForeignKey(visitId))
<commit_msg>Add quantity to shopping item model<commit_after>"""
This is the sqlalchemy class for communicating with the shopping item table
"""
from sqlalchemy import Column, Integer, Unicode, ForeignKey
import base
class ShoppingItem(base.Base):
"""Sqlalchemy deals model"""
__tablename__ = "shopping_item"
catId = 'shopping_category.id'
visitId = 'visits.id'
id = Column(Integer, primary_key=True)
name = Column('name', Unicode)
quantity = Column('name', Integer)
category_id = Column('category', Integer, ForeignKey(catId))
visit_id = Column('visit', Integer, ForeignKey(visitId))
|
a52fe667125d9fd126b050cd32f694b9c3a97cdf
|
nlppln/save_ner_data.py
|
nlppln/save_ner_data.py
|
#!/usr/bin/env python
import click
import os
import codecs
import json
import pandas as pd
@click.command()
@click.argument('input_files', nargs=-1, type=click.Path(exists=True))
@click.argument('output_file', nargs=1, type=click.Path())
def nerstats(input_files, output_file):
output_dir = os.path.dirname(output_file)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
frames = []
for fi in input_files:
with codecs.open(fi, encoding='utf-8') as f:
saf = json.load(f)
data = {}
data['word'] = [t['word'] for t in saf['tokens'] if 'ne' in t.keys()]
data['ner'] = [t['ne'] for t in saf['tokens'] if 'ne' in t.keys()]
data['w_id'] = [t['id'] for t in saf['tokens'] if 'ne' in t.keys()]
data['text'] = [fi for t in saf['tokens'] if 'ne' in t.keys()]
frames.append(pd.DataFrame(data=data))
df = pd.concat(frames, ignore_index=True)
df.to_csv(output_file)
if __name__ == '__main__':
nerstats()
|
#!/usr/bin/env python
import click
import os
import codecs
import json
import pandas as pd
@click.command()
@click.argument('input_files', nargs=-1, type=click.Path(exists=True))
@click.argument('output_file', nargs=1, type=click.Path())
def nerstats(input_files, output_file):
output_dir = os.path.dirname(output_file)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
frames = []
for fi in input_files:
with codecs.open(fi, encoding='utf-8') as f:
saf = json.load(f)
data = {}
data['word'] = [t['word'] for t in saf['tokens'] if 'ne' in t.keys()]
data['ner'] = [t['ne'] for t in saf['tokens'] if 'ne' in t.keys()]
data['w_id'] = [t['id'] for t in saf['tokens'] if 'ne' in t.keys()]
data['text'] = [os.path.basename(fi)
for t in saf['tokens'] if 'ne' in t.keys()]
frames.append(pd.DataFrame(data=data))
df = pd.concat(frames, ignore_index=True)
df.to_csv(output_file)
if __name__ == '__main__':
nerstats()
|
Update script to store the basename instead of the complete path
|
Update script to store the basename instead of the complete path
|
Python
|
apache-2.0
|
WhatWorksWhenForWhom/nlppln,WhatWorksWhenForWhom/nlppln,WhatWorksWhenForWhom/nlppln
|
#!/usr/bin/env python
import click
import os
import codecs
import json
import pandas as pd
@click.command()
@click.argument('input_files', nargs=-1, type=click.Path(exists=True))
@click.argument('output_file', nargs=1, type=click.Path())
def nerstats(input_files, output_file):
output_dir = os.path.dirname(output_file)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
frames = []
for fi in input_files:
with codecs.open(fi, encoding='utf-8') as f:
saf = json.load(f)
data = {}
data['word'] = [t['word'] for t in saf['tokens'] if 'ne' in t.keys()]
data['ner'] = [t['ne'] for t in saf['tokens'] if 'ne' in t.keys()]
data['w_id'] = [t['id'] for t in saf['tokens'] if 'ne' in t.keys()]
data['text'] = [fi for t in saf['tokens'] if 'ne' in t.keys()]
frames.append(pd.DataFrame(data=data))
df = pd.concat(frames, ignore_index=True)
df.to_csv(output_file)
if __name__ == '__main__':
nerstats()
Update script to store the basename instead of the complete path
|
#!/usr/bin/env python
import click
import os
import codecs
import json
import pandas as pd
@click.command()
@click.argument('input_files', nargs=-1, type=click.Path(exists=True))
@click.argument('output_file', nargs=1, type=click.Path())
def nerstats(input_files, output_file):
output_dir = os.path.dirname(output_file)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
frames = []
for fi in input_files:
with codecs.open(fi, encoding='utf-8') as f:
saf = json.load(f)
data = {}
data['word'] = [t['word'] for t in saf['tokens'] if 'ne' in t.keys()]
data['ner'] = [t['ne'] for t in saf['tokens'] if 'ne' in t.keys()]
data['w_id'] = [t['id'] for t in saf['tokens'] if 'ne' in t.keys()]
data['text'] = [os.path.basename(fi)
for t in saf['tokens'] if 'ne' in t.keys()]
frames.append(pd.DataFrame(data=data))
df = pd.concat(frames, ignore_index=True)
df.to_csv(output_file)
if __name__ == '__main__':
nerstats()
|
<commit_before>#!/usr/bin/env python
import click
import os
import codecs
import json
import pandas as pd
@click.command()
@click.argument('input_files', nargs=-1, type=click.Path(exists=True))
@click.argument('output_file', nargs=1, type=click.Path())
def nerstats(input_files, output_file):
output_dir = os.path.dirname(output_file)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
frames = []
for fi in input_files:
with codecs.open(fi, encoding='utf-8') as f:
saf = json.load(f)
data = {}
data['word'] = [t['word'] for t in saf['tokens'] if 'ne' in t.keys()]
data['ner'] = [t['ne'] for t in saf['tokens'] if 'ne' in t.keys()]
data['w_id'] = [t['id'] for t in saf['tokens'] if 'ne' in t.keys()]
data['text'] = [fi for t in saf['tokens'] if 'ne' in t.keys()]
frames.append(pd.DataFrame(data=data))
df = pd.concat(frames, ignore_index=True)
df.to_csv(output_file)
if __name__ == '__main__':
nerstats()
<commit_msg>Update script to store the basename instead of the complete path<commit_after>
|
#!/usr/bin/env python
import click
import os
import codecs
import json
import pandas as pd
@click.command()
@click.argument('input_files', nargs=-1, type=click.Path(exists=True))
@click.argument('output_file', nargs=1, type=click.Path())
def nerstats(input_files, output_file):
output_dir = os.path.dirname(output_file)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
frames = []
for fi in input_files:
with codecs.open(fi, encoding='utf-8') as f:
saf = json.load(f)
data = {}
data['word'] = [t['word'] for t in saf['tokens'] if 'ne' in t.keys()]
data['ner'] = [t['ne'] for t in saf['tokens'] if 'ne' in t.keys()]
data['w_id'] = [t['id'] for t in saf['tokens'] if 'ne' in t.keys()]
data['text'] = [os.path.basename(fi)
for t in saf['tokens'] if 'ne' in t.keys()]
frames.append(pd.DataFrame(data=data))
df = pd.concat(frames, ignore_index=True)
df.to_csv(output_file)
if __name__ == '__main__':
nerstats()
|
#!/usr/bin/env python
import click
import os
import codecs
import json
import pandas as pd
@click.command()
@click.argument('input_files', nargs=-1, type=click.Path(exists=True))
@click.argument('output_file', nargs=1, type=click.Path())
def nerstats(input_files, output_file):
output_dir = os.path.dirname(output_file)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
frames = []
for fi in input_files:
with codecs.open(fi, encoding='utf-8') as f:
saf = json.load(f)
data = {}
data['word'] = [t['word'] for t in saf['tokens'] if 'ne' in t.keys()]
data['ner'] = [t['ne'] for t in saf['tokens'] if 'ne' in t.keys()]
data['w_id'] = [t['id'] for t in saf['tokens'] if 'ne' in t.keys()]
data['text'] = [fi for t in saf['tokens'] if 'ne' in t.keys()]
frames.append(pd.DataFrame(data=data))
df = pd.concat(frames, ignore_index=True)
df.to_csv(output_file)
if __name__ == '__main__':
nerstats()
Update script to store the basename instead of the complete path#!/usr/bin/env python
import click
import os
import codecs
import json
import pandas as pd
@click.command()
@click.argument('input_files', nargs=-1, type=click.Path(exists=True))
@click.argument('output_file', nargs=1, type=click.Path())
def nerstats(input_files, output_file):
output_dir = os.path.dirname(output_file)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
frames = []
for fi in input_files:
with codecs.open(fi, encoding='utf-8') as f:
saf = json.load(f)
data = {}
data['word'] = [t['word'] for t in saf['tokens'] if 'ne' in t.keys()]
data['ner'] = [t['ne'] for t in saf['tokens'] if 'ne' in t.keys()]
data['w_id'] = [t['id'] for t in saf['tokens'] if 'ne' in t.keys()]
data['text'] = [os.path.basename(fi)
for t in saf['tokens'] if 'ne' in t.keys()]
frames.append(pd.DataFrame(data=data))
df = pd.concat(frames, ignore_index=True)
df.to_csv(output_file)
if __name__ == '__main__':
nerstats()
|
<commit_before>#!/usr/bin/env python
import click
import os
import codecs
import json
import pandas as pd
@click.command()
@click.argument('input_files', nargs=-1, type=click.Path(exists=True))
@click.argument('output_file', nargs=1, type=click.Path())
def nerstats(input_files, output_file):
output_dir = os.path.dirname(output_file)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
frames = []
for fi in input_files:
with codecs.open(fi, encoding='utf-8') as f:
saf = json.load(f)
data = {}
data['word'] = [t['word'] for t in saf['tokens'] if 'ne' in t.keys()]
data['ner'] = [t['ne'] for t in saf['tokens'] if 'ne' in t.keys()]
data['w_id'] = [t['id'] for t in saf['tokens'] if 'ne' in t.keys()]
data['text'] = [fi for t in saf['tokens'] if 'ne' in t.keys()]
frames.append(pd.DataFrame(data=data))
df = pd.concat(frames, ignore_index=True)
df.to_csv(output_file)
if __name__ == '__main__':
nerstats()
<commit_msg>Update script to store the basename instead of the complete path<commit_after>#!/usr/bin/env python
import click
import os
import codecs
import json
import pandas as pd
@click.command()
@click.argument('input_files', nargs=-1, type=click.Path(exists=True))
@click.argument('output_file', nargs=1, type=click.Path())
def nerstats(input_files, output_file):
output_dir = os.path.dirname(output_file)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
frames = []
for fi in input_files:
with codecs.open(fi, encoding='utf-8') as f:
saf = json.load(f)
data = {}
data['word'] = [t['word'] for t in saf['tokens'] if 'ne' in t.keys()]
data['ner'] = [t['ne'] for t in saf['tokens'] if 'ne' in t.keys()]
data['w_id'] = [t['id'] for t in saf['tokens'] if 'ne' in t.keys()]
data['text'] = [os.path.basename(fi)
for t in saf['tokens'] if 'ne' in t.keys()]
frames.append(pd.DataFrame(data=data))
df = pd.concat(frames, ignore_index=True)
df.to_csv(output_file)
if __name__ == '__main__':
nerstats()
|
00fd5643e94cbe5543a22e804c050e979776ac6b
|
opps/flatpages/views.py
|
opps/flatpages/views.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView
from django.contrib.sites.models import get_current_site
from django import template
from django.utils import timezone
from .models import FlatPage
class PageDetail(DetailView):
model = FlatPage
context_object_name = "context"
type = 'pages'
@property
def template_name(self):
domain_folder = self.type
if self.site.id > 1:
domain_folder = "{0}/{1}".format(self.site, self.type)
try:
_template = '{0}/{1}.html'.format(
domain_folder, self.page.get().slug)
template.loader.get_template(_template)
except template.TemplateDoesNotExist:
_template = '{0}.html'.format(domain_folder)
return _template
@property
def queryset(self):
self.site = get_current_site(self.request)
self.slug = self.kwargs.get('slug')
self.page = self.model.objects.filter(
site=self.site,
slug=self.slug,
date_available__lte=timezone.now(),
published=True)
return self.page
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView
from django.contrib.sites.models import get_current_site
from django.utils import timezone
from .models import FlatPage
class PageDetail(DetailView):
model = FlatPage
context_object_name = "context"
type = 'flatpages'
@property
def template_name(self):
domain_folder = self.type
if self.site.id > 1:
domain_folder = "{0}/{1}".format(self.site, self.type)
_template = '{0}/{1}.html'.format(
domain_folder, self.page.get().slug)
return _template
@property
def queryset(self):
self.site = get_current_site(self.request)
self.slug = self.kwargs.get('slug')
self.page = self.model.objects.filter(
site=self.site,
slug=self.slug,
date_available__lte=timezone.now(),
published=True)
return self.page
|
Fix template load on PageDetail flatpages app
|
Fix template load on PageDetail flatpages app
|
Python
|
mit
|
opps/opps,jeanmask/opps,williamroot/opps,jeanmask/opps,YACOWS/opps,jeanmask/opps,opps/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,opps/opps,williamroot/opps,opps/opps,williamroot/opps
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView
from django.contrib.sites.models import get_current_site
from django import template
from django.utils import timezone
from .models import FlatPage
class PageDetail(DetailView):
model = FlatPage
context_object_name = "context"
type = 'pages'
@property
def template_name(self):
domain_folder = self.type
if self.site.id > 1:
domain_folder = "{0}/{1}".format(self.site, self.type)
try:
_template = '{0}/{1}.html'.format(
domain_folder, self.page.get().slug)
template.loader.get_template(_template)
except template.TemplateDoesNotExist:
_template = '{0}.html'.format(domain_folder)
return _template
@property
def queryset(self):
self.site = get_current_site(self.request)
self.slug = self.kwargs.get('slug')
self.page = self.model.objects.filter(
site=self.site,
slug=self.slug,
date_available__lte=timezone.now(),
published=True)
return self.page
Fix template load on PageDetail flatpages app
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView
from django.contrib.sites.models import get_current_site
from django.utils import timezone
from .models import FlatPage
class PageDetail(DetailView):
model = FlatPage
context_object_name = "context"
type = 'flatpages'
@property
def template_name(self):
domain_folder = self.type
if self.site.id > 1:
domain_folder = "{0}/{1}".format(self.site, self.type)
_template = '{0}/{1}.html'.format(
domain_folder, self.page.get().slug)
return _template
@property
def queryset(self):
self.site = get_current_site(self.request)
self.slug = self.kwargs.get('slug')
self.page = self.model.objects.filter(
site=self.site,
slug=self.slug,
date_available__lte=timezone.now(),
published=True)
return self.page
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView
from django.contrib.sites.models import get_current_site
from django import template
from django.utils import timezone
from .models import FlatPage
class PageDetail(DetailView):
model = FlatPage
context_object_name = "context"
type = 'pages'
@property
def template_name(self):
domain_folder = self.type
if self.site.id > 1:
domain_folder = "{0}/{1}".format(self.site, self.type)
try:
_template = '{0}/{1}.html'.format(
domain_folder, self.page.get().slug)
template.loader.get_template(_template)
except template.TemplateDoesNotExist:
_template = '{0}.html'.format(domain_folder)
return _template
@property
def queryset(self):
self.site = get_current_site(self.request)
self.slug = self.kwargs.get('slug')
self.page = self.model.objects.filter(
site=self.site,
slug=self.slug,
date_available__lte=timezone.now(),
published=True)
return self.page
<commit_msg>Fix template load on PageDetail flatpages app<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView
from django.contrib.sites.models import get_current_site
from django.utils import timezone
from .models import FlatPage
class PageDetail(DetailView):
model = FlatPage
context_object_name = "context"
type = 'flatpages'
@property
def template_name(self):
domain_folder = self.type
if self.site.id > 1:
domain_folder = "{0}/{1}".format(self.site, self.type)
_template = '{0}/{1}.html'.format(
domain_folder, self.page.get().slug)
return _template
@property
def queryset(self):
self.site = get_current_site(self.request)
self.slug = self.kwargs.get('slug')
self.page = self.model.objects.filter(
site=self.site,
slug=self.slug,
date_available__lte=timezone.now(),
published=True)
return self.page
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView
from django.contrib.sites.models import get_current_site
from django import template
from django.utils import timezone
from .models import FlatPage
class PageDetail(DetailView):
model = FlatPage
context_object_name = "context"
type = 'pages'
@property
def template_name(self):
domain_folder = self.type
if self.site.id > 1:
domain_folder = "{0}/{1}".format(self.site, self.type)
try:
_template = '{0}/{1}.html'.format(
domain_folder, self.page.get().slug)
template.loader.get_template(_template)
except template.TemplateDoesNotExist:
_template = '{0}.html'.format(domain_folder)
return _template
@property
def queryset(self):
self.site = get_current_site(self.request)
self.slug = self.kwargs.get('slug')
self.page = self.model.objects.filter(
site=self.site,
slug=self.slug,
date_available__lte=timezone.now(),
published=True)
return self.page
Fix template load on PageDetail flatpages app#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView
from django.contrib.sites.models import get_current_site
from django.utils import timezone
from .models import FlatPage
class PageDetail(DetailView):
model = FlatPage
context_object_name = "context"
type = 'flatpages'
@property
def template_name(self):
domain_folder = self.type
if self.site.id > 1:
domain_folder = "{0}/{1}".format(self.site, self.type)
_template = '{0}/{1}.html'.format(
domain_folder, self.page.get().slug)
return _template
@property
def queryset(self):
self.site = get_current_site(self.request)
self.slug = self.kwargs.get('slug')
self.page = self.model.objects.filter(
site=self.site,
slug=self.slug,
date_available__lte=timezone.now(),
published=True)
return self.page
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView
from django.contrib.sites.models import get_current_site
from django import template
from django.utils import timezone
from .models import FlatPage
class PageDetail(DetailView):
model = FlatPage
context_object_name = "context"
type = 'pages'
@property
def template_name(self):
domain_folder = self.type
if self.site.id > 1:
domain_folder = "{0}/{1}".format(self.site, self.type)
try:
_template = '{0}/{1}.html'.format(
domain_folder, self.page.get().slug)
template.loader.get_template(_template)
except template.TemplateDoesNotExist:
_template = '{0}.html'.format(domain_folder)
return _template
@property
def queryset(self):
self.site = get_current_site(self.request)
self.slug = self.kwargs.get('slug')
self.page = self.model.objects.filter(
site=self.site,
slug=self.slug,
date_available__lte=timezone.now(),
published=True)
return self.page
<commit_msg>Fix template load on PageDetail flatpages app<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView
from django.contrib.sites.models import get_current_site
from django.utils import timezone
from .models import FlatPage
class PageDetail(DetailView):
model = FlatPage
context_object_name = "context"
type = 'flatpages'
@property
def template_name(self):
domain_folder = self.type
if self.site.id > 1:
domain_folder = "{0}/{1}".format(self.site, self.type)
_template = '{0}/{1}.html'.format(
domain_folder, self.page.get().slug)
return _template
@property
def queryset(self):
self.site = get_current_site(self.request)
self.slug = self.kwargs.get('slug')
self.page = self.model.objects.filter(
site=self.site,
slug=self.slug,
date_available__lte=timezone.now(),
published=True)
return self.page
|
61e4af18ddef3723b49bc4e6e7a8ff00e8a755af
|
organise/views/todos.py
|
organise/views/todos.py
|
from flask import Blueprint, render_template
from organise.models import Todo
from organise import db
from flask import render_template, request, redirect, url_for, flash
todos = Blueprint('todos', __name__, template_folder='/../templates')
@todos.route('/')
def index():
all_todos = Todo.query.order_by(Todo.id.desc()).all()
return render_template('todos.html', all_todos=all_todos)
@todos.route('/add', methods=['POST'])
def add_todo():
todo = Todo(request.form['title'], request.form['description'])
db.session.add(todo)
db.session.commit()
flash('New todo was added!')
return redirect(url_for('todos.index'))
@todos.route('/todo/<int:t_id>')
def show_todo(t_id):
todo = Todo.query.filter_by(id=t_id).first()
return render_template('todo.html', todo=todo)
@todos.route('/todo/<int:t_id>/edit')
def edit_todo(t_id):
pass
@todos.route('/todo/<int:t_id>/delete', methods=['POST'])
def delete_todo(t_id):
pass
|
from flask import Blueprint, render_template
from organise.models import Todo
from organise import db
from flask import render_template, request, redirect, url_for, flash
todos = Blueprint('todos', __name__, template_folder='/../templates')
@todos.route('/')
def index():
all_todos = Todo.query.order_by(Todo.id.desc()).all()
return render_template('todos.html', all_todos=all_todos)
@todos.route('/add', methods=['POST'])
def add_todo():
todo = Todo(request.form['title'], request.form['description'])
db.session.add(todo)
db.session.commit()
flash('New todo was added')
return redirect(url_for('todos.index'))
@todos.route('/todo/<int:t_id>')
def show_todo(t_id):
todo = Todo.query.filter_by(id=t_id).first()
return render_template('todo.html', todo=todo)
@todos.route('/todo/<int:t_id>/edit', methods=['POST'])
def edit_todo(t_id):
changed_title = request.form['title']
changed_description = request.form['description']
todo = Todo.query.filter_by(id=t_id).first()
todo.title = changed_title
todo.description = changed_description
db.session.commit()
flash('All changes were saved')
return redirect(url_for('todos.show_todo', t_id=t_id))
@todos.route('/todo/<int:t_id>/delete', methods=['POST'])
def delete_todo(t_id):
todo = Todo.query.filter_by(id=t_id).first()
db.session.delete(todo)
db.session.commit()
return redirect(url_for('todos.index'))
|
Add functionality for edit and delete functions
|
Add functionality for edit and delete functions
|
Python
|
mit
|
msanatan/organise,msanatan/organise
|
from flask import Blueprint, render_template
from organise.models import Todo
from organise import db
from flask import render_template, request, redirect, url_for, flash
todos = Blueprint('todos', __name__, template_folder='/../templates')
@todos.route('/')
def index():
all_todos = Todo.query.order_by(Todo.id.desc()).all()
return render_template('todos.html', all_todos=all_todos)
@todos.route('/add', methods=['POST'])
def add_todo():
todo = Todo(request.form['title'], request.form['description'])
db.session.add(todo)
db.session.commit()
flash('New todo was added!')
return redirect(url_for('todos.index'))
@todos.route('/todo/<int:t_id>')
def show_todo(t_id):
todo = Todo.query.filter_by(id=t_id).first()
return render_template('todo.html', todo=todo)
@todos.route('/todo/<int:t_id>/edit')
def edit_todo(t_id):
pass
@todos.route('/todo/<int:t_id>/delete', methods=['POST'])
def delete_todo(t_id):
pass
Add functionality for edit and delete functions
|
from flask import Blueprint, render_template
from organise.models import Todo
from organise import db
from flask import render_template, request, redirect, url_for, flash
todos = Blueprint('todos', __name__, template_folder='/../templates')
@todos.route('/')
def index():
all_todos = Todo.query.order_by(Todo.id.desc()).all()
return render_template('todos.html', all_todos=all_todos)
@todos.route('/add', methods=['POST'])
def add_todo():
todo = Todo(request.form['title'], request.form['description'])
db.session.add(todo)
db.session.commit()
flash('New todo was added')
return redirect(url_for('todos.index'))
@todos.route('/todo/<int:t_id>')
def show_todo(t_id):
todo = Todo.query.filter_by(id=t_id).first()
return render_template('todo.html', todo=todo)
@todos.route('/todo/<int:t_id>/edit', methods=['POST'])
def edit_todo(t_id):
changed_title = request.form['title']
changed_description = request.form['description']
todo = Todo.query.filter_by(id=t_id).first()
todo.title = changed_title
todo.description = changed_description
db.session.commit()
flash('All changes were saved')
return redirect(url_for('todos.show_todo', t_id=t_id))
@todos.route('/todo/<int:t_id>/delete', methods=['POST'])
def delete_todo(t_id):
todo = Todo.query.filter_by(id=t_id).first()
db.session.delete(todo)
db.session.commit()
return redirect(url_for('todos.index'))
|
<commit_before>from flask import Blueprint, render_template
from organise.models import Todo
from organise import db
from flask import render_template, request, redirect, url_for, flash
todos = Blueprint('todos', __name__, template_folder='/../templates')
@todos.route('/')
def index():
all_todos = Todo.query.order_by(Todo.id.desc()).all()
return render_template('todos.html', all_todos=all_todos)
@todos.route('/add', methods=['POST'])
def add_todo():
todo = Todo(request.form['title'], request.form['description'])
db.session.add(todo)
db.session.commit()
flash('New todo was added!')
return redirect(url_for('todos.index'))
@todos.route('/todo/<int:t_id>')
def show_todo(t_id):
todo = Todo.query.filter_by(id=t_id).first()
return render_template('todo.html', todo=todo)
@todos.route('/todo/<int:t_id>/edit')
def edit_todo(t_id):
pass
@todos.route('/todo/<int:t_id>/delete', methods=['POST'])
def delete_todo(t_id):
pass
<commit_msg>Add functionality for edit and delete functions<commit_after>
|
from flask import Blueprint, render_template
from organise.models import Todo
from organise import db
from flask import render_template, request, redirect, url_for, flash
todos = Blueprint('todos', __name__, template_folder='/../templates')
@todos.route('/')
def index():
all_todos = Todo.query.order_by(Todo.id.desc()).all()
return render_template('todos.html', all_todos=all_todos)
@todos.route('/add', methods=['POST'])
def add_todo():
todo = Todo(request.form['title'], request.form['description'])
db.session.add(todo)
db.session.commit()
flash('New todo was added')
return redirect(url_for('todos.index'))
@todos.route('/todo/<int:t_id>')
def show_todo(t_id):
todo = Todo.query.filter_by(id=t_id).first()
return render_template('todo.html', todo=todo)
@todos.route('/todo/<int:t_id>/edit', methods=['POST'])
def edit_todo(t_id):
changed_title = request.form['title']
changed_description = request.form['description']
todo = Todo.query.filter_by(id=t_id).first()
todo.title = changed_title
todo.description = changed_description
db.session.commit()
flash('All changes were saved')
return redirect(url_for('todos.show_todo', t_id=t_id))
@todos.route('/todo/<int:t_id>/delete', methods=['POST'])
def delete_todo(t_id):
todo = Todo.query.filter_by(id=t_id).first()
db.session.delete(todo)
db.session.commit()
return redirect(url_for('todos.index'))
|
from flask import Blueprint, render_template
from organise.models import Todo
from organise import db
from flask import render_template, request, redirect, url_for, flash
todos = Blueprint('todos', __name__, template_folder='/../templates')
@todos.route('/')
def index():
all_todos = Todo.query.order_by(Todo.id.desc()).all()
return render_template('todos.html', all_todos=all_todos)
@todos.route('/add', methods=['POST'])
def add_todo():
todo = Todo(request.form['title'], request.form['description'])
db.session.add(todo)
db.session.commit()
flash('New todo was added!')
return redirect(url_for('todos.index'))
@todos.route('/todo/<int:t_id>')
def show_todo(t_id):
todo = Todo.query.filter_by(id=t_id).first()
return render_template('todo.html', todo=todo)
@todos.route('/todo/<int:t_id>/edit')
def edit_todo(t_id):
pass
@todos.route('/todo/<int:t_id>/delete', methods=['POST'])
def delete_todo(t_id):
pass
Add functionality for edit and delete functionsfrom flask import Blueprint, render_template
from organise.models import Todo
from organise import db
from flask import render_template, request, redirect, url_for, flash
todos = Blueprint('todos', __name__, template_folder='/../templates')
@todos.route('/')
def index():
all_todos = Todo.query.order_by(Todo.id.desc()).all()
return render_template('todos.html', all_todos=all_todos)
@todos.route('/add', methods=['POST'])
def add_todo():
todo = Todo(request.form['title'], request.form['description'])
db.session.add(todo)
db.session.commit()
flash('New todo was added')
return redirect(url_for('todos.index'))
@todos.route('/todo/<int:t_id>')
def show_todo(t_id):
todo = Todo.query.filter_by(id=t_id).first()
return render_template('todo.html', todo=todo)
@todos.route('/todo/<int:t_id>/edit', methods=['POST'])
def edit_todo(t_id):
changed_title = request.form['title']
changed_description = request.form['description']
todo = Todo.query.filter_by(id=t_id).first()
todo.title = changed_title
todo.description = changed_description
db.session.commit()
flash('All changes were saved')
return redirect(url_for('todos.show_todo', t_id=t_id))
@todos.route('/todo/<int:t_id>/delete', methods=['POST'])
def delete_todo(t_id):
todo = Todo.query.filter_by(id=t_id).first()
db.session.delete(todo)
db.session.commit()
return redirect(url_for('todos.index'))
|
<commit_before>from flask import Blueprint, render_template
from organise.models import Todo
from organise import db
from flask import render_template, request, redirect, url_for, flash
todos = Blueprint('todos', __name__, template_folder='/../templates')
@todos.route('/')
def index():
all_todos = Todo.query.order_by(Todo.id.desc()).all()
return render_template('todos.html', all_todos=all_todos)
@todos.route('/add', methods=['POST'])
def add_todo():
todo = Todo(request.form['title'], request.form['description'])
db.session.add(todo)
db.session.commit()
flash('New todo was added!')
return redirect(url_for('todos.index'))
@todos.route('/todo/<int:t_id>')
def show_todo(t_id):
todo = Todo.query.filter_by(id=t_id).first()
return render_template('todo.html', todo=todo)
@todos.route('/todo/<int:t_id>/edit')
def edit_todo(t_id):
pass
@todos.route('/todo/<int:t_id>/delete', methods=['POST'])
def delete_todo(t_id):
pass
<commit_msg>Add functionality for edit and delete functions<commit_after>from flask import Blueprint, render_template
from organise.models import Todo
from organise import db
from flask import render_template, request, redirect, url_for, flash
todos = Blueprint('todos', __name__, template_folder='/../templates')
@todos.route('/')
def index():
all_todos = Todo.query.order_by(Todo.id.desc()).all()
return render_template('todos.html', all_todos=all_todos)
@todos.route('/add', methods=['POST'])
def add_todo():
todo = Todo(request.form['title'], request.form['description'])
db.session.add(todo)
db.session.commit()
flash('New todo was added')
return redirect(url_for('todos.index'))
@todos.route('/todo/<int:t_id>')
def show_todo(t_id):
todo = Todo.query.filter_by(id=t_id).first()
return render_template('todo.html', todo=todo)
@todos.route('/todo/<int:t_id>/edit', methods=['POST'])
def edit_todo(t_id):
changed_title = request.form['title']
changed_description = request.form['description']
todo = Todo.query.filter_by(id=t_id).first()
todo.title = changed_title
todo.description = changed_description
db.session.commit()
flash('All changes were saved')
return redirect(url_for('todos.show_todo', t_id=t_id))
@todos.route('/todo/<int:t_id>/delete', methods=['POST'])
def delete_todo(t_id):
todo = Todo.query.filter_by(id=t_id).first()
db.session.delete(todo)
db.session.commit()
return redirect(url_for('todos.index'))
|
9bd09a225a1899d8cd4f8565986f23a8c3b44131
|
api/migrations/0001_create_application.py
|
api/migrations/0001_create_application.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-26 20:29
from __future__ import unicode_literals
from django.db import migrations
from oauth2_provider.models import Application
class Migration(migrations.Migration):
def add_default_application(apps, schema_editor):
Application.objects.create(
name="OSM Export Tool UI",
redirect_uris="http://localhost/authorized",
client_type=Application.CLIENT_PUBLIC,
authorization_grant_type=Application.GRANT_IMPLICIT,
skip_authorization=True)
dependencies = [
("oauth2_provider", "0005_auto_20170514_1141"),
]
operations = [
migrations.RunPython(add_default_application),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-26 20:29
from __future__ import unicode_literals
from django.db import migrations
from oauth2_provider.models import Application
class Migration(migrations.Migration):
def add_default_application(apps, schema_editor):
Application.objects.create(
name="OSM Export Tool UI",
redirect_uris=
"http://localhost/authorized http://localhost:8080/authorized",
client_type=Application.CLIENT_PUBLIC,
authorization_grant_type=Application.GRANT_IMPLICIT,
skip_authorization=True)
dependencies = [
("oauth2_provider", "0005_auto_20170514_1141"),
]
operations = [
migrations.RunPython(add_default_application),
]
|
Include webpack-dev-server URL as a valid redirect
|
Include webpack-dev-server URL as a valid redirect
|
Python
|
bsd-3-clause
|
hotosm/osm-export-tool2,hotosm/osm-export-tool2,hotosm/osm-export-tool2,hotosm/osm-export-tool2
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-26 20:29
from __future__ import unicode_literals
from django.db import migrations
from oauth2_provider.models import Application
class Migration(migrations.Migration):
def add_default_application(apps, schema_editor):
Application.objects.create(
name="OSM Export Tool UI",
redirect_uris="http://localhost/authorized",
client_type=Application.CLIENT_PUBLIC,
authorization_grant_type=Application.GRANT_IMPLICIT,
skip_authorization=True)
dependencies = [
("oauth2_provider", "0005_auto_20170514_1141"),
]
operations = [
migrations.RunPython(add_default_application),
]
Include webpack-dev-server URL as a valid redirect
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-26 20:29
from __future__ import unicode_literals
from django.db import migrations
from oauth2_provider.models import Application
class Migration(migrations.Migration):
def add_default_application(apps, schema_editor):
Application.objects.create(
name="OSM Export Tool UI",
redirect_uris=
"http://localhost/authorized http://localhost:8080/authorized",
client_type=Application.CLIENT_PUBLIC,
authorization_grant_type=Application.GRANT_IMPLICIT,
skip_authorization=True)
dependencies = [
("oauth2_provider", "0005_auto_20170514_1141"),
]
operations = [
migrations.RunPython(add_default_application),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-26 20:29
from __future__ import unicode_literals
from django.db import migrations
from oauth2_provider.models import Application
class Migration(migrations.Migration):
def add_default_application(apps, schema_editor):
Application.objects.create(
name="OSM Export Tool UI",
redirect_uris="http://localhost/authorized",
client_type=Application.CLIENT_PUBLIC,
authorization_grant_type=Application.GRANT_IMPLICIT,
skip_authorization=True)
dependencies = [
("oauth2_provider", "0005_auto_20170514_1141"),
]
operations = [
migrations.RunPython(add_default_application),
]
<commit_msg>Include webpack-dev-server URL as a valid redirect<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-26 20:29
from __future__ import unicode_literals
from django.db import migrations
from oauth2_provider.models import Application
class Migration(migrations.Migration):
def add_default_application(apps, schema_editor):
Application.objects.create(
name="OSM Export Tool UI",
redirect_uris=
"http://localhost/authorized http://localhost:8080/authorized",
client_type=Application.CLIENT_PUBLIC,
authorization_grant_type=Application.GRANT_IMPLICIT,
skip_authorization=True)
dependencies = [
("oauth2_provider", "0005_auto_20170514_1141"),
]
operations = [
migrations.RunPython(add_default_application),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-26 20:29
from __future__ import unicode_literals
from django.db import migrations
from oauth2_provider.models import Application
class Migration(migrations.Migration):
def add_default_application(apps, schema_editor):
Application.objects.create(
name="OSM Export Tool UI",
redirect_uris="http://localhost/authorized",
client_type=Application.CLIENT_PUBLIC,
authorization_grant_type=Application.GRANT_IMPLICIT,
skip_authorization=True)
dependencies = [
("oauth2_provider", "0005_auto_20170514_1141"),
]
operations = [
migrations.RunPython(add_default_application),
]
Include webpack-dev-server URL as a valid redirect# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-26 20:29
from __future__ import unicode_literals
from django.db import migrations
from oauth2_provider.models import Application
class Migration(migrations.Migration):
def add_default_application(apps, schema_editor):
Application.objects.create(
name="OSM Export Tool UI",
redirect_uris=
"http://localhost/authorized http://localhost:8080/authorized",
client_type=Application.CLIENT_PUBLIC,
authorization_grant_type=Application.GRANT_IMPLICIT,
skip_authorization=True)
dependencies = [
("oauth2_provider", "0005_auto_20170514_1141"),
]
operations = [
migrations.RunPython(add_default_application),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-26 20:29
from __future__ import unicode_literals
from django.db import migrations
from oauth2_provider.models import Application
class Migration(migrations.Migration):
def add_default_application(apps, schema_editor):
Application.objects.create(
name="OSM Export Tool UI",
redirect_uris="http://localhost/authorized",
client_type=Application.CLIENT_PUBLIC,
authorization_grant_type=Application.GRANT_IMPLICIT,
skip_authorization=True)
dependencies = [
("oauth2_provider", "0005_auto_20170514_1141"),
]
operations = [
migrations.RunPython(add_default_application),
]
<commit_msg>Include webpack-dev-server URL as a valid redirect<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-26 20:29
from __future__ import unicode_literals
from django.db import migrations
from oauth2_provider.models import Application
class Migration(migrations.Migration):
def add_default_application(apps, schema_editor):
Application.objects.create(
name="OSM Export Tool UI",
redirect_uris=
"http://localhost/authorized http://localhost:8080/authorized",
client_type=Application.CLIENT_PUBLIC,
authorization_grant_type=Application.GRANT_IMPLICIT,
skip_authorization=True)
dependencies = [
("oauth2_provider", "0005_auto_20170514_1141"),
]
operations = [
migrations.RunPython(add_default_application),
]
|
a6a646dec44b2eb613cac9c143cf6c7770f738e8
|
tests/test_metadata.py
|
tests/test_metadata.py
|
"""
Tests for BSE metadata
"""
import os
import hashlib
import bse
from bse import curate
data_dir = bse.default_data_dir
def test_get_metadata():
bse.get_metadata()
def test_metadata_uptodate():
old_metadata = os.path.join(data_dir, 'METADATA.json')
new_metadata = os.path.join(data_dir, 'METADATA.json.new')
curate.create_metadata_file(new_metadata, data_dir)
with open(old_metadata, 'rb') as f:
old_hash = hashlib.sha1(f.read()).hexdigest()
with open(new_metadata, 'rb') as f:
new_hash = hashlib.sha1(f.read()).hexdigest()
os.remove(new_metadata)
if old_hash != new_hash:
print("Old hash: ", old_hash)
print("New hash: ", new_hash)
raise RuntimeError("Metadata does not appear to be up to date")
|
"""
Tests for BSE metadata
"""
import os
import bse
import json
from bse import curate
data_dir = bse.default_data_dir
def test_get_metadata():
bse.get_metadata()
def test_metadata_uptodate():
old_metadata = os.path.join(data_dir, 'METADATA.json')
new_metadata = os.path.join(data_dir, 'METADATA.json.new')
curate.create_metadata_file(new_metadata, data_dir)
with open(old_metadata, 'r') as f:
old_data = json.load(f)
with open(new_metadata, 'r') as f:
new_data = json.load(f)
os.remove(new_metadata)
if old_data != new_data:
raise RuntimeError("Metadata does not appear to be up to date")
|
Fix testing of metadata - hashing is too strict
|
Fix testing of metadata - hashing is too strict
|
Python
|
bsd-3-clause
|
MOLSSI-BSE/basis_set_exchange
|
"""
Tests for BSE metadata
"""
import os
import hashlib
import bse
from bse import curate
data_dir = bse.default_data_dir
def test_get_metadata():
bse.get_metadata()
def test_metadata_uptodate():
old_metadata = os.path.join(data_dir, 'METADATA.json')
new_metadata = os.path.join(data_dir, 'METADATA.json.new')
curate.create_metadata_file(new_metadata, data_dir)
with open(old_metadata, 'rb') as f:
old_hash = hashlib.sha1(f.read()).hexdigest()
with open(new_metadata, 'rb') as f:
new_hash = hashlib.sha1(f.read()).hexdigest()
os.remove(new_metadata)
if old_hash != new_hash:
print("Old hash: ", old_hash)
print("New hash: ", new_hash)
raise RuntimeError("Metadata does not appear to be up to date")
Fix testing of metadata - hashing is too strict
|
"""
Tests for BSE metadata
"""
import os
import bse
import json
from bse import curate
data_dir = bse.default_data_dir
def test_get_metadata():
bse.get_metadata()
def test_metadata_uptodate():
old_metadata = os.path.join(data_dir, 'METADATA.json')
new_metadata = os.path.join(data_dir, 'METADATA.json.new')
curate.create_metadata_file(new_metadata, data_dir)
with open(old_metadata, 'r') as f:
old_data = json.load(f)
with open(new_metadata, 'r') as f:
new_data = json.load(f)
os.remove(new_metadata)
if old_data != new_data:
raise RuntimeError("Metadata does not appear to be up to date")
|
<commit_before>"""
Tests for BSE metadata
"""
import os
import hashlib
import bse
from bse import curate
data_dir = bse.default_data_dir
def test_get_metadata():
bse.get_metadata()
def test_metadata_uptodate():
old_metadata = os.path.join(data_dir, 'METADATA.json')
new_metadata = os.path.join(data_dir, 'METADATA.json.new')
curate.create_metadata_file(new_metadata, data_dir)
with open(old_metadata, 'rb') as f:
old_hash = hashlib.sha1(f.read()).hexdigest()
with open(new_metadata, 'rb') as f:
new_hash = hashlib.sha1(f.read()).hexdigest()
os.remove(new_metadata)
if old_hash != new_hash:
print("Old hash: ", old_hash)
print("New hash: ", new_hash)
raise RuntimeError("Metadata does not appear to be up to date")
<commit_msg>Fix testing of metadata - hashing is too strict<commit_after>
|
"""
Tests for BSE metadata
"""
import os
import bse
import json
from bse import curate
data_dir = bse.default_data_dir
def test_get_metadata():
bse.get_metadata()
def test_metadata_uptodate():
old_metadata = os.path.join(data_dir, 'METADATA.json')
new_metadata = os.path.join(data_dir, 'METADATA.json.new')
curate.create_metadata_file(new_metadata, data_dir)
with open(old_metadata, 'r') as f:
old_data = json.load(f)
with open(new_metadata, 'r') as f:
new_data = json.load(f)
os.remove(new_metadata)
if old_data != new_data:
raise RuntimeError("Metadata does not appear to be up to date")
|
"""
Tests for BSE metadata
"""
import os
import hashlib
import bse
from bse import curate
data_dir = bse.default_data_dir
def test_get_metadata():
bse.get_metadata()
def test_metadata_uptodate():
old_metadata = os.path.join(data_dir, 'METADATA.json')
new_metadata = os.path.join(data_dir, 'METADATA.json.new')
curate.create_metadata_file(new_metadata, data_dir)
with open(old_metadata, 'rb') as f:
old_hash = hashlib.sha1(f.read()).hexdigest()
with open(new_metadata, 'rb') as f:
new_hash = hashlib.sha1(f.read()).hexdigest()
os.remove(new_metadata)
if old_hash != new_hash:
print("Old hash: ", old_hash)
print("New hash: ", new_hash)
raise RuntimeError("Metadata does not appear to be up to date")
Fix testing of metadata - hashing is too strict"""
Tests for BSE metadata
"""
import os
import bse
import json
from bse import curate
data_dir = bse.default_data_dir
def test_get_metadata():
bse.get_metadata()
def test_metadata_uptodate():
old_metadata = os.path.join(data_dir, 'METADATA.json')
new_metadata = os.path.join(data_dir, 'METADATA.json.new')
curate.create_metadata_file(new_metadata, data_dir)
with open(old_metadata, 'r') as f:
old_data = json.load(f)
with open(new_metadata, 'r') as f:
new_data = json.load(f)
os.remove(new_metadata)
if old_data != new_data:
raise RuntimeError("Metadata does not appear to be up to date")
|
<commit_before>"""
Tests for BSE metadata
"""
import os
import hashlib
import bse
from bse import curate
data_dir = bse.default_data_dir
def test_get_metadata():
bse.get_metadata()
def test_metadata_uptodate():
old_metadata = os.path.join(data_dir, 'METADATA.json')
new_metadata = os.path.join(data_dir, 'METADATA.json.new')
curate.create_metadata_file(new_metadata, data_dir)
with open(old_metadata, 'rb') as f:
old_hash = hashlib.sha1(f.read()).hexdigest()
with open(new_metadata, 'rb') as f:
new_hash = hashlib.sha1(f.read()).hexdigest()
os.remove(new_metadata)
if old_hash != new_hash:
print("Old hash: ", old_hash)
print("New hash: ", new_hash)
raise RuntimeError("Metadata does not appear to be up to date")
<commit_msg>Fix testing of metadata - hashing is too strict<commit_after>"""
Tests for BSE metadata
"""
import os
import bse
import json
from bse import curate
data_dir = bse.default_data_dir
def test_get_metadata():
bse.get_metadata()
def test_metadata_uptodate():
old_metadata = os.path.join(data_dir, 'METADATA.json')
new_metadata = os.path.join(data_dir, 'METADATA.json.new')
curate.create_metadata_file(new_metadata, data_dir)
with open(old_metadata, 'r') as f:
old_data = json.load(f)
with open(new_metadata, 'r') as f:
new_data = json.load(f)
os.remove(new_metadata)
if old_data != new_data:
raise RuntimeError("Metadata does not appear to be up to date")
|
9aa48fa2a3a693c7cd5a74712b9a63ac15f32a94
|
tests/test_settings.py
|
tests/test_settings.py
|
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
SECRET_KEY = 'fake-key'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'csv_export',
'tests',
]
MIDDLEWARE = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
]
USE_TZ = True
ROOT_URLCONF = 'tests.urls'
|
import django
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
SECRET_KEY = 'fake-key'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'csv_export',
'tests',
]
MIDDLEWARE = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
]
if django.VERSION < (1, 10):
MIDDLEWARE_CLASSES = MIDDLEWARE
USE_TZ = True
ROOT_URLCONF = 'tests.urls'
|
Fix tests on Django 1.8.
|
Fix tests on Django 1.8.
|
Python
|
bsd-3-clause
|
benkonrath/django-csv-export-view
|
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
SECRET_KEY = 'fake-key'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'csv_export',
'tests',
]
MIDDLEWARE = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
]
USE_TZ = True
ROOT_URLCONF = 'tests.urls'
Fix tests on Django 1.8.
|
import django
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
SECRET_KEY = 'fake-key'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'csv_export',
'tests',
]
MIDDLEWARE = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
]
if django.VERSION < (1, 10):
MIDDLEWARE_CLASSES = MIDDLEWARE
USE_TZ = True
ROOT_URLCONF = 'tests.urls'
|
<commit_before>DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
SECRET_KEY = 'fake-key'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'csv_export',
'tests',
]
MIDDLEWARE = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
]
USE_TZ = True
ROOT_URLCONF = 'tests.urls'
<commit_msg>Fix tests on Django 1.8.<commit_after>
|
import django
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
SECRET_KEY = 'fake-key'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'csv_export',
'tests',
]
MIDDLEWARE = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
]
if django.VERSION < (1, 10):
MIDDLEWARE_CLASSES = MIDDLEWARE
USE_TZ = True
ROOT_URLCONF = 'tests.urls'
|
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
SECRET_KEY = 'fake-key'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'csv_export',
'tests',
]
MIDDLEWARE = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
]
USE_TZ = True
ROOT_URLCONF = 'tests.urls'
Fix tests on Django 1.8.import django
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
SECRET_KEY = 'fake-key'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'csv_export',
'tests',
]
MIDDLEWARE = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
]
if django.VERSION < (1, 10):
MIDDLEWARE_CLASSES = MIDDLEWARE
USE_TZ = True
ROOT_URLCONF = 'tests.urls'
|
<commit_before>DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
SECRET_KEY = 'fake-key'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'csv_export',
'tests',
]
MIDDLEWARE = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
]
USE_TZ = True
ROOT_URLCONF = 'tests.urls'
<commit_msg>Fix tests on Django 1.8.<commit_after>import django
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
SECRET_KEY = 'fake-key'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'csv_export',
'tests',
]
MIDDLEWARE = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
]
if django.VERSION < (1, 10):
MIDDLEWARE_CLASSES = MIDDLEWARE
USE_TZ = True
ROOT_URLCONF = 'tests.urls'
|
b0c3ef9a162109aa654de28d15f47d103ddbbf58
|
fireplace/cards/brawl/gift_exchange.py
|
fireplace/cards/brawl/gift_exchange.py
|
"""
Gift Exchange
"""
from ..utils import *
# Hardpacked Snowballs
class TB_GiftExchange_Snowball:
play = Bounce(RANDOM_ENEMY_MINION) * 3
# Winter's Veil Gift
class TB_GiftExchange_Treasure:
deathrattle = Give(CURRENT_PLAYER, "TB_GiftExchange_Treasure_Spell")
# Stolen Winter's Veil Gift
class TB_GiftExchange_Treasure_Spell:
# Surely none of this even sort of works.
RandomGift = RandomCollectible(
COST >= 5,
CLASS_CARD=Attr(Controller(OWNER), GameTag.CLASS)
)
play = Discover(RandomGift).then(Buff(Discover.CARDS, "TB_GiftExchange_Enchantment"))
# Cheap Gift
TB_GiftExchange_Enchantment = buff(cost=-5)
|
"""
Gift Exchange
"""
from ..utils import *
# Hardpacked Snowballs
class TB_GiftExchange_Snowball:
play = Bounce(RANDOM_ENEMY_MINION) * 3
# Winter's Veil Gift
class TB_GiftExchange_Treasure:
deathrattle = Give(CURRENT_PLAYER, "TB_GiftExchange_Treasure_Spell")
# Stolen Winter's Veil Gift
class TB_GiftExchange_Treasure_Spell:
# Surely none of this even sort of works.
RandomGift = RandomCollectible(
# COST >= 5,
card_class=Attr(Controller(OWNER), GameTag.CLASS)
)
play = Discover(RandomGift).then(Buff(Discover.CARDS, "TB_GiftExchange_Enchantment"))
# Cheap Gift
TB_GiftExchange_Enchantment = buff(cost=-5)
|
Drop cost filtering from TB_GiftExchange_Treasure_Spell
|
Drop cost filtering from TB_GiftExchange_Treasure_Spell
It doesn't work, and makes things harder than they need to be.
|
Python
|
agpl-3.0
|
Ragowit/fireplace,beheh/fireplace,smallnamespace/fireplace,NightKev/fireplace,smallnamespace/fireplace,Ragowit/fireplace,jleclanche/fireplace
|
"""
Gift Exchange
"""
from ..utils import *
# Hardpacked Snowballs
class TB_GiftExchange_Snowball:
play = Bounce(RANDOM_ENEMY_MINION) * 3
# Winter's Veil Gift
class TB_GiftExchange_Treasure:
deathrattle = Give(CURRENT_PLAYER, "TB_GiftExchange_Treasure_Spell")
# Stolen Winter's Veil Gift
class TB_GiftExchange_Treasure_Spell:
# Surely none of this even sort of works.
RandomGift = RandomCollectible(
COST >= 5,
CLASS_CARD=Attr(Controller(OWNER), GameTag.CLASS)
)
play = Discover(RandomGift).then(Buff(Discover.CARDS, "TB_GiftExchange_Enchantment"))
# Cheap Gift
TB_GiftExchange_Enchantment = buff(cost=-5)
Drop cost filtering from TB_GiftExchange_Treasure_Spell
It doesn't work, and makes things harder than they need to be.
|
"""
Gift Exchange
"""
from ..utils import *
# Hardpacked Snowballs
class TB_GiftExchange_Snowball:
play = Bounce(RANDOM_ENEMY_MINION) * 3
# Winter's Veil Gift
class TB_GiftExchange_Treasure:
deathrattle = Give(CURRENT_PLAYER, "TB_GiftExchange_Treasure_Spell")
# Stolen Winter's Veil Gift
class TB_GiftExchange_Treasure_Spell:
# Surely none of this even sort of works.
RandomGift = RandomCollectible(
# COST >= 5,
card_class=Attr(Controller(OWNER), GameTag.CLASS)
)
play = Discover(RandomGift).then(Buff(Discover.CARDS, "TB_GiftExchange_Enchantment"))
# Cheap Gift
TB_GiftExchange_Enchantment = buff(cost=-5)
|
<commit_before>"""
Gift Exchange
"""
from ..utils import *
# Hardpacked Snowballs
class TB_GiftExchange_Snowball:
play = Bounce(RANDOM_ENEMY_MINION) * 3
# Winter's Veil Gift
class TB_GiftExchange_Treasure:
deathrattle = Give(CURRENT_PLAYER, "TB_GiftExchange_Treasure_Spell")
# Stolen Winter's Veil Gift
class TB_GiftExchange_Treasure_Spell:
# Surely none of this even sort of works.
RandomGift = RandomCollectible(
COST >= 5,
CLASS_CARD=Attr(Controller(OWNER), GameTag.CLASS)
)
play = Discover(RandomGift).then(Buff(Discover.CARDS, "TB_GiftExchange_Enchantment"))
# Cheap Gift
TB_GiftExchange_Enchantment = buff(cost=-5)
<commit_msg>Drop cost filtering from TB_GiftExchange_Treasure_Spell
It doesn't work, and makes things harder than they need to be.<commit_after>
|
"""
Gift Exchange
"""
from ..utils import *
# Hardpacked Snowballs
class TB_GiftExchange_Snowball:
play = Bounce(RANDOM_ENEMY_MINION) * 3
# Winter's Veil Gift
class TB_GiftExchange_Treasure:
deathrattle = Give(CURRENT_PLAYER, "TB_GiftExchange_Treasure_Spell")
# Stolen Winter's Veil Gift
class TB_GiftExchange_Treasure_Spell:
# Surely none of this even sort of works.
RandomGift = RandomCollectible(
# COST >= 5,
card_class=Attr(Controller(OWNER), GameTag.CLASS)
)
play = Discover(RandomGift).then(Buff(Discover.CARDS, "TB_GiftExchange_Enchantment"))
# Cheap Gift
TB_GiftExchange_Enchantment = buff(cost=-5)
|
"""
Gift Exchange
"""
from ..utils import *
# Hardpacked Snowballs
class TB_GiftExchange_Snowball:
play = Bounce(RANDOM_ENEMY_MINION) * 3
# Winter's Veil Gift
class TB_GiftExchange_Treasure:
deathrattle = Give(CURRENT_PLAYER, "TB_GiftExchange_Treasure_Spell")
# Stolen Winter's Veil Gift
class TB_GiftExchange_Treasure_Spell:
# Surely none of this even sort of works.
RandomGift = RandomCollectible(
COST >= 5,
CLASS_CARD=Attr(Controller(OWNER), GameTag.CLASS)
)
play = Discover(RandomGift).then(Buff(Discover.CARDS, "TB_GiftExchange_Enchantment"))
# Cheap Gift
TB_GiftExchange_Enchantment = buff(cost=-5)
Drop cost filtering from TB_GiftExchange_Treasure_Spell
It doesn't work, and makes things harder than they need to be."""
Gift Exchange
"""
from ..utils import *
# Hardpacked Snowballs
class TB_GiftExchange_Snowball:
play = Bounce(RANDOM_ENEMY_MINION) * 3
# Winter's Veil Gift
class TB_GiftExchange_Treasure:
deathrattle = Give(CURRENT_PLAYER, "TB_GiftExchange_Treasure_Spell")
# Stolen Winter's Veil Gift
class TB_GiftExchange_Treasure_Spell:
# Surely none of this even sort of works.
RandomGift = RandomCollectible(
# COST >= 5,
card_class=Attr(Controller(OWNER), GameTag.CLASS)
)
play = Discover(RandomGift).then(Buff(Discover.CARDS, "TB_GiftExchange_Enchantment"))
# Cheap Gift
TB_GiftExchange_Enchantment = buff(cost=-5)
|
<commit_before>"""
Gift Exchange
"""
from ..utils import *
# Hardpacked Snowballs
class TB_GiftExchange_Snowball:
play = Bounce(RANDOM_ENEMY_MINION) * 3
# Winter's Veil Gift
class TB_GiftExchange_Treasure:
deathrattle = Give(CURRENT_PLAYER, "TB_GiftExchange_Treasure_Spell")
# Stolen Winter's Veil Gift
class TB_GiftExchange_Treasure_Spell:
# Surely none of this even sort of works.
RandomGift = RandomCollectible(
COST >= 5,
CLASS_CARD=Attr(Controller(OWNER), GameTag.CLASS)
)
play = Discover(RandomGift).then(Buff(Discover.CARDS, "TB_GiftExchange_Enchantment"))
# Cheap Gift
TB_GiftExchange_Enchantment = buff(cost=-5)
<commit_msg>Drop cost filtering from TB_GiftExchange_Treasure_Spell
It doesn't work, and makes things harder than they need to be.<commit_after>"""
Gift Exchange
"""
from ..utils import *
# Hardpacked Snowballs
class TB_GiftExchange_Snowball:
play = Bounce(RANDOM_ENEMY_MINION) * 3
# Winter's Veil Gift
class TB_GiftExchange_Treasure:
deathrattle = Give(CURRENT_PLAYER, "TB_GiftExchange_Treasure_Spell")
# Stolen Winter's Veil Gift
class TB_GiftExchange_Treasure_Spell:
# Surely none of this even sort of works.
RandomGift = RandomCollectible(
# COST >= 5,
card_class=Attr(Controller(OWNER), GameTag.CLASS)
)
play = Discover(RandomGift).then(Buff(Discover.CARDS, "TB_GiftExchange_Enchantment"))
# Cheap Gift
TB_GiftExchange_Enchantment = buff(cost=-5)
|
b75e19bcc20f4b35a1712633fe941fdab1fd1029
|
test_classy/test_route_base.py
|
test_classy/test_route_base.py
|
from flask import Flask
from .view_classes import BasicView, RouteBaseView
from nose.tools import *
app = Flask('route_base')
BasicView.register(app, route_base="/rb_test/")
BasicView.register(app)
RouteBaseView.register(app, route_base="/rb_test2/")
RouteBaseView.register(app)
def test_registered_route_base():
client = app.test_client()
resp = client.get('/rb_test/')
eq_(b"Index", resp.data)
def test_no_route_base_after_register_route_base():
client = app.test_client()
resp = client.get('/basic/')
eq_(b"Index", resp.data)
def test_route_base_override():
client = app.test_client()
resp = client.get('/rb_test2/')
eq_(b"Index", resp.data)
def test_route_base_after_route_base_override():
client = app.test_client()
resp = client.get('/base-routed/')
eq_(b"Index", resp.data)
|
from flask import Flask
from .view_classes import BasicView, RouteBaseView
from nose.tools import *
app = Flask('route_base')
RouteBaseView.register(app, route_base="/rb_test2/")
def test_route_base_override():
client = app.test_client()
resp = client.get('/rb_test2/')
eq_(b"Index", resp.data)
|
Remove some route_base tests that are no longer valid with Flask 0.10
|
Remove some route_base tests that are no longer valid with Flask 0.10
|
Python
|
bsd-3-clause
|
ei-grad/muffin-classy,apiguy/flask-classy,mapleoin/flask-classy,apiguy/flask-classy,apiguy/flask-classy,ei-grad/muffin-classy,teracyhq/flask-classy,hoatle/flask-classy,stas/flask-classy,teracyhq/flask-classy
|
from flask import Flask
from .view_classes import BasicView, RouteBaseView
from nose.tools import *
app = Flask('route_base')
BasicView.register(app, route_base="/rb_test/")
BasicView.register(app)
RouteBaseView.register(app, route_base="/rb_test2/")
RouteBaseView.register(app)
def test_registered_route_base():
client = app.test_client()
resp = client.get('/rb_test/')
eq_(b"Index", resp.data)
def test_no_route_base_after_register_route_base():
client = app.test_client()
resp = client.get('/basic/')
eq_(b"Index", resp.data)
def test_route_base_override():
client = app.test_client()
resp = client.get('/rb_test2/')
eq_(b"Index", resp.data)
def test_route_base_after_route_base_override():
client = app.test_client()
resp = client.get('/base-routed/')
eq_(b"Index", resp.data)Remove some route_base tests that are no longer valid with Flask 0.10
|
from flask import Flask
from .view_classes import BasicView, RouteBaseView
from nose.tools import *
app = Flask('route_base')
RouteBaseView.register(app, route_base="/rb_test2/")
def test_route_base_override():
client = app.test_client()
resp = client.get('/rb_test2/')
eq_(b"Index", resp.data)
|
<commit_before>from flask import Flask
from .view_classes import BasicView, RouteBaseView
from nose.tools import *
app = Flask('route_base')
BasicView.register(app, route_base="/rb_test/")
BasicView.register(app)
RouteBaseView.register(app, route_base="/rb_test2/")
RouteBaseView.register(app)
def test_registered_route_base():
client = app.test_client()
resp = client.get('/rb_test/')
eq_(b"Index", resp.data)
def test_no_route_base_after_register_route_base():
client = app.test_client()
resp = client.get('/basic/')
eq_(b"Index", resp.data)
def test_route_base_override():
client = app.test_client()
resp = client.get('/rb_test2/')
eq_(b"Index", resp.data)
def test_route_base_after_route_base_override():
client = app.test_client()
resp = client.get('/base-routed/')
eq_(b"Index", resp.data)<commit_msg>Remove some route_base tests that are no longer valid with Flask 0.10<commit_after>
|
from flask import Flask
from .view_classes import BasicView, RouteBaseView
from nose.tools import *
app = Flask('route_base')
RouteBaseView.register(app, route_base="/rb_test2/")
def test_route_base_override():
client = app.test_client()
resp = client.get('/rb_test2/')
eq_(b"Index", resp.data)
|
from flask import Flask
from .view_classes import BasicView, RouteBaseView
from nose.tools import *
app = Flask('route_base')
BasicView.register(app, route_base="/rb_test/")
BasicView.register(app)
RouteBaseView.register(app, route_base="/rb_test2/")
RouteBaseView.register(app)
def test_registered_route_base():
client = app.test_client()
resp = client.get('/rb_test/')
eq_(b"Index", resp.data)
def test_no_route_base_after_register_route_base():
client = app.test_client()
resp = client.get('/basic/')
eq_(b"Index", resp.data)
def test_route_base_override():
client = app.test_client()
resp = client.get('/rb_test2/')
eq_(b"Index", resp.data)
def test_route_base_after_route_base_override():
client = app.test_client()
resp = client.get('/base-routed/')
eq_(b"Index", resp.data)Remove some route_base tests that are no longer valid with Flask 0.10from flask import Flask
from .view_classes import BasicView, RouteBaseView
from nose.tools import *
app = Flask('route_base')
RouteBaseView.register(app, route_base="/rb_test2/")
def test_route_base_override():
client = app.test_client()
resp = client.get('/rb_test2/')
eq_(b"Index", resp.data)
|
<commit_before>from flask import Flask
from .view_classes import BasicView, RouteBaseView
from nose.tools import *
app = Flask('route_base')
BasicView.register(app, route_base="/rb_test/")
BasicView.register(app)
RouteBaseView.register(app, route_base="/rb_test2/")
RouteBaseView.register(app)
def test_registered_route_base():
client = app.test_client()
resp = client.get('/rb_test/')
eq_(b"Index", resp.data)
def test_no_route_base_after_register_route_base():
client = app.test_client()
resp = client.get('/basic/')
eq_(b"Index", resp.data)
def test_route_base_override():
client = app.test_client()
resp = client.get('/rb_test2/')
eq_(b"Index", resp.data)
def test_route_base_after_route_base_override():
client = app.test_client()
resp = client.get('/base-routed/')
eq_(b"Index", resp.data)<commit_msg>Remove some route_base tests that are no longer valid with Flask 0.10<commit_after>from flask import Flask
from .view_classes import BasicView, RouteBaseView
from nose.tools import *
app = Flask('route_base')
RouteBaseView.register(app, route_base="/rb_test2/")
def test_route_base_override():
client = app.test_client()
resp = client.get('/rb_test2/')
eq_(b"Index", resp.data)
|
81a0239812d01e9e876989d2334afe746e09f5da
|
chartflo/tests.py
|
chartflo/tests.py
|
from django.test import TestCase
# Create your tests here.
|
from django.test import TestCase
from .views import ChartsView
# Create your tests here.
class TestVegaLiteChartsView(TestCase):
def setUpTestCase(self):
self.chart_view = ChartsView()
# Set Vega Lite as template engine
self.chart_view.engine = "vegalite"
def test_vega_lite_template(self):
# URL for Vega Lite chart URL
vega_lite_template_url = "chartflo/vegalite/chart.html"
# Get chart view template URL
chart_view_template_url = self.chart_view._get_template_url()
# Make sure Chart View URL matches Vega Lite chart URL
self.assertEqual(chart_view_template_url, vega_lite_template_url)
|
Add Vega Lite template test
|
Add Vega Lite template test
|
Python
|
mit
|
synw/django-chartflo,synw/django-chartflo,synw/django-chartflo
|
from django.test import TestCase
# Create your tests here.
Add Vega Lite template test
|
from django.test import TestCase
from .views import ChartsView
# Create your tests here.
class TestVegaLiteChartsView(TestCase):
def setUpTestCase(self):
self.chart_view = ChartsView()
# Set Vega Lite as template engine
self.chart_view.engine = "vegalite"
def test_vega_lite_template(self):
# URL for Vega Lite chart URL
vega_lite_template_url = "chartflo/vegalite/chart.html"
# Get chart view template URL
chart_view_template_url = self.chart_view._get_template_url()
# Make sure Chart View URL matches Vega Lite chart URL
self.assertEqual(chart_view_template_url, vega_lite_template_url)
|
<commit_before>from django.test import TestCase
# Create your tests here.
<commit_msg>Add Vega Lite template test<commit_after>
|
from django.test import TestCase
from .views import ChartsView
# Create your tests here.
class TestVegaLiteChartsView(TestCase):
def setUpTestCase(self):
self.chart_view = ChartsView()
# Set Vega Lite as template engine
self.chart_view.engine = "vegalite"
def test_vega_lite_template(self):
# URL for Vega Lite chart URL
vega_lite_template_url = "chartflo/vegalite/chart.html"
# Get chart view template URL
chart_view_template_url = self.chart_view._get_template_url()
# Make sure Chart View URL matches Vega Lite chart URL
self.assertEqual(chart_view_template_url, vega_lite_template_url)
|
from django.test import TestCase
# Create your tests here.
Add Vega Lite template testfrom django.test import TestCase
from .views import ChartsView
# Create your tests here.
class TestVegaLiteChartsView(TestCase):
def setUpTestCase(self):
self.chart_view = ChartsView()
# Set Vega Lite as template engine
self.chart_view.engine = "vegalite"
def test_vega_lite_template(self):
# URL for Vega Lite chart URL
vega_lite_template_url = "chartflo/vegalite/chart.html"
# Get chart view template URL
chart_view_template_url = self.chart_view._get_template_url()
# Make sure Chart View URL matches Vega Lite chart URL
self.assertEqual(chart_view_template_url, vega_lite_template_url)
|
<commit_before>from django.test import TestCase
# Create your tests here.
<commit_msg>Add Vega Lite template test<commit_after>from django.test import TestCase
from .views import ChartsView
# Create your tests here.
class TestVegaLiteChartsView(TestCase):
def setUpTestCase(self):
self.chart_view = ChartsView()
# Set Vega Lite as template engine
self.chart_view.engine = "vegalite"
def test_vega_lite_template(self):
# URL for Vega Lite chart URL
vega_lite_template_url = "chartflo/vegalite/chart.html"
# Get chart view template URL
chart_view_template_url = self.chart_view._get_template_url()
# Make sure Chart View URL matches Vega Lite chart URL
self.assertEqual(chart_view_template_url, vega_lite_template_url)
|
c8418c27d1a0b7f204af6981948654ba5c17d050
|
parsl/tests/test_staging/test_implicit_staging_ftp.py
|
parsl/tests/test_staging/test_implicit_staging_ftp.py
|
import pytest
import parsl
from parsl.app.app import App
from parsl.data_provider.files import File
from parsl.tests.configs.local_threads import config
parsl.clear()
parsl.load(config)
@App('python')
def sort_strings(inputs=[], outputs=[]):
with open(inputs[0].filepath, 'r') as u:
strs = u.readlines()
strs.sort()
with open(outputs[0].filepath, 'w') as s:
for e in strs:
s.write(e)
@pytest.mark.local
def test_implicit_staging_ftp():
"""Test implicit staging for an ftp file
Create a remote input file (ftp) that points to file_test_cpt.txt.
"""
unsorted_file = File('ftp://ftp.uconn.edu/48_hour/file_test_cpt.txt')
# Create a local file for output data
sorted_file = File('sorted.txt')
f = sort_strings(inputs=[unsorted_file], outputs=[sorted_file])
f.result()
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--debug", action='store_true',
help="Count of apps to launch")
args = parser.parse_args()
if args.debug:
parsl.set_stream_logger()
test_implicit_staging_ftp()
|
import pytest
import parsl
from parsl.app.app import App
from parsl.data_provider.files import File
from parsl.tests.configs.local_threads import config
parsl.clear()
parsl.load(config)
@App('python')
def sort_strings(inputs=[], outputs=[]):
with open(inputs[0].filepath, 'r') as u:
strs = u.readlines()
strs.sort()
with open(outputs[0].filepath, 'w') as s:
for e in strs:
s.write(e)
@pytest.mark.local
def test_implicit_staging_ftp():
"""Test implicit staging for an ftp file
Create a remote input file (ftp) that points to file_test_cpt.txt.
"""
unsorted_file = File('ftp://ftp.cs.brown.edu/pub/info/README')
# Create a local file for output data
sorted_file = File('sorted.txt')
f = sort_strings(inputs=[unsorted_file], outputs=[sorted_file])
f.result()
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--debug", action='store_true',
help="Count of apps to launch")
args = parser.parse_args()
if args.debug:
parsl.set_stream_logger()
test_implicit_staging_ftp()
|
Change a URL to a ftp file in the ftp staging test.
|
Change a URL to a ftp file in the ftp staging test.
|
Python
|
apache-2.0
|
Parsl/parsl,Parsl/parsl,swift-lang/swift-e-lab,Parsl/parsl,Parsl/parsl,swift-lang/swift-e-lab
|
import pytest
import parsl
from parsl.app.app import App
from parsl.data_provider.files import File
from parsl.tests.configs.local_threads import config
parsl.clear()
parsl.load(config)
@App('python')
def sort_strings(inputs=[], outputs=[]):
with open(inputs[0].filepath, 'r') as u:
strs = u.readlines()
strs.sort()
with open(outputs[0].filepath, 'w') as s:
for e in strs:
s.write(e)
@pytest.mark.local
def test_implicit_staging_ftp():
"""Test implicit staging for an ftp file
Create a remote input file (ftp) that points to file_test_cpt.txt.
"""
unsorted_file = File('ftp://ftp.uconn.edu/48_hour/file_test_cpt.txt')
# Create a local file for output data
sorted_file = File('sorted.txt')
f = sort_strings(inputs=[unsorted_file], outputs=[sorted_file])
f.result()
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--debug", action='store_true',
help="Count of apps to launch")
args = parser.parse_args()
if args.debug:
parsl.set_stream_logger()
test_implicit_staging_ftp()
Change a URL to a ftp file in the ftp staging test.
|
import pytest
import parsl
from parsl.app.app import App
from parsl.data_provider.files import File
from parsl.tests.configs.local_threads import config
parsl.clear()
parsl.load(config)
@App('python')
def sort_strings(inputs=[], outputs=[]):
with open(inputs[0].filepath, 'r') as u:
strs = u.readlines()
strs.sort()
with open(outputs[0].filepath, 'w') as s:
for e in strs:
s.write(e)
@pytest.mark.local
def test_implicit_staging_ftp():
"""Test implicit staging for an ftp file
Create a remote input file (ftp) that points to file_test_cpt.txt.
"""
unsorted_file = File('ftp://ftp.cs.brown.edu/pub/info/README')
# Create a local file for output data
sorted_file = File('sorted.txt')
f = sort_strings(inputs=[unsorted_file], outputs=[sorted_file])
f.result()
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--debug", action='store_true',
help="Count of apps to launch")
args = parser.parse_args()
if args.debug:
parsl.set_stream_logger()
test_implicit_staging_ftp()
|
<commit_before>import pytest
import parsl
from parsl.app.app import App
from parsl.data_provider.files import File
from parsl.tests.configs.local_threads import config
parsl.clear()
parsl.load(config)
@App('python')
def sort_strings(inputs=[], outputs=[]):
with open(inputs[0].filepath, 'r') as u:
strs = u.readlines()
strs.sort()
with open(outputs[0].filepath, 'w') as s:
for e in strs:
s.write(e)
@pytest.mark.local
def test_implicit_staging_ftp():
"""Test implicit staging for an ftp file
Create a remote input file (ftp) that points to file_test_cpt.txt.
"""
unsorted_file = File('ftp://ftp.uconn.edu/48_hour/file_test_cpt.txt')
# Create a local file for output data
sorted_file = File('sorted.txt')
f = sort_strings(inputs=[unsorted_file], outputs=[sorted_file])
f.result()
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--debug", action='store_true',
help="Count of apps to launch")
args = parser.parse_args()
if args.debug:
parsl.set_stream_logger()
test_implicit_staging_ftp()
<commit_msg>Change a URL to a ftp file in the ftp staging test.<commit_after>
|
import pytest
import parsl
from parsl.app.app import App
from parsl.data_provider.files import File
from parsl.tests.configs.local_threads import config
parsl.clear()
parsl.load(config)
@App('python')
def sort_strings(inputs=[], outputs=[]):
with open(inputs[0].filepath, 'r') as u:
strs = u.readlines()
strs.sort()
with open(outputs[0].filepath, 'w') as s:
for e in strs:
s.write(e)
@pytest.mark.local
def test_implicit_staging_ftp():
"""Test implicit staging for an ftp file
Create a remote input file (ftp) that points to file_test_cpt.txt.
"""
unsorted_file = File('ftp://ftp.cs.brown.edu/pub/info/README')
# Create a local file for output data
sorted_file = File('sorted.txt')
f = sort_strings(inputs=[unsorted_file], outputs=[sorted_file])
f.result()
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--debug", action='store_true',
help="Count of apps to launch")
args = parser.parse_args()
if args.debug:
parsl.set_stream_logger()
test_implicit_staging_ftp()
|
import pytest
import parsl
from parsl.app.app import App
from parsl.data_provider.files import File
from parsl.tests.configs.local_threads import config
parsl.clear()
parsl.load(config)
@App('python')
def sort_strings(inputs=[], outputs=[]):
with open(inputs[0].filepath, 'r') as u:
strs = u.readlines()
strs.sort()
with open(outputs[0].filepath, 'w') as s:
for e in strs:
s.write(e)
@pytest.mark.local
def test_implicit_staging_ftp():
"""Test implicit staging for an ftp file
Create a remote input file (ftp) that points to file_test_cpt.txt.
"""
unsorted_file = File('ftp://ftp.uconn.edu/48_hour/file_test_cpt.txt')
# Create a local file for output data
sorted_file = File('sorted.txt')
f = sort_strings(inputs=[unsorted_file], outputs=[sorted_file])
f.result()
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--debug", action='store_true',
help="Count of apps to launch")
args = parser.parse_args()
if args.debug:
parsl.set_stream_logger()
test_implicit_staging_ftp()
Change a URL to a ftp file in the ftp staging test.import pytest
import parsl
from parsl.app.app import App
from parsl.data_provider.files import File
from parsl.tests.configs.local_threads import config
parsl.clear()
parsl.load(config)
@App('python')
def sort_strings(inputs=[], outputs=[]):
with open(inputs[0].filepath, 'r') as u:
strs = u.readlines()
strs.sort()
with open(outputs[0].filepath, 'w') as s:
for e in strs:
s.write(e)
@pytest.mark.local
def test_implicit_staging_ftp():
"""Test implicit staging for an ftp file
Create a remote input file (ftp) that points to file_test_cpt.txt.
"""
unsorted_file = File('ftp://ftp.cs.brown.edu/pub/info/README')
# Create a local file for output data
sorted_file = File('sorted.txt')
f = sort_strings(inputs=[unsorted_file], outputs=[sorted_file])
f.result()
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--debug", action='store_true',
help="Count of apps to launch")
args = parser.parse_args()
if args.debug:
parsl.set_stream_logger()
test_implicit_staging_ftp()
|
<commit_before>import pytest
import parsl
from parsl.app.app import App
from parsl.data_provider.files import File
from parsl.tests.configs.local_threads import config
parsl.clear()
parsl.load(config)
@App('python')
def sort_strings(inputs=[], outputs=[]):
with open(inputs[0].filepath, 'r') as u:
strs = u.readlines()
strs.sort()
with open(outputs[0].filepath, 'w') as s:
for e in strs:
s.write(e)
@pytest.mark.local
def test_implicit_staging_ftp():
"""Test implicit staging for an ftp file
Create a remote input file (ftp) that points to file_test_cpt.txt.
"""
unsorted_file = File('ftp://ftp.uconn.edu/48_hour/file_test_cpt.txt')
# Create a local file for output data
sorted_file = File('sorted.txt')
f = sort_strings(inputs=[unsorted_file], outputs=[sorted_file])
f.result()
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--debug", action='store_true',
help="Count of apps to launch")
args = parser.parse_args()
if args.debug:
parsl.set_stream_logger()
test_implicit_staging_ftp()
<commit_msg>Change a URL to a ftp file in the ftp staging test.<commit_after>import pytest
import parsl
from parsl.app.app import App
from parsl.data_provider.files import File
from parsl.tests.configs.local_threads import config
parsl.clear()
parsl.load(config)
@App('python')
def sort_strings(inputs=[], outputs=[]):
with open(inputs[0].filepath, 'r') as u:
strs = u.readlines()
strs.sort()
with open(outputs[0].filepath, 'w') as s:
for e in strs:
s.write(e)
@pytest.mark.local
def test_implicit_staging_ftp():
"""Test implicit staging for an ftp file
Create a remote input file (ftp) that points to file_test_cpt.txt.
"""
unsorted_file = File('ftp://ftp.cs.brown.edu/pub/info/README')
# Create a local file for output data
sorted_file = File('sorted.txt')
f = sort_strings(inputs=[unsorted_file], outputs=[sorted_file])
f.result()
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--debug", action='store_true',
help="Count of apps to launch")
args = parser.parse_args()
if args.debug:
parsl.set_stream_logger()
test_implicit_staging_ftp()
|
68b5484cfb0910b3ed68e99520decc6aca08bb2d
|
flask_webapi/__init__.py
|
flask_webapi/__init__.py
|
# Make marshmallow's functions and classes importable from flask-io
from marshmallow import pre_load, pre_dump, post_load, post_dump, Schema, ValidationError, validates_schema
from marshmallow.utils import missing
from .api import WebAPI
from .decorators import authenticator, permissions, content_negotiator, renderer, serializer, route
from .views import ViewBase
|
# Make marshmallow's functions and classes importable from flask-io
from marshmallow import pre_load, pre_dump, post_load, post_dump, Schema, ValidationError, validates_schema
from marshmallow.utils import missing
from .api import WebAPI
from .errors import APIError
from .decorators import authenticator, permissions, content_negotiator, renderer, serializer, route
from .views import ViewBase
|
Add import for APIError to make it easy to import by users
|
Add import for APIError to make it easy to import by users
|
Python
|
mit
|
viniciuschiele/flask-webapi
|
# Make marshmallow's functions and classes importable from flask-io
from marshmallow import pre_load, pre_dump, post_load, post_dump, Schema, ValidationError, validates_schema
from marshmallow.utils import missing
from .api import WebAPI
from .decorators import authenticator, permissions, content_negotiator, renderer, serializer, route
from .views import ViewBase
Add import for APIError to make it easy to import by users
|
# Make marshmallow's functions and classes importable from flask-io
from marshmallow import pre_load, pre_dump, post_load, post_dump, Schema, ValidationError, validates_schema
from marshmallow.utils import missing
from .api import WebAPI
from .errors import APIError
from .decorators import authenticator, permissions, content_negotiator, renderer, serializer, route
from .views import ViewBase
|
<commit_before># Make marshmallow's functions and classes importable from flask-io
from marshmallow import pre_load, pre_dump, post_load, post_dump, Schema, ValidationError, validates_schema
from marshmallow.utils import missing
from .api import WebAPI
from .decorators import authenticator, permissions, content_negotiator, renderer, serializer, route
from .views import ViewBase
<commit_msg>Add import for APIError to make it easy to import by users<commit_after>
|
# Make marshmallow's functions and classes importable from flask-io
from marshmallow import pre_load, pre_dump, post_load, post_dump, Schema, ValidationError, validates_schema
from marshmallow.utils import missing
from .api import WebAPI
from .errors import APIError
from .decorators import authenticator, permissions, content_negotiator, renderer, serializer, route
from .views import ViewBase
|
# Make marshmallow's functions and classes importable from flask-io
from marshmallow import pre_load, pre_dump, post_load, post_dump, Schema, ValidationError, validates_schema
from marshmallow.utils import missing
from .api import WebAPI
from .decorators import authenticator, permissions, content_negotiator, renderer, serializer, route
from .views import ViewBase
Add import for APIError to make it easy to import by users# Make marshmallow's functions and classes importable from flask-io
from marshmallow import pre_load, pre_dump, post_load, post_dump, Schema, ValidationError, validates_schema
from marshmallow.utils import missing
from .api import WebAPI
from .errors import APIError
from .decorators import authenticator, permissions, content_negotiator, renderer, serializer, route
from .views import ViewBase
|
<commit_before># Make marshmallow's functions and classes importable from flask-io
from marshmallow import pre_load, pre_dump, post_load, post_dump, Schema, ValidationError, validates_schema
from marshmallow.utils import missing
from .api import WebAPI
from .decorators import authenticator, permissions, content_negotiator, renderer, serializer, route
from .views import ViewBase
<commit_msg>Add import for APIError to make it easy to import by users<commit_after># Make marshmallow's functions and classes importable from flask-io
from marshmallow import pre_load, pre_dump, post_load, post_dump, Schema, ValidationError, validates_schema
from marshmallow.utils import missing
from .api import WebAPI
from .errors import APIError
from .decorators import authenticator, permissions, content_negotiator, renderer, serializer, route
from .views import ViewBase
|
4c6f40f3d1394fff9ed9a4c6fe3ffd0ae5cb6230
|
jsondb/file_writer.py
|
jsondb/file_writer.py
|
from .compat import decode, encode
def read_data(path):
"""
Reads a file and returns a json encoded representation of the file.
"""
db = open(path, "r+")
content = db.read()
obj = decode(content)
db.close()
return obj
def write_data(path, obj):
"""
Writes to a file and returns the updated file content.
"""
with open(path, "w+") as db:
db.write(encode(obj))
return obj
def is_valid(file_path):
"""
Check to see if a file exists or is empty
"""
from os import path, stat
return path.exists(file_path) and stat(file_path).st_size > 0
|
from .compat import decode, encode
def read_data(file_path):
"""
Reads a file and returns a json encoded representation of the file.
"""
if not is_valid(file_path):
write_data(file_path, {})
db = open(file_path, "r+")
content = db.read()
obj = decode(content)
db.close()
return obj
def write_data(path, obj):
"""
Writes to a file and returns the updated file content.
"""
with open(path, "w+") as db:
db.write(encode(obj))
return obj
def is_valid(file_path):
"""
Check to see if a file exists or is empty.
"""
from os import path, stat
can_open = False
try:
with open(file_path) as fp:
can_open = True
except IOError:
return False
is_file = path.isfile(file_path)
return path.exists(file_path) and is_file and stat(file_path).st_size > 0
|
Create a new file if the path is invalid.
|
Create a new file if the path is invalid.
|
Python
|
bsd-3-clause
|
gunthercox/jsondb
|
from .compat import decode, encode
def read_data(path):
"""
Reads a file and returns a json encoded representation of the file.
"""
db = open(path, "r+")
content = db.read()
obj = decode(content)
db.close()
return obj
def write_data(path, obj):
"""
Writes to a file and returns the updated file content.
"""
with open(path, "w+") as db:
db.write(encode(obj))
return obj
def is_valid(file_path):
"""
Check to see if a file exists or is empty
"""
from os import path, stat
return path.exists(file_path) and stat(file_path).st_size > 0
Create a new file if the path is invalid.
|
from .compat import decode, encode
def read_data(file_path):
"""
Reads a file and returns a json encoded representation of the file.
"""
if not is_valid(file_path):
write_data(file_path, {})
db = open(file_path, "r+")
content = db.read()
obj = decode(content)
db.close()
return obj
def write_data(path, obj):
"""
Writes to a file and returns the updated file content.
"""
with open(path, "w+") as db:
db.write(encode(obj))
return obj
def is_valid(file_path):
"""
Check to see if a file exists or is empty.
"""
from os import path, stat
can_open = False
try:
with open(file_path) as fp:
can_open = True
except IOError:
return False
is_file = path.isfile(file_path)
return path.exists(file_path) and is_file and stat(file_path).st_size > 0
|
<commit_before>from .compat import decode, encode
def read_data(path):
"""
Reads a file and returns a json encoded representation of the file.
"""
db = open(path, "r+")
content = db.read()
obj = decode(content)
db.close()
return obj
def write_data(path, obj):
"""
Writes to a file and returns the updated file content.
"""
with open(path, "w+") as db:
db.write(encode(obj))
return obj
def is_valid(file_path):
"""
Check to see if a file exists or is empty
"""
from os import path, stat
return path.exists(file_path) and stat(file_path).st_size > 0
<commit_msg>Create a new file if the path is invalid.<commit_after>
|
from .compat import decode, encode
def read_data(file_path):
"""
Reads a file and returns a json encoded representation of the file.
"""
if not is_valid(file_path):
write_data(file_path, {})
db = open(file_path, "r+")
content = db.read()
obj = decode(content)
db.close()
return obj
def write_data(path, obj):
"""
Writes to a file and returns the updated file content.
"""
with open(path, "w+") as db:
db.write(encode(obj))
return obj
def is_valid(file_path):
"""
Check to see if a file exists or is empty.
"""
from os import path, stat
can_open = False
try:
with open(file_path) as fp:
can_open = True
except IOError:
return False
is_file = path.isfile(file_path)
return path.exists(file_path) and is_file and stat(file_path).st_size > 0
|
from .compat import decode, encode
def read_data(path):
"""
Reads a file and returns a json encoded representation of the file.
"""
db = open(path, "r+")
content = db.read()
obj = decode(content)
db.close()
return obj
def write_data(path, obj):
"""
Writes to a file and returns the updated file content.
"""
with open(path, "w+") as db:
db.write(encode(obj))
return obj
def is_valid(file_path):
"""
Check to see if a file exists or is empty
"""
from os import path, stat
return path.exists(file_path) and stat(file_path).st_size > 0
Create a new file if the path is invalid.from .compat import decode, encode
def read_data(file_path):
"""
Reads a file and returns a json encoded representation of the file.
"""
if not is_valid(file_path):
write_data(file_path, {})
db = open(file_path, "r+")
content = db.read()
obj = decode(content)
db.close()
return obj
def write_data(path, obj):
"""
Writes to a file and returns the updated file content.
"""
with open(path, "w+") as db:
db.write(encode(obj))
return obj
def is_valid(file_path):
"""
Check to see if a file exists or is empty.
"""
from os import path, stat
can_open = False
try:
with open(file_path) as fp:
can_open = True
except IOError:
return False
is_file = path.isfile(file_path)
return path.exists(file_path) and is_file and stat(file_path).st_size > 0
|
<commit_before>from .compat import decode, encode
def read_data(path):
"""
Reads a file and returns a json encoded representation of the file.
"""
db = open(path, "r+")
content = db.read()
obj = decode(content)
db.close()
return obj
def write_data(path, obj):
"""
Writes to a file and returns the updated file content.
"""
with open(path, "w+") as db:
db.write(encode(obj))
return obj
def is_valid(file_path):
"""
Check to see if a file exists or is empty
"""
from os import path, stat
return path.exists(file_path) and stat(file_path).st_size > 0
<commit_msg>Create a new file if the path is invalid.<commit_after>from .compat import decode, encode
def read_data(file_path):
"""
Reads a file and returns a json encoded representation of the file.
"""
if not is_valid(file_path):
write_data(file_path, {})
db = open(file_path, "r+")
content = db.read()
obj = decode(content)
db.close()
return obj
def write_data(path, obj):
"""
Writes to a file and returns the updated file content.
"""
with open(path, "w+") as db:
db.write(encode(obj))
return obj
def is_valid(file_path):
"""
Check to see if a file exists or is empty.
"""
from os import path, stat
can_open = False
try:
with open(file_path) as fp:
can_open = True
except IOError:
return False
is_file = path.isfile(file_path)
return path.exists(file_path) and is_file and stat(file_path).st_size > 0
|
45078e9b7bfff43d80e223b73dd6bf039c54de0e
|
flask_application/config.py
|
flask_application/config.py
|
#!/usr/bin/env python
# http://flask.pocoo.org/docs/config/#development-production
class Config(object):
SITE_TITLE = "Westminster Standards"
SITE_TAGLINE = "Read the Westminster Standards in a year."
TZ = 'US/Eastern'
SECRET_KEY = ''
SITE_NAME = 'reformedconfessions.com'
MEMCACHED_SERVERS = ['localhost:11211']
SYS_ADMINS = ['feedback@reformedconfessions.com']
class ProductionConfig(Config):
DEBUG = True
class DevelopmentConfig(Config):
'''Use "if app.debug" anywhere in your code, that code will run in development code.'''
DEBUG = True
|
#!/usr/bin/env python
# http://flask.pocoo.org/docs/config/#development-production
class Config(object):
SITE_TITLE = "Westminster Daily"
SITE_TAGLINE = "Read the Westminster Standards in a year."
TZ = 'US/Eastern'
SECRET_KEY = ''
SITE_NAME = 'reformedconfessions.com'
MEMCACHED_SERVERS = ['localhost:11211']
SYS_ADMINS = ['feedback@reformedconfessions.com']
class ProductionConfig(Config):
DEBUG = True
class DevelopmentConfig(Config):
'''Use "if app.debug" anywhere in your code, that code will run in development code.'''
DEBUG = True
|
Change site name to Westminster DAily
|
Change site name to Westminster DAily
|
Python
|
bsd-3-clause
|
olneyhymn/westminster-daily,olneyhymn/westminster-daily,olneyhymn/westminster-daily,tdhopper/westminster-daily,olneyhymn/westminster-daily,tdhopper/westminster-daily,tdhopper/westminster-daily
|
#!/usr/bin/env python
# http://flask.pocoo.org/docs/config/#development-production
class Config(object):
SITE_TITLE = "Westminster Standards"
SITE_TAGLINE = "Read the Westminster Standards in a year."
TZ = 'US/Eastern'
SECRET_KEY = ''
SITE_NAME = 'reformedconfessions.com'
MEMCACHED_SERVERS = ['localhost:11211']
SYS_ADMINS = ['feedback@reformedconfessions.com']
class ProductionConfig(Config):
DEBUG = True
class DevelopmentConfig(Config):
'''Use "if app.debug" anywhere in your code, that code will run in development code.'''
DEBUG = True
Change site name to Westminster DAily
|
#!/usr/bin/env python
# http://flask.pocoo.org/docs/config/#development-production
class Config(object):
SITE_TITLE = "Westminster Daily"
SITE_TAGLINE = "Read the Westminster Standards in a year."
TZ = 'US/Eastern'
SECRET_KEY = ''
SITE_NAME = 'reformedconfessions.com'
MEMCACHED_SERVERS = ['localhost:11211']
SYS_ADMINS = ['feedback@reformedconfessions.com']
class ProductionConfig(Config):
DEBUG = True
class DevelopmentConfig(Config):
'''Use "if app.debug" anywhere in your code, that code will run in development code.'''
DEBUG = True
|
<commit_before>#!/usr/bin/env python
# http://flask.pocoo.org/docs/config/#development-production
class Config(object):
SITE_TITLE = "Westminster Standards"
SITE_TAGLINE = "Read the Westminster Standards in a year."
TZ = 'US/Eastern'
SECRET_KEY = ''
SITE_NAME = 'reformedconfessions.com'
MEMCACHED_SERVERS = ['localhost:11211']
SYS_ADMINS = ['feedback@reformedconfessions.com']
class ProductionConfig(Config):
DEBUG = True
class DevelopmentConfig(Config):
'''Use "if app.debug" anywhere in your code, that code will run in development code.'''
DEBUG = True
<commit_msg>Change site name to Westminster DAily<commit_after>
|
#!/usr/bin/env python
# http://flask.pocoo.org/docs/config/#development-production
class Config(object):
SITE_TITLE = "Westminster Daily"
SITE_TAGLINE = "Read the Westminster Standards in a year."
TZ = 'US/Eastern'
SECRET_KEY = ''
SITE_NAME = 'reformedconfessions.com'
MEMCACHED_SERVERS = ['localhost:11211']
SYS_ADMINS = ['feedback@reformedconfessions.com']
class ProductionConfig(Config):
DEBUG = True
class DevelopmentConfig(Config):
'''Use "if app.debug" anywhere in your code, that code will run in development code.'''
DEBUG = True
|
#!/usr/bin/env python
# http://flask.pocoo.org/docs/config/#development-production
class Config(object):
SITE_TITLE = "Westminster Standards"
SITE_TAGLINE = "Read the Westminster Standards in a year."
TZ = 'US/Eastern'
SECRET_KEY = ''
SITE_NAME = 'reformedconfessions.com'
MEMCACHED_SERVERS = ['localhost:11211']
SYS_ADMINS = ['feedback@reformedconfessions.com']
class ProductionConfig(Config):
DEBUG = True
class DevelopmentConfig(Config):
'''Use "if app.debug" anywhere in your code, that code will run in development code.'''
DEBUG = True
Change site name to Westminster DAily#!/usr/bin/env python
# http://flask.pocoo.org/docs/config/#development-production
class Config(object):
SITE_TITLE = "Westminster Daily"
SITE_TAGLINE = "Read the Westminster Standards in a year."
TZ = 'US/Eastern'
SECRET_KEY = ''
SITE_NAME = 'reformedconfessions.com'
MEMCACHED_SERVERS = ['localhost:11211']
SYS_ADMINS = ['feedback@reformedconfessions.com']
class ProductionConfig(Config):
DEBUG = True
class DevelopmentConfig(Config):
'''Use "if app.debug" anywhere in your code, that code will run in development code.'''
DEBUG = True
|
<commit_before>#!/usr/bin/env python
# http://flask.pocoo.org/docs/config/#development-production
class Config(object):
SITE_TITLE = "Westminster Standards"
SITE_TAGLINE = "Read the Westminster Standards in a year."
TZ = 'US/Eastern'
SECRET_KEY = ''
SITE_NAME = 'reformedconfessions.com'
MEMCACHED_SERVERS = ['localhost:11211']
SYS_ADMINS = ['feedback@reformedconfessions.com']
class ProductionConfig(Config):
DEBUG = True
class DevelopmentConfig(Config):
'''Use "if app.debug" anywhere in your code, that code will run in development code.'''
DEBUG = True
<commit_msg>Change site name to Westminster DAily<commit_after>#!/usr/bin/env python
# http://flask.pocoo.org/docs/config/#development-production
class Config(object):
SITE_TITLE = "Westminster Daily"
SITE_TAGLINE = "Read the Westminster Standards in a year."
TZ = 'US/Eastern'
SECRET_KEY = ''
SITE_NAME = 'reformedconfessions.com'
MEMCACHED_SERVERS = ['localhost:11211']
SYS_ADMINS = ['feedback@reformedconfessions.com']
class ProductionConfig(Config):
DEBUG = True
class DevelopmentConfig(Config):
'''Use "if app.debug" anywhere in your code, that code will run in development code.'''
DEBUG = True
|
0a4aceb87eae57188c5f61bb93d78d5cc9f1779f
|
lava_scheduler_app/templatetags/utils.py
|
lava_scheduler_app/templatetags/utils.py
|
from django import template
from django.utils.safestring import mark_safe
from lava_scheduler_app.models import TestJob
register = template.Library()
@register.filter
def get_priority_select(current):
select = ""
val = TestJob.PRIORITY_CHOICES
for priority, label in val:
check = " checked" if priority == current else ""
default = " [default]" if current != 50 and priority == 50 else ""
select += '<input type="radio" name="priority" style="..." id="%s" value="%d"%s>%s%s</input><br/>' %\
(label.lower(), priority, check, label, default)
return mark_safe(select)
|
from django import template
from django.utils.safestring import mark_safe
from lava_scheduler_app.models import TestJob
register = template.Library()
@register.filter
def get_priority_select(current):
select = ""
val = TestJob.PRIORITY_CHOICES
for priority, label in val:
check = " checked" if priority == current else ""
default = " [default]" if current != 50 and priority == 50 else ""
select += '<label class="checkbox-inline">'
select += '<input type="radio" name="priority" style="..." id="%s" value="%d"%s>%s%s</input><br/>' %\
(label.lower(), priority, check, label, default)
select += '</label>'
return mark_safe(select)
|
Use inline radio buttons for priority changes.
|
Use inline radio buttons for priority changes.
Change-Id: Ifb9a685bca654c5139aef3ca78e800b66ce77eb9
|
Python
|
agpl-3.0
|
Linaro/lava-server,Linaro/lava-server,Linaro/lava-server,Linaro/lava-server
|
from django import template
from django.utils.safestring import mark_safe
from lava_scheduler_app.models import TestJob
register = template.Library()
@register.filter
def get_priority_select(current):
select = ""
val = TestJob.PRIORITY_CHOICES
for priority, label in val:
check = " checked" if priority == current else ""
default = " [default]" if current != 50 and priority == 50 else ""
select += '<input type="radio" name="priority" style="..." id="%s" value="%d"%s>%s%s</input><br/>' %\
(label.lower(), priority, check, label, default)
return mark_safe(select)
Use inline radio buttons for priority changes.
Change-Id: Ifb9a685bca654c5139aef3ca78e800b66ce77eb9
|
from django import template
from django.utils.safestring import mark_safe
from lava_scheduler_app.models import TestJob
register = template.Library()
@register.filter
def get_priority_select(current):
select = ""
val = TestJob.PRIORITY_CHOICES
for priority, label in val:
check = " checked" if priority == current else ""
default = " [default]" if current != 50 and priority == 50 else ""
select += '<label class="checkbox-inline">'
select += '<input type="radio" name="priority" style="..." id="%s" value="%d"%s>%s%s</input><br/>' %\
(label.lower(), priority, check, label, default)
select += '</label>'
return mark_safe(select)
|
<commit_before>from django import template
from django.utils.safestring import mark_safe
from lava_scheduler_app.models import TestJob
register = template.Library()
@register.filter
def get_priority_select(current):
select = ""
val = TestJob.PRIORITY_CHOICES
for priority, label in val:
check = " checked" if priority == current else ""
default = " [default]" if current != 50 and priority == 50 else ""
select += '<input type="radio" name="priority" style="..." id="%s" value="%d"%s>%s%s</input><br/>' %\
(label.lower(), priority, check, label, default)
return mark_safe(select)
<commit_msg>Use inline radio buttons for priority changes.
Change-Id: Ifb9a685bca654c5139aef3ca78e800b66ce77eb9<commit_after>
|
from django import template
from django.utils.safestring import mark_safe
from lava_scheduler_app.models import TestJob
register = template.Library()
@register.filter
def get_priority_select(current):
select = ""
val = TestJob.PRIORITY_CHOICES
for priority, label in val:
check = " checked" if priority == current else ""
default = " [default]" if current != 50 and priority == 50 else ""
select += '<label class="checkbox-inline">'
select += '<input type="radio" name="priority" style="..." id="%s" value="%d"%s>%s%s</input><br/>' %\
(label.lower(), priority, check, label, default)
select += '</label>'
return mark_safe(select)
|
from django import template
from django.utils.safestring import mark_safe
from lava_scheduler_app.models import TestJob
register = template.Library()
@register.filter
def get_priority_select(current):
select = ""
val = TestJob.PRIORITY_CHOICES
for priority, label in val:
check = " checked" if priority == current else ""
default = " [default]" if current != 50 and priority == 50 else ""
select += '<input type="radio" name="priority" style="..." id="%s" value="%d"%s>%s%s</input><br/>' %\
(label.lower(), priority, check, label, default)
return mark_safe(select)
Use inline radio buttons for priority changes.
Change-Id: Ifb9a685bca654c5139aef3ca78e800b66ce77eb9from django import template
from django.utils.safestring import mark_safe
from lava_scheduler_app.models import TestJob
register = template.Library()
@register.filter
def get_priority_select(current):
select = ""
val = TestJob.PRIORITY_CHOICES
for priority, label in val:
check = " checked" if priority == current else ""
default = " [default]" if current != 50 and priority == 50 else ""
select += '<label class="checkbox-inline">'
select += '<input type="radio" name="priority" style="..." id="%s" value="%d"%s>%s%s</input><br/>' %\
(label.lower(), priority, check, label, default)
select += '</label>'
return mark_safe(select)
|
<commit_before>from django import template
from django.utils.safestring import mark_safe
from lava_scheduler_app.models import TestJob
register = template.Library()
@register.filter
def get_priority_select(current):
select = ""
val = TestJob.PRIORITY_CHOICES
for priority, label in val:
check = " checked" if priority == current else ""
default = " [default]" if current != 50 and priority == 50 else ""
select += '<input type="radio" name="priority" style="..." id="%s" value="%d"%s>%s%s</input><br/>' %\
(label.lower(), priority, check, label, default)
return mark_safe(select)
<commit_msg>Use inline radio buttons for priority changes.
Change-Id: Ifb9a685bca654c5139aef3ca78e800b66ce77eb9<commit_after>from django import template
from django.utils.safestring import mark_safe
from lava_scheduler_app.models import TestJob
register = template.Library()
@register.filter
def get_priority_select(current):
select = ""
val = TestJob.PRIORITY_CHOICES
for priority, label in val:
check = " checked" if priority == current else ""
default = " [default]" if current != 50 and priority == 50 else ""
select += '<label class="checkbox-inline">'
select += '<input type="radio" name="priority" style="..." id="%s" value="%d"%s>%s%s</input><br/>' %\
(label.lower(), priority, check, label, default)
select += '</label>'
return mark_safe(select)
|
c9e90de4730050e4ab41fc6b42a4a51018262db7
|
sergey/management/commands/fix_speaker_slugs.py
|
sergey/management/commands/fix_speaker_slugs.py
|
# coding: utf-8
from django.core.management import BaseCommand
from django.template.defaultfilters import slugify
from unidecode import unidecode
from richard.videos.models import Speaker
class Command(BaseCommand):
help = 'Fixes speaker slugs'
def handle(self, *args, **options):
for speaker in Speaker.objects.all():
old_slug = speaker.slug
fixed_slug = slugify(unidecode(speaker.name))
speaker.save()
self.stdout.write(u'Changed slug for %s "%s" => %s\n' % (speaker.name, old_slug, fixed_slug))
|
# coding: utf-8
from django.core.management import BaseCommand
from django.template.defaultfilters import slugify
from unidecode import unidecode
from richard.videos.models import Speaker
class Command(BaseCommand):
help = 'Fixes speaker slugs'
def handle(self, *args, **options):
for speaker in Speaker.objects.all():
old_slug = speaker.slug
try:
speaker.slug = slugify(unidecode(speaker.name))
speaker.save()
self.stdout.write(u'Changed slug for %s "%s" => %s\n' % (speaker.name, old_slug, speaker.slug))
except Exception:
self.stdout.write(u'Problem fixing slug for %s\n' % speaker.name)
|
Fix management command for fixing slugs
|
Fix management command for fixing slugs
|
Python
|
bsd-3-clause
|
WarmongeR1/pyvideo.ru,coagulant/pyvideo.ru,coagulant/pyvideo.ru,WarmongeR1/pyvideo.ru,WarmongeR1/pyvideo.ru,coagulant/pyvideo.ru
|
# coding: utf-8
from django.core.management import BaseCommand
from django.template.defaultfilters import slugify
from unidecode import unidecode
from richard.videos.models import Speaker
class Command(BaseCommand):
help = 'Fixes speaker slugs'
def handle(self, *args, **options):
for speaker in Speaker.objects.all():
old_slug = speaker.slug
fixed_slug = slugify(unidecode(speaker.name))
speaker.save()
self.stdout.write(u'Changed slug for %s "%s" => %s\n' % (speaker.name, old_slug, fixed_slug))
Fix management command for fixing slugs
|
# coding: utf-8
from django.core.management import BaseCommand
from django.template.defaultfilters import slugify
from unidecode import unidecode
from richard.videos.models import Speaker
class Command(BaseCommand):
help = 'Fixes speaker slugs'
def handle(self, *args, **options):
for speaker in Speaker.objects.all():
old_slug = speaker.slug
try:
speaker.slug = slugify(unidecode(speaker.name))
speaker.save()
self.stdout.write(u'Changed slug for %s "%s" => %s\n' % (speaker.name, old_slug, speaker.slug))
except Exception:
self.stdout.write(u'Problem fixing slug for %s\n' % speaker.name)
|
<commit_before># coding: utf-8
from django.core.management import BaseCommand
from django.template.defaultfilters import slugify
from unidecode import unidecode
from richard.videos.models import Speaker
class Command(BaseCommand):
help = 'Fixes speaker slugs'
def handle(self, *args, **options):
for speaker in Speaker.objects.all():
old_slug = speaker.slug
fixed_slug = slugify(unidecode(speaker.name))
speaker.save()
self.stdout.write(u'Changed slug for %s "%s" => %s\n' % (speaker.name, old_slug, fixed_slug))
<commit_msg>Fix management command for fixing slugs<commit_after>
|
# coding: utf-8
from django.core.management import BaseCommand
from django.template.defaultfilters import slugify
from unidecode import unidecode
from richard.videos.models import Speaker
class Command(BaseCommand):
help = 'Fixes speaker slugs'
def handle(self, *args, **options):
for speaker in Speaker.objects.all():
old_slug = speaker.slug
try:
speaker.slug = slugify(unidecode(speaker.name))
speaker.save()
self.stdout.write(u'Changed slug for %s "%s" => %s\n' % (speaker.name, old_slug, speaker.slug))
except Exception:
self.stdout.write(u'Problem fixing slug for %s\n' % speaker.name)
|
# coding: utf-8
from django.core.management import BaseCommand
from django.template.defaultfilters import slugify
from unidecode import unidecode
from richard.videos.models import Speaker
class Command(BaseCommand):
help = 'Fixes speaker slugs'
def handle(self, *args, **options):
for speaker in Speaker.objects.all():
old_slug = speaker.slug
fixed_slug = slugify(unidecode(speaker.name))
speaker.save()
self.stdout.write(u'Changed slug for %s "%s" => %s\n' % (speaker.name, old_slug, fixed_slug))
Fix management command for fixing slugs# coding: utf-8
from django.core.management import BaseCommand
from django.template.defaultfilters import slugify
from unidecode import unidecode
from richard.videos.models import Speaker
class Command(BaseCommand):
help = 'Fixes speaker slugs'
def handle(self, *args, **options):
for speaker in Speaker.objects.all():
old_slug = speaker.slug
try:
speaker.slug = slugify(unidecode(speaker.name))
speaker.save()
self.stdout.write(u'Changed slug for %s "%s" => %s\n' % (speaker.name, old_slug, speaker.slug))
except Exception:
self.stdout.write(u'Problem fixing slug for %s\n' % speaker.name)
|
<commit_before># coding: utf-8
from django.core.management import BaseCommand
from django.template.defaultfilters import slugify
from unidecode import unidecode
from richard.videos.models import Speaker
class Command(BaseCommand):
help = 'Fixes speaker slugs'
def handle(self, *args, **options):
for speaker in Speaker.objects.all():
old_slug = speaker.slug
fixed_slug = slugify(unidecode(speaker.name))
speaker.save()
self.stdout.write(u'Changed slug for %s "%s" => %s\n' % (speaker.name, old_slug, fixed_slug))
<commit_msg>Fix management command for fixing slugs<commit_after># coding: utf-8
from django.core.management import BaseCommand
from django.template.defaultfilters import slugify
from unidecode import unidecode
from richard.videos.models import Speaker
class Command(BaseCommand):
help = 'Fixes speaker slugs'
def handle(self, *args, **options):
for speaker in Speaker.objects.all():
old_slug = speaker.slug
try:
speaker.slug = slugify(unidecode(speaker.name))
speaker.save()
self.stdout.write(u'Changed slug for %s "%s" => %s\n' % (speaker.name, old_slug, speaker.slug))
except Exception:
self.stdout.write(u'Problem fixing slug for %s\n' % speaker.name)
|
45b99469f13379acbc92e7be20968f5973882726
|
prestoadmin/_version.py
|
prestoadmin/_version.py
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Version information"""
# This must be the last line in the file and the format must be maintained
# even when the version is changed
__version__ = '2.2'
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Version information"""
# This must be the last line in the file and the format must be maintained
# even when the version is changed
__version__ = '2.2-SNAPSHOT'
|
Prepare for the next development iteration
|
Prepare for the next development iteration
|
Python
|
apache-2.0
|
prestodb/presto-admin,prestodb/presto-admin
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Version information"""
# This must be the last line in the file and the format must be maintained
# even when the version is changed
__version__ = '2.2'
Prepare for the next development iteration
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Version information"""
# This must be the last line in the file and the format must be maintained
# even when the version is changed
__version__ = '2.2-SNAPSHOT'
|
<commit_before># -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Version information"""
# This must be the last line in the file and the format must be maintained
# even when the version is changed
__version__ = '2.2'
<commit_msg>Prepare for the next development iteration<commit_after>
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Version information"""
# This must be the last line in the file and the format must be maintained
# even when the version is changed
__version__ = '2.2-SNAPSHOT'
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Version information"""
# This must be the last line in the file and the format must be maintained
# even when the version is changed
__version__ = '2.2'
Prepare for the next development iteration# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Version information"""
# This must be the last line in the file and the format must be maintained
# even when the version is changed
__version__ = '2.2-SNAPSHOT'
|
<commit_before># -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Version information"""
# This must be the last line in the file and the format must be maintained
# even when the version is changed
__version__ = '2.2'
<commit_msg>Prepare for the next development iteration<commit_after># -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Version information"""
# This must be the last line in the file and the format must be maintained
# even when the version is changed
__version__ = '2.2-SNAPSHOT'
|
9e73de0014b3f88b9e94ead11a878c6bc3819782
|
selenium_testcase/tests/test_navigation.py
|
selenium_testcase/tests/test_navigation.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from ..testcases import SeleniumLiveTestCase
class NavigationTestCase(SeleniumLiveTestCase):
test_templates = [
(r'^nav_1/$', 'nav_1.html'),
(r'^nav_1/nav_2/$', 'nav_2.html')
]
def test_get_page(self):
""" Test that you can traverse the page tree. """
self.get_page("/nav_1/")
self.should_see("This is nav 1.")
self.get_page("/nav_1/nav_2/")
self.should_see("This is nav 2.")
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from ..testcases import SeleniumLiveTestCase
class NavigationTestCase(SeleniumLiveTestCase):
test_templates = [
(r'^nav_1/$', 'nav_1.html'),
(r'^nav_1/nav_2/$', 'nav_2.html')
]
def test_get_page(self):
""" Test that you can traverse the page tree. """
self.get_page("/nav_1/")
self.has_title("Navigation 1")
self.title_contains("1")
self.should_see("This is nav 1.")
self.get_page("/nav_1/nav_2/")
self.should_see("This is nav 2.")
def test_get_bad_page(self):
""" Test that /bogus/ is not found. """
self.get_page("/bogus/")
self.should_see("Not Found")
self.should_see(
"The requested URL /bogus/ was not found on this server.")
def test_missing_content_with_retry(self):
""" Test retry for missing content, LONG RETRIES! """
self.get_page("/nav_1/")
self.should_not_see("This is nav 2.")
self.url_should_not_contain("nav_2")
self.has_not_title("Navigation 2")
self.title_does_not_contain("2")
self.assertRaises(
AssertionError, self.click_button, "not_there_dude")
self.not_at_page("/nav_2/")
|
Test missing content and failed navigation tests.
|
Test missing content and failed navigation tests.
This commit adds unit tests outside of the happy path where
a url does not exist or the test is looking for conten that
doesn't exist on the page. Since testing for missing informaion
requires timeouts to be sure, some of these tests take several
seconds to execute.
|
Python
|
bsd-3-clause
|
nimbis/django-selenium-testcase,nimbis/django-selenium-testcase
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from ..testcases import SeleniumLiveTestCase
class NavigationTestCase(SeleniumLiveTestCase):
test_templates = [
(r'^nav_1/$', 'nav_1.html'),
(r'^nav_1/nav_2/$', 'nav_2.html')
]
def test_get_page(self):
""" Test that you can traverse the page tree. """
self.get_page("/nav_1/")
self.should_see("This is nav 1.")
self.get_page("/nav_1/nav_2/")
self.should_see("This is nav 2.")
Test missing content and failed navigation tests.
This commit adds unit tests outside of the happy path where
a url does not exist or the test is looking for conten that
doesn't exist on the page. Since testing for missing informaion
requires timeouts to be sure, some of these tests take several
seconds to execute.
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from ..testcases import SeleniumLiveTestCase
class NavigationTestCase(SeleniumLiveTestCase):
test_templates = [
(r'^nav_1/$', 'nav_1.html'),
(r'^nav_1/nav_2/$', 'nav_2.html')
]
def test_get_page(self):
""" Test that you can traverse the page tree. """
self.get_page("/nav_1/")
self.has_title("Navigation 1")
self.title_contains("1")
self.should_see("This is nav 1.")
self.get_page("/nav_1/nav_2/")
self.should_see("This is nav 2.")
def test_get_bad_page(self):
""" Test that /bogus/ is not found. """
self.get_page("/bogus/")
self.should_see("Not Found")
self.should_see(
"The requested URL /bogus/ was not found on this server.")
def test_missing_content_with_retry(self):
""" Test retry for missing content, LONG RETRIES! """
self.get_page("/nav_1/")
self.should_not_see("This is nav 2.")
self.url_should_not_contain("nav_2")
self.has_not_title("Navigation 2")
self.title_does_not_contain("2")
self.assertRaises(
AssertionError, self.click_button, "not_there_dude")
self.not_at_page("/nav_2/")
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import
from ..testcases import SeleniumLiveTestCase
class NavigationTestCase(SeleniumLiveTestCase):
test_templates = [
(r'^nav_1/$', 'nav_1.html'),
(r'^nav_1/nav_2/$', 'nav_2.html')
]
def test_get_page(self):
""" Test that you can traverse the page tree. """
self.get_page("/nav_1/")
self.should_see("This is nav 1.")
self.get_page("/nav_1/nav_2/")
self.should_see("This is nav 2.")
<commit_msg>Test missing content and failed navigation tests.
This commit adds unit tests outside of the happy path where
a url does not exist or the test is looking for conten that
doesn't exist on the page. Since testing for missing informaion
requires timeouts to be sure, some of these tests take several
seconds to execute.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from ..testcases import SeleniumLiveTestCase
class NavigationTestCase(SeleniumLiveTestCase):
test_templates = [
(r'^nav_1/$', 'nav_1.html'),
(r'^nav_1/nav_2/$', 'nav_2.html')
]
def test_get_page(self):
""" Test that you can traverse the page tree. """
self.get_page("/nav_1/")
self.has_title("Navigation 1")
self.title_contains("1")
self.should_see("This is nav 1.")
self.get_page("/nav_1/nav_2/")
self.should_see("This is nav 2.")
def test_get_bad_page(self):
""" Test that /bogus/ is not found. """
self.get_page("/bogus/")
self.should_see("Not Found")
self.should_see(
"The requested URL /bogus/ was not found on this server.")
def test_missing_content_with_retry(self):
""" Test retry for missing content, LONG RETRIES! """
self.get_page("/nav_1/")
self.should_not_see("This is nav 2.")
self.url_should_not_contain("nav_2")
self.has_not_title("Navigation 2")
self.title_does_not_contain("2")
self.assertRaises(
AssertionError, self.click_button, "not_there_dude")
self.not_at_page("/nav_2/")
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from ..testcases import SeleniumLiveTestCase
class NavigationTestCase(SeleniumLiveTestCase):
test_templates = [
(r'^nav_1/$', 'nav_1.html'),
(r'^nav_1/nav_2/$', 'nav_2.html')
]
def test_get_page(self):
""" Test that you can traverse the page tree. """
self.get_page("/nav_1/")
self.should_see("This is nav 1.")
self.get_page("/nav_1/nav_2/")
self.should_see("This is nav 2.")
Test missing content and failed navigation tests.
This commit adds unit tests outside of the happy path where
a url does not exist or the test is looking for conten that
doesn't exist on the page. Since testing for missing informaion
requires timeouts to be sure, some of these tests take several
seconds to execute.# -*- coding: utf-8 -*-
from __future__ import absolute_import
from ..testcases import SeleniumLiveTestCase
class NavigationTestCase(SeleniumLiveTestCase):
test_templates = [
(r'^nav_1/$', 'nav_1.html'),
(r'^nav_1/nav_2/$', 'nav_2.html')
]
def test_get_page(self):
""" Test that you can traverse the page tree. """
self.get_page("/nav_1/")
self.has_title("Navigation 1")
self.title_contains("1")
self.should_see("This is nav 1.")
self.get_page("/nav_1/nav_2/")
self.should_see("This is nav 2.")
def test_get_bad_page(self):
""" Test that /bogus/ is not found. """
self.get_page("/bogus/")
self.should_see("Not Found")
self.should_see(
"The requested URL /bogus/ was not found on this server.")
def test_missing_content_with_retry(self):
""" Test retry for missing content, LONG RETRIES! """
self.get_page("/nav_1/")
self.should_not_see("This is nav 2.")
self.url_should_not_contain("nav_2")
self.has_not_title("Navigation 2")
self.title_does_not_contain("2")
self.assertRaises(
AssertionError, self.click_button, "not_there_dude")
self.not_at_page("/nav_2/")
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import
from ..testcases import SeleniumLiveTestCase
class NavigationTestCase(SeleniumLiveTestCase):
test_templates = [
(r'^nav_1/$', 'nav_1.html'),
(r'^nav_1/nav_2/$', 'nav_2.html')
]
def test_get_page(self):
""" Test that you can traverse the page tree. """
self.get_page("/nav_1/")
self.should_see("This is nav 1.")
self.get_page("/nav_1/nav_2/")
self.should_see("This is nav 2.")
<commit_msg>Test missing content and failed navigation tests.
This commit adds unit tests outside of the happy path where
a url does not exist or the test is looking for conten that
doesn't exist on the page. Since testing for missing informaion
requires timeouts to be sure, some of these tests take several
seconds to execute.<commit_after># -*- coding: utf-8 -*-
from __future__ import absolute_import
from ..testcases import SeleniumLiveTestCase
class NavigationTestCase(SeleniumLiveTestCase):
test_templates = [
(r'^nav_1/$', 'nav_1.html'),
(r'^nav_1/nav_2/$', 'nav_2.html')
]
def test_get_page(self):
""" Test that you can traverse the page tree. """
self.get_page("/nav_1/")
self.has_title("Navigation 1")
self.title_contains("1")
self.should_see("This is nav 1.")
self.get_page("/nav_1/nav_2/")
self.should_see("This is nav 2.")
def test_get_bad_page(self):
""" Test that /bogus/ is not found. """
self.get_page("/bogus/")
self.should_see("Not Found")
self.should_see(
"The requested URL /bogus/ was not found on this server.")
def test_missing_content_with_retry(self):
""" Test retry for missing content, LONG RETRIES! """
self.get_page("/nav_1/")
self.should_not_see("This is nav 2.")
self.url_should_not_contain("nav_2")
self.has_not_title("Navigation 2")
self.title_does_not_contain("2")
self.assertRaises(
AssertionError, self.click_button, "not_there_dude")
self.not_at_page("/nav_2/")
|
1b27133a182204a44a8ee3cd73c832777fa3723b
|
tests/unit/test_metric_timer.py
|
tests/unit/test_metric_timer.py
|
"""
Contains tests for the timer metric.
"""
from statsite.metrics import Timer
class TestTimerMetric(object):
def test_fold_sum(self):
"""
Tests that folding generates a sum of the timers.
"""
now = 10
metrics = [Timer("k", 10),
Timer("k", 15),
Timer("j", 7),
Timer("j", 8)]
result = Timer.fold(metrics, now)
assert ("k", 25, now) == self._get_metric("timers.k.sum", result)
assert ("j", 15, now) == self._get_metric("timers.j.sum", result)
def _get_metric(self, key, metrics):
"""
This will extract a specific metric out of an array of metrics.
"""
for metric in metrics:
if metric[0] == key:
return metric
return None
|
"""
Contains tests for the timer metric.
"""
from statsite.metrics import Timer
class TestTimerMetric(object):
def test_fold_sum(self):
"""
Tests that folding generates a sum of the timers.
"""
now = 10
metrics = [Timer("k", 10),
Timer("k", 15),
Timer("j", 7),
Timer("j", 8)]
result = Timer.fold(metrics, now)
assert ("timers.k.sum", 25, now) == self._get_metric("timers.k.sum", result)
assert ("timers.j.sum", 15, now) == self._get_metric("timers.j.sum", result)
def _get_metric(self, key, metrics):
"""
This will extract a specific metric out of an array of metrics.
"""
for metric in metrics:
if metric[0] == key:
return metric
return None
|
Fix typo with expected metric
|
Fix typo with expected metric
|
Python
|
bsd-3-clause
|
kiip/statsite
|
"""
Contains tests for the timer metric.
"""
from statsite.metrics import Timer
class TestTimerMetric(object):
def test_fold_sum(self):
"""
Tests that folding generates a sum of the timers.
"""
now = 10
metrics = [Timer("k", 10),
Timer("k", 15),
Timer("j", 7),
Timer("j", 8)]
result = Timer.fold(metrics, now)
assert ("k", 25, now) == self._get_metric("timers.k.sum", result)
assert ("j", 15, now) == self._get_metric("timers.j.sum", result)
def _get_metric(self, key, metrics):
"""
This will extract a specific metric out of an array of metrics.
"""
for metric in metrics:
if metric[0] == key:
return metric
return None
Fix typo with expected metric
|
"""
Contains tests for the timer metric.
"""
from statsite.metrics import Timer
class TestTimerMetric(object):
def test_fold_sum(self):
"""
Tests that folding generates a sum of the timers.
"""
now = 10
metrics = [Timer("k", 10),
Timer("k", 15),
Timer("j", 7),
Timer("j", 8)]
result = Timer.fold(metrics, now)
assert ("timers.k.sum", 25, now) == self._get_metric("timers.k.sum", result)
assert ("timers.j.sum", 15, now) == self._get_metric("timers.j.sum", result)
def _get_metric(self, key, metrics):
"""
This will extract a specific metric out of an array of metrics.
"""
for metric in metrics:
if metric[0] == key:
return metric
return None
|
<commit_before>"""
Contains tests for the timer metric.
"""
from statsite.metrics import Timer
class TestTimerMetric(object):
def test_fold_sum(self):
"""
Tests that folding generates a sum of the timers.
"""
now = 10
metrics = [Timer("k", 10),
Timer("k", 15),
Timer("j", 7),
Timer("j", 8)]
result = Timer.fold(metrics, now)
assert ("k", 25, now) == self._get_metric("timers.k.sum", result)
assert ("j", 15, now) == self._get_metric("timers.j.sum", result)
def _get_metric(self, key, metrics):
"""
This will extract a specific metric out of an array of metrics.
"""
for metric in metrics:
if metric[0] == key:
return metric
return None
<commit_msg>Fix typo with expected metric<commit_after>
|
"""
Contains tests for the timer metric.
"""
from statsite.metrics import Timer
class TestTimerMetric(object):
def test_fold_sum(self):
"""
Tests that folding generates a sum of the timers.
"""
now = 10
metrics = [Timer("k", 10),
Timer("k", 15),
Timer("j", 7),
Timer("j", 8)]
result = Timer.fold(metrics, now)
assert ("timers.k.sum", 25, now) == self._get_metric("timers.k.sum", result)
assert ("timers.j.sum", 15, now) == self._get_metric("timers.j.sum", result)
def _get_metric(self, key, metrics):
"""
This will extract a specific metric out of an array of metrics.
"""
for metric in metrics:
if metric[0] == key:
return metric
return None
|
"""
Contains tests for the timer metric.
"""
from statsite.metrics import Timer
class TestTimerMetric(object):
def test_fold_sum(self):
"""
Tests that folding generates a sum of the timers.
"""
now = 10
metrics = [Timer("k", 10),
Timer("k", 15),
Timer("j", 7),
Timer("j", 8)]
result = Timer.fold(metrics, now)
assert ("k", 25, now) == self._get_metric("timers.k.sum", result)
assert ("j", 15, now) == self._get_metric("timers.j.sum", result)
def _get_metric(self, key, metrics):
"""
This will extract a specific metric out of an array of metrics.
"""
for metric in metrics:
if metric[0] == key:
return metric
return None
Fix typo with expected metric"""
Contains tests for the timer metric.
"""
from statsite.metrics import Timer
class TestTimerMetric(object):
def test_fold_sum(self):
"""
Tests that folding generates a sum of the timers.
"""
now = 10
metrics = [Timer("k", 10),
Timer("k", 15),
Timer("j", 7),
Timer("j", 8)]
result = Timer.fold(metrics, now)
assert ("timers.k.sum", 25, now) == self._get_metric("timers.k.sum", result)
assert ("timers.j.sum", 15, now) == self._get_metric("timers.j.sum", result)
def _get_metric(self, key, metrics):
"""
This will extract a specific metric out of an array of metrics.
"""
for metric in metrics:
if metric[0] == key:
return metric
return None
|
<commit_before>"""
Contains tests for the timer metric.
"""
from statsite.metrics import Timer
class TestTimerMetric(object):
def test_fold_sum(self):
"""
Tests that folding generates a sum of the timers.
"""
now = 10
metrics = [Timer("k", 10),
Timer("k", 15),
Timer("j", 7),
Timer("j", 8)]
result = Timer.fold(metrics, now)
assert ("k", 25, now) == self._get_metric("timers.k.sum", result)
assert ("j", 15, now) == self._get_metric("timers.j.sum", result)
def _get_metric(self, key, metrics):
"""
This will extract a specific metric out of an array of metrics.
"""
for metric in metrics:
if metric[0] == key:
return metric
return None
<commit_msg>Fix typo with expected metric<commit_after>"""
Contains tests for the timer metric.
"""
from statsite.metrics import Timer
class TestTimerMetric(object):
def test_fold_sum(self):
"""
Tests that folding generates a sum of the timers.
"""
now = 10
metrics = [Timer("k", 10),
Timer("k", 15),
Timer("j", 7),
Timer("j", 8)]
result = Timer.fold(metrics, now)
assert ("timers.k.sum", 25, now) == self._get_metric("timers.k.sum", result)
assert ("timers.j.sum", 15, now) == self._get_metric("timers.j.sum", result)
def _get_metric(self, key, metrics):
"""
This will extract a specific metric out of an array of metrics.
"""
for metric in metrics:
if metric[0] == key:
return metric
return None
|
21799c73bdc6f0dc7410edc61db5de5694ab911a
|
django/hello/world/models.py
|
django/hello/world/models.py
|
from django.db import models
# Create your models here.
class World(models.Model):
    """ORM model over the benchmark's random-number table."""
    # One integer column; Django adds the implicit 'id' primary key.
    randomnumber = models.IntegerField()

    class Meta:
        # FIX: the schema pre-creates this table as 'World'; MySQL treats
        # table names case-sensitively on case-sensitive filesystems, so the
        # lowercase 'world' fails to resolve there.
        db_table = 'World'
class Fortune(models.Model):
    """ORM model over the benchmark's fortune-message table."""
    # 65535 chars — sized to a MySQL TEXT column.
    message = models.CharField(max_length=65535)

    class Meta:
        # FIX: match the pre-created table's exact casing ('Fortune');
        # MySQL table names can be case-sensitive.
        db_table = 'Fortune'
|
from django.db import models
# Create your models here.
class World(models.Model):
    """ORM model over the benchmark's random-number table."""
    # One integer column; Django adds the implicit 'id' primary key.
    randomnumber = models.IntegerField()

    class Meta:
        # Explicit table name — the schema pre-creates 'World' with this
        # exact casing (MySQL can treat table names case-sensitively).
        db_table = 'World'
class Fortune(models.Model):
    """ORM model over the benchmark's fortune-message table."""
    # 65535 chars — sized to a MySQL TEXT column.
    message = models.CharField(max_length=65535)

    class Meta:
        # Explicit table name matching the pre-created 'Fortune' table.
        db_table = 'Fortune'
|
Fix table name for MySQL
|
Fix table name for MySQL
|
Python
|
bsd-3-clause
|
dmacd/FB-try1,julienschmidt/FrameworkBenchmarks,zapov/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,sxend/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,methane/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,zapov/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Verber/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,herloct/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,leafo/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,torhve/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,actframework/FrameworkBenchmarks,leafo/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,zapov/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,doom369/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,doom369/FrameworkBenchmarks,herloct/Framework
Benchmarks,thousandsofthem/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,zloster/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,jamming/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,sgml/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,valyala/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,zapov/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,torhve/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,zloster/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,valyala/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,khellang/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,grob/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,sgml/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,stefanocasazza/FrameworkBen
chmarks,MTDdk/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,khellang/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,denkab/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,khellang/FrameworkBenchmarks,zapov/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,valyala/FrameworkBenchmarks,sxend/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,herloct/FrameworkBenchmarks,sgml/FrameworkBenchmarks,testn/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,grob/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,zloster/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,herloct/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,zloster/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,actframework/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,markkolich/FrameworkBench
marks,xitrum-framework/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,khellang/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,leafo/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,testn/FrameworkBenchmarks,joshk/FrameworkBenchmarks,khellang/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,sgml/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,jamming/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,Verber/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,joshk/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,zloster/FrameworkBenchmarks,sgml/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,leafo/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Verber/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,youprofit/Framew
orkBenchmarks,Verber/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,methane/FrameworkBenchmarks,dmacd/FB-try1,donovanmuller/FrameworkBenchmarks,joshk/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,zapov/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,dmacd/FB-try1,kostya-sh/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,dmacd/FB-try1,hamiltont/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,denkab/FrameworkBenchmarks,grob/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,herloct/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,methane/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,denkab/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,thousandsofthem/FrameworkBenchma
rks,denkab/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,doom369/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,joshk/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,testn/FrameworkBenchmarks,torhve/FrameworkBenchmarks,sxend/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,actframework/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,leafo/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,jamming/FrameworkBenchmarks,herloct/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,leafo/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,sgml/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,testn/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,grob/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,leafo/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,zapov/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,sxend/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,valyala/FrameworkBenchmarks,sxend/FrameworkB
enchmarks,Ocramius/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,zapov/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,denkab/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,sgml/FrameworkBenchmarks,grob/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,zapov/FrameworkBenchmarks,grob/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,zloster/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,actframework/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,doom369/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,grob/FrameworkBenchmarks,zloster/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,actframework/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,valyala/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,methane/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,kostya-sh
/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,dmacd/FB-try1,Verber/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,herloct/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,khellang/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,torhve/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,jamming/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,herloct/FrameworkBenchmarks,zloster/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,sgml/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,testn/FrameworkBenchmarks,zloster/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,doom369/FrameworkBenchmarks,actframework/FrameworkBenchmarks,testn/FrameworkBenchmarks,denkab/FrameworkBenchmarks,valyala/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,zapov/FrameworkBenchmarks,jamming/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,grob/FrameworkBenchmarks,torhve/FrameworkBenchmarks,valyala/FrameworkBenchmarks,testn/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,methane/FrameworkBenchmarks,k-r-g/FrameworkBenchm
arks,RockinRoel/FrameworkBenchmarks,grob/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,dmacd/FB-try1,thousandsofthem/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,zapov/FrameworkBenchmarks,khellang/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,torhve/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,sxend/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,actframework/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,joshk/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,doom369/FrameworkBenchmarks,joshk/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,zloster/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,methane/FrameworkBenchmarks,torhve/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,Verber/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,methane/FrameworkBenchmarks,testn/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,leafo/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,joshk/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,sxend/FrameworkBenchmarks,zdanek/FrameworkBenchm
arks,sanjoydesk/FrameworkBenchmarks,methane/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Verber/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,methane/FrameworkBenchmarks,actframework/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Verber/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,doom369/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,sgml/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,torhve/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,jamming/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,jamming/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,dmacd/FB-try1,sxend/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,doom369/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,zloster/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,sagenschneider/FrameworkBench
marks,zloster/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,denkab/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,grob/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,denkab/FrameworkBenchmarks,zloster/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,sxend/FrameworkBenchmarks,zloster/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,zapov/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,dmacd/FB-try1,actframework/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,jamming/FrameworkBenchmarks,joshk/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,joshk/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,khellang/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,leafo/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,Verber/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,sxend/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,nathana1/Fr
ameworkBenchmarks,victorbriz/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,zapov/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,joshk/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,testn/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,herloct/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,grob/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,dmacd/FB-try1,thousandsofthem/FrameworkBenchmarks,jamming/FrameworkBenchmarks,sxend/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,actframework/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,zloster/FrameworkBenchmarks,joshk/FrameworkBenchmarks,zapov/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,Verber/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,testn/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,actframework/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,methane/FrameworkBenchmarks,jamming/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,khellang/FrameworkBenchmarks,dmacd/FB-try1,RockinRoel/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,marko-asplund/
FrameworkBenchmarks,doom369/FrameworkBenchmarks,dmacd/FB-try1,victorbriz/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,testn/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,actframework/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,doom369/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,herloct/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,zloster/FrameworkBenchmarks,actframework/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,grob/FrameworkBenchmarks,jamming/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,torhve/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,valyala/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,jamming/FrameworkBenchmarks,dmacd/FB-try1,circlespainter/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,raziel057/Framework
Benchmarks,leafo/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,torhve/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Verber/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,actframework/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,zloster/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,khellang/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,grob/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,torhve/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,joshk/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,valyala/FrameworkBenchmarks,khellang/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,methane/FrameworkBenchmarks,denkab/FrameworkBenchmarks,denkab/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,testn/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,methane/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,marko-asplund/Framewo
rkBenchmarks,jebbstewart/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,joshk/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,doom369/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,methane/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,khellang/FrameworkBenchmarks,sgml/FrameworkBenchmarks,sxend/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,leafo/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,testn/FrameworkBenchmarks,doom369/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,valyala/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,sgml/FrameworkBenchmarks,actframework/FrameworkBenchmarks,valyala/FrameworkBenchmarks,lcp0578/Frame
workBenchmarks,Dith3r/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,torhve/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,sxend/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,methane/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,actframework/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,Verber/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,grob/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,valyala/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,leafo/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,zapov/FrameworkBenchmarks,sxend/FrameworkBenchmarks,joshk/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,denkab/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,denkab/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,jamming/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,herloct/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,nbrady-techempower/FrameworkBen
chmarks,Rayne/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,jamming/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,valyala/FrameworkBenchmarks,herloct/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,sgml/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,valyala/FrameworkBenchmarks,sxend/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,zapov/FrameworkBenchmarks,khellang/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,doom369/FrameworkBenchmarks,denkab/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Verber/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,denkab/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,testn/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Verber/FrameworkBenchmarks,herloct/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,sxend/FrameworkBenchmarks
|
from django.db import models
# Create your models here.
class World(models.Model):
randomnumber = models.IntegerField()
class Meta:
db_table = 'world'
class Fortune(models.Model):
message = models.CharField(max_length=65535)
class Meta:
db_table = 'fortune'
Fix table name for MySQL
|
from django.db import models
# Create your models here.
class World(models.Model):
randomnumber = models.IntegerField()
class Meta:
db_table = 'World'
class Fortune(models.Model):
message = models.CharField(max_length=65535)
class Meta:
db_table = 'Fortune'
|
<commit_before>from django.db import models
# Create your models here.
class World(models.Model):
randomnumber = models.IntegerField()
class Meta:
db_table = 'world'
class Fortune(models.Model):
message = models.CharField(max_length=65535)
class Meta:
db_table = 'fortune'
<commit_msg>Fix table name for MySQL<commit_after>
|
from django.db import models
# Create your models here.
class World(models.Model):
randomnumber = models.IntegerField()
class Meta:
db_table = 'World'
class Fortune(models.Model):
message = models.CharField(max_length=65535)
class Meta:
db_table = 'Fortune'
|
from django.db import models
# Create your models here.
class World(models.Model):
randomnumber = models.IntegerField()
class Meta:
db_table = 'world'
class Fortune(models.Model):
message = models.CharField(max_length=65535)
class Meta:
db_table = 'fortune'
Fix table name for MySQLfrom django.db import models
# Create your models here.
class World(models.Model):
randomnumber = models.IntegerField()
class Meta:
db_table = 'World'
class Fortune(models.Model):
message = models.CharField(max_length=65535)
class Meta:
db_table = 'Fortune'
|
<commit_before>from django.db import models
# Create your models here.
class World(models.Model):
randomnumber = models.IntegerField()
class Meta:
db_table = 'world'
class Fortune(models.Model):
message = models.CharField(max_length=65535)
class Meta:
db_table = 'fortune'
<commit_msg>Fix table name for MySQL<commit_after>from django.db import models
# Create your models here.
class World(models.Model):
randomnumber = models.IntegerField()
class Meta:
db_table = 'World'
class Fortune(models.Model):
message = models.CharField(max_length=65535)
class Meta:
db_table = 'Fortune'
|
5a1ad6a2fdd0586517899b3f2ec3d27a00a5d2b1
|
databroker/intake_xarray_core/__init__.py
|
databroker/intake_xarray_core/__init__.py
|
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
import intake # Import this first to avoid circular imports during discovery.
from .netcdf import NetCDFSource
from .opendap import OpenDapSource
from .raster import RasterIOSource
from .xzarr import ZarrSource
from .xarray_container import RemoteXarray
from .image import ImageSource
import intake.container
intake.registry['remote-xarray'] = RemoteXarray
intake.container.container_map['xarray'] = RemoteXarray
|
import intake # Import this first to avoid circular imports during discovery.
from .xarray_container import RemoteXarray
import intake.container
intake.registry['remote-xarray'] = RemoteXarray
intake.container.container_map['xarray'] = RemoteXarray
|
Remove imports of omitted modules.
|
Remove imports of omitted modules.
|
Python
|
bsd-3-clause
|
ericdill/databroker,ericdill/databroker
|
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
import intake # Import this first to avoid circular imports during discovery.
from .netcdf import NetCDFSource
from .opendap import OpenDapSource
from .raster import RasterIOSource
from .xzarr import ZarrSource
from .xarray_container import RemoteXarray
from .image import ImageSource
import intake.container
intake.registry['remote-xarray'] = RemoteXarray
intake.container.container_map['xarray'] = RemoteXarray
Remove imports of omitted modules.
|
import intake # Import this first to avoid circular imports during discovery.
from .xarray_container import RemoteXarray
import intake.container
intake.registry['remote-xarray'] = RemoteXarray
intake.container.container_map['xarray'] = RemoteXarray
|
<commit_before>from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
import intake # Import this first to avoid circular imports during discovery.
from .netcdf import NetCDFSource
from .opendap import OpenDapSource
from .raster import RasterIOSource
from .xzarr import ZarrSource
from .xarray_container import RemoteXarray
from .image import ImageSource
import intake.container
intake.registry['remote-xarray'] = RemoteXarray
intake.container.container_map['xarray'] = RemoteXarray
<commit_msg>Remove imports of omitted modules.<commit_after>
|
import intake # Import this first to avoid circular imports during discovery.
from .xarray_container import RemoteXarray
import intake.container
intake.registry['remote-xarray'] = RemoteXarray
intake.container.container_map['xarray'] = RemoteXarray
|
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
import intake # Import this first to avoid circular imports during discovery.
from .netcdf import NetCDFSource
from .opendap import OpenDapSource
from .raster import RasterIOSource
from .xzarr import ZarrSource
from .xarray_container import RemoteXarray
from .image import ImageSource
import intake.container
intake.registry['remote-xarray'] = RemoteXarray
intake.container.container_map['xarray'] = RemoteXarray
Remove imports of omitted modules.import intake # Import this first to avoid circular imports during discovery.
from .xarray_container import RemoteXarray
import intake.container
intake.registry['remote-xarray'] = RemoteXarray
intake.container.container_map['xarray'] = RemoteXarray
|
<commit_before>from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
import intake # Import this first to avoid circular imports during discovery.
from .netcdf import NetCDFSource
from .opendap import OpenDapSource
from .raster import RasterIOSource
from .xzarr import ZarrSource
from .xarray_container import RemoteXarray
from .image import ImageSource
import intake.container
intake.registry['remote-xarray'] = RemoteXarray
intake.container.container_map['xarray'] = RemoteXarray
<commit_msg>Remove imports of omitted modules.<commit_after>import intake # Import this first to avoid circular imports during discovery.
from .xarray_container import RemoteXarray
import intake.container
intake.registry['remote-xarray'] = RemoteXarray
intake.container.container_map['xarray'] = RemoteXarray
|
68c256ef51f0e622dcfc92cb63bf4b0503fb61a8
|
common/templatetags/lutris.py
|
common/templatetags/lutris.py
|
import copy
from django import template
from django.conf import settings
from games import models
register = template.Library()
def get_links(user_agent):
systems = ['ubuntu', 'fedora', 'linux']
downloads = copy.copy(settings.DOWNLOADS)
main_download = None
for system in systems:
if system in user_agent:
main_download = {system: settings.DOWNLOADS[system]}
downloads.pop(system)
if not main_download:
main_download = {'linux': downloads.pop('linux')}
return (main_download, downloads)
@register.inclusion_tag('includes/download_links.html', takes_context=True)
def download_links(context):
request = context['request']
user_agent = request.META['HTTP_USER_AGENT'].lower()
context['main_download'], context['downloads'] = get_links(user_agent)
return context
@register.inclusion_tag('includes/featured_slider.html', takes_context=True)
def featured_slider(context):
context['featured_contents'] = models.Featured.objects.all()
return context
@register.inclusion_tag('includes/latest_games.html', takes_context=True)
def latest_games(context):
games = models.Game.objects.published().order_by('-created')[:5]
context['latest_games'] = games
return context
|
import copy
from django import template
from django.conf import settings
from games import models
register = template.Library()
def get_links(user_agent):
systems = ['ubuntu', 'fedora', 'linux']
downloads = copy.copy(settings.DOWNLOADS)
main_download = None
for system in systems:
if system in user_agent:
main_download = {system: settings.DOWNLOADS[system]}
downloads.pop(system)
if not main_download:
main_download = {'linux': downloads.pop('linux')}
return (main_download, downloads)
@register.inclusion_tag('includes/download_links.html', takes_context=True)
def download_links(context):
request = context['request']
user_agent = request.META.get('HTTP_USER_AGENT', '').lower()
context['main_download'], context['downloads'] = get_links(user_agent)
return context
@register.inclusion_tag('includes/featured_slider.html', takes_context=True)
def featured_slider(context):
context['featured_contents'] = models.Featured.objects.all()
return context
@register.inclusion_tag('includes/latest_games.html', takes_context=True)
def latest_games(context):
games = models.Game.objects.published().order_by('-created')[:5]
context['latest_games'] = games
return context
|
Make code compatible with no user agent
|
Make code compatible with no user agent
|
Python
|
agpl-3.0
|
lutris/website,Turupawn/website,Turupawn/website,lutris/website,lutris/website,Turupawn/website,Turupawn/website,lutris/website
|
import copy
from django import template
from django.conf import settings
from games import models
register = template.Library()
def get_links(user_agent):
systems = ['ubuntu', 'fedora', 'linux']
downloads = copy.copy(settings.DOWNLOADS)
main_download = None
for system in systems:
if system in user_agent:
main_download = {system: settings.DOWNLOADS[system]}
downloads.pop(system)
if not main_download:
main_download = {'linux': downloads.pop('linux')}
return (main_download, downloads)
@register.inclusion_tag('includes/download_links.html', takes_context=True)
def download_links(context):
request = context['request']
user_agent = request.META['HTTP_USER_AGENT'].lower()
context['main_download'], context['downloads'] = get_links(user_agent)
return context
@register.inclusion_tag('includes/featured_slider.html', takes_context=True)
def featured_slider(context):
context['featured_contents'] = models.Featured.objects.all()
return context
@register.inclusion_tag('includes/latest_games.html', takes_context=True)
def latest_games(context):
games = models.Game.objects.published().order_by('-created')[:5]
context['latest_games'] = games
return context
Make code compatible with no user agent
|
import copy
from django import template
from django.conf import settings
from games import models
register = template.Library()
def get_links(user_agent):
systems = ['ubuntu', 'fedora', 'linux']
downloads = copy.copy(settings.DOWNLOADS)
main_download = None
for system in systems:
if system in user_agent:
main_download = {system: settings.DOWNLOADS[system]}
downloads.pop(system)
if not main_download:
main_download = {'linux': downloads.pop('linux')}
return (main_download, downloads)
@register.inclusion_tag('includes/download_links.html', takes_context=True)
def download_links(context):
request = context['request']
user_agent = request.META.get('HTTP_USER_AGENT', '').lower()
context['main_download'], context['downloads'] = get_links(user_agent)
return context
@register.inclusion_tag('includes/featured_slider.html', takes_context=True)
def featured_slider(context):
context['featured_contents'] = models.Featured.objects.all()
return context
@register.inclusion_tag('includes/latest_games.html', takes_context=True)
def latest_games(context):
games = models.Game.objects.published().order_by('-created')[:5]
context['latest_games'] = games
return context
|
<commit_before>import copy
from django import template
from django.conf import settings
from games import models
register = template.Library()
def get_links(user_agent):
systems = ['ubuntu', 'fedora', 'linux']
downloads = copy.copy(settings.DOWNLOADS)
main_download = None
for system in systems:
if system in user_agent:
main_download = {system: settings.DOWNLOADS[system]}
downloads.pop(system)
if not main_download:
main_download = {'linux': downloads.pop('linux')}
return (main_download, downloads)
@register.inclusion_tag('includes/download_links.html', takes_context=True)
def download_links(context):
request = context['request']
user_agent = request.META['HTTP_USER_AGENT'].lower()
context['main_download'], context['downloads'] = get_links(user_agent)
return context
@register.inclusion_tag('includes/featured_slider.html', takes_context=True)
def featured_slider(context):
context['featured_contents'] = models.Featured.objects.all()
return context
@register.inclusion_tag('includes/latest_games.html', takes_context=True)
def latest_games(context):
games = models.Game.objects.published().order_by('-created')[:5]
context['latest_games'] = games
return context
<commit_msg>Make code compatible with no user agent<commit_after>
|
import copy
from django import template
from django.conf import settings
from games import models
register = template.Library()
def get_links(user_agent):
systems = ['ubuntu', 'fedora', 'linux']
downloads = copy.copy(settings.DOWNLOADS)
main_download = None
for system in systems:
if system in user_agent:
main_download = {system: settings.DOWNLOADS[system]}
downloads.pop(system)
if not main_download:
main_download = {'linux': downloads.pop('linux')}
return (main_download, downloads)
@register.inclusion_tag('includes/download_links.html', takes_context=True)
def download_links(context):
request = context['request']
user_agent = request.META.get('HTTP_USER_AGENT', '').lower()
context['main_download'], context['downloads'] = get_links(user_agent)
return context
@register.inclusion_tag('includes/featured_slider.html', takes_context=True)
def featured_slider(context):
context['featured_contents'] = models.Featured.objects.all()
return context
@register.inclusion_tag('includes/latest_games.html', takes_context=True)
def latest_games(context):
games = models.Game.objects.published().order_by('-created')[:5]
context['latest_games'] = games
return context
|
import copy
from django import template
from django.conf import settings
from games import models
register = template.Library()
def get_links(user_agent):
systems = ['ubuntu', 'fedora', 'linux']
downloads = copy.copy(settings.DOWNLOADS)
main_download = None
for system in systems:
if system in user_agent:
main_download = {system: settings.DOWNLOADS[system]}
downloads.pop(system)
if not main_download:
main_download = {'linux': downloads.pop('linux')}
return (main_download, downloads)
@register.inclusion_tag('includes/download_links.html', takes_context=True)
def download_links(context):
request = context['request']
user_agent = request.META['HTTP_USER_AGENT'].lower()
context['main_download'], context['downloads'] = get_links(user_agent)
return context
@register.inclusion_tag('includes/featured_slider.html', takes_context=True)
def featured_slider(context):
context['featured_contents'] = models.Featured.objects.all()
return context
@register.inclusion_tag('includes/latest_games.html', takes_context=True)
def latest_games(context):
games = models.Game.objects.published().order_by('-created')[:5]
context['latest_games'] = games
return context
Make code compatible with no user agentimport copy
from django import template
from django.conf import settings
from games import models
register = template.Library()
def get_links(user_agent):
systems = ['ubuntu', 'fedora', 'linux']
downloads = copy.copy(settings.DOWNLOADS)
main_download = None
for system in systems:
if system in user_agent:
main_download = {system: settings.DOWNLOADS[system]}
downloads.pop(system)
if not main_download:
main_download = {'linux': downloads.pop('linux')}
return (main_download, downloads)
@register.inclusion_tag('includes/download_links.html', takes_context=True)
def download_links(context):
request = context['request']
user_agent = request.META.get('HTTP_USER_AGENT', '').lower()
context['main_download'], context['downloads'] = get_links(user_agent)
return context
@register.inclusion_tag('includes/featured_slider.html', takes_context=True)
def featured_slider(context):
context['featured_contents'] = models.Featured.objects.all()
return context
@register.inclusion_tag('includes/latest_games.html', takes_context=True)
def latest_games(context):
games = models.Game.objects.published().order_by('-created')[:5]
context['latest_games'] = games
return context
|
<commit_before>import copy
from django import template
from django.conf import settings
from games import models
register = template.Library()
def get_links(user_agent):
systems = ['ubuntu', 'fedora', 'linux']
downloads = copy.copy(settings.DOWNLOADS)
main_download = None
for system in systems:
if system in user_agent:
main_download = {system: settings.DOWNLOADS[system]}
downloads.pop(system)
if not main_download:
main_download = {'linux': downloads.pop('linux')}
return (main_download, downloads)
@register.inclusion_tag('includes/download_links.html', takes_context=True)
def download_links(context):
request = context['request']
user_agent = request.META['HTTP_USER_AGENT'].lower()
context['main_download'], context['downloads'] = get_links(user_agent)
return context
@register.inclusion_tag('includes/featured_slider.html', takes_context=True)
def featured_slider(context):
context['featured_contents'] = models.Featured.objects.all()
return context
@register.inclusion_tag('includes/latest_games.html', takes_context=True)
def latest_games(context):
games = models.Game.objects.published().order_by('-created')[:5]
context['latest_games'] = games
return context
<commit_msg>Make code compatible with no user agent<commit_after>import copy
from django import template
from django.conf import settings
from games import models
register = template.Library()
def get_links(user_agent):
systems = ['ubuntu', 'fedora', 'linux']
downloads = copy.copy(settings.DOWNLOADS)
main_download = None
for system in systems:
if system in user_agent:
main_download = {system: settings.DOWNLOADS[system]}
downloads.pop(system)
if not main_download:
main_download = {'linux': downloads.pop('linux')}
return (main_download, downloads)
@register.inclusion_tag('includes/download_links.html', takes_context=True)
def download_links(context):
request = context['request']
user_agent = request.META.get('HTTP_USER_AGENT', '').lower()
context['main_download'], context['downloads'] = get_links(user_agent)
return context
@register.inclusion_tag('includes/featured_slider.html', takes_context=True)
def featured_slider(context):
context['featured_contents'] = models.Featured.objects.all()
return context
@register.inclusion_tag('includes/latest_games.html', takes_context=True)
def latest_games(context):
games = models.Game.objects.published().order_by('-created')[:5]
context['latest_games'] = games
return context
|
ed542ea8979882e7cc245aee7e3c4a6cb6235a5f
|
HARK/tests/test_validators.py
|
HARK/tests/test_validators.py
|
import unittest, sys
from HARK.validators import non_empty
class ValidatorsTests(unittest.TestCase):
'''
Tests for validator decorators which validate function arguments
'''
def test_non_empty(self):
@non_empty('list_a')
def foo(list_a, list_b):
pass
try:
foo([1], [])
except Exception:
self.fail()
if sys.version[0] == '2':
with self.assertRaisesRegexp(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
else:
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
@non_empty('list_a', 'list_b')
def foo(list_a, list_b):
pass
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_b',
):
foo([1], [])
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
|
import unittest, sys
from HARK.validators import non_empty
class ValidatorsTests(unittest.TestCase):
'''
Tests for validator decorators which validate function arguments
'''
def test_non_empty(self):
@non_empty('list_a')
def foo(list_a, list_b):
pass
try:
foo([1], [])
except Exception:
self.fail()
if sys.version[0] == '2':
with self.assertRaisesRegexp(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
else:
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
@non_empty('list_a', 'list_b')
def foo(list_a, list_b):
pass
if sys.version[0] == '2':
with self.assertRaisesRegexp(
TypeError,
'Expected non-empty argument for parameter list_b',
):
foo([1], [])
with self.assertRaisesRegexp(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
else:
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_b',
):
foo([1], [])
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
|
Fix other tests with same regexp issue
|
Fix other tests with same regexp issue
|
Python
|
apache-2.0
|
econ-ark/HARK,econ-ark/HARK
|
import unittest, sys
from HARK.validators import non_empty
class ValidatorsTests(unittest.TestCase):
'''
Tests for validator decorators which validate function arguments
'''
def test_non_empty(self):
@non_empty('list_a')
def foo(list_a, list_b):
pass
try:
foo([1], [])
except Exception:
self.fail()
if sys.version[0] == '2':
with self.assertRaisesRegexp(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
else:
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
@non_empty('list_a', 'list_b')
def foo(list_a, list_b):
pass
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_b',
):
foo([1], [])
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
Fix other tests with same regexp issue
|
import unittest, sys
from HARK.validators import non_empty
class ValidatorsTests(unittest.TestCase):
'''
Tests for validator decorators which validate function arguments
'''
def test_non_empty(self):
@non_empty('list_a')
def foo(list_a, list_b):
pass
try:
foo([1], [])
except Exception:
self.fail()
if sys.version[0] == '2':
with self.assertRaisesRegexp(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
else:
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
@non_empty('list_a', 'list_b')
def foo(list_a, list_b):
pass
if sys.version[0] == '2':
with self.assertRaisesRegexp(
TypeError,
'Expected non-empty argument for parameter list_b',
):
foo([1], [])
with self.assertRaisesRegexp(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
else:
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_b',
):
foo([1], [])
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
|
<commit_before>import unittest, sys
from HARK.validators import non_empty
class ValidatorsTests(unittest.TestCase):
'''
Tests for validator decorators which validate function arguments
'''
def test_non_empty(self):
@non_empty('list_a')
def foo(list_a, list_b):
pass
try:
foo([1], [])
except Exception:
self.fail()
if sys.version[0] == '2':
with self.assertRaisesRegexp(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
else:
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
@non_empty('list_a', 'list_b')
def foo(list_a, list_b):
pass
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_b',
):
foo([1], [])
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
<commit_msg>Fix other tests with same regexp issue<commit_after>
|
import unittest, sys
from HARK.validators import non_empty
class ValidatorsTests(unittest.TestCase):
'''
Tests for validator decorators which validate function arguments
'''
def test_non_empty(self):
@non_empty('list_a')
def foo(list_a, list_b):
pass
try:
foo([1], [])
except Exception:
self.fail()
if sys.version[0] == '2':
with self.assertRaisesRegexp(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
else:
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
@non_empty('list_a', 'list_b')
def foo(list_a, list_b):
pass
if sys.version[0] == '2':
with self.assertRaisesRegexp(
TypeError,
'Expected non-empty argument for parameter list_b',
):
foo([1], [])
with self.assertRaisesRegexp(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
else:
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_b',
):
foo([1], [])
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
|
import unittest, sys
from HARK.validators import non_empty
class ValidatorsTests(unittest.TestCase):
'''
Tests for validator decorators which validate function arguments
'''
def test_non_empty(self):
@non_empty('list_a')
def foo(list_a, list_b):
pass
try:
foo([1], [])
except Exception:
self.fail()
if sys.version[0] == '2':
with self.assertRaisesRegexp(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
else:
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
@non_empty('list_a', 'list_b')
def foo(list_a, list_b):
pass
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_b',
):
foo([1], [])
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
Fix other tests with same regexp issueimport unittest, sys
from HARK.validators import non_empty
class ValidatorsTests(unittest.TestCase):
'''
Tests for validator decorators which validate function arguments
'''
def test_non_empty(self):
@non_empty('list_a')
def foo(list_a, list_b):
pass
try:
foo([1], [])
except Exception:
self.fail()
if sys.version[0] == '2':
with self.assertRaisesRegexp(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
else:
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
@non_empty('list_a', 'list_b')
def foo(list_a, list_b):
pass
if sys.version[0] == '2':
with self.assertRaisesRegexp(
TypeError,
'Expected non-empty argument for parameter list_b',
):
foo([1], [])
with self.assertRaisesRegexp(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
else:
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_b',
):
foo([1], [])
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
|
<commit_before>import unittest, sys
from HARK.validators import non_empty
class ValidatorsTests(unittest.TestCase):
'''
Tests for validator decorators which validate function arguments
'''
def test_non_empty(self):
@non_empty('list_a')
def foo(list_a, list_b):
pass
try:
foo([1], [])
except Exception:
self.fail()
if sys.version[0] == '2':
with self.assertRaisesRegexp(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
else:
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
@non_empty('list_a', 'list_b')
def foo(list_a, list_b):
pass
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_b',
):
foo([1], [])
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
<commit_msg>Fix other tests with same regexp issue<commit_after>import unittest, sys
from HARK.validators import non_empty
class ValidatorsTests(unittest.TestCase):
'''
Tests for validator decorators which validate function arguments
'''
def test_non_empty(self):
@non_empty('list_a')
def foo(list_a, list_b):
pass
try:
foo([1], [])
except Exception:
self.fail()
if sys.version[0] == '2':
with self.assertRaisesRegexp(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
else:
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
@non_empty('list_a', 'list_b')
def foo(list_a, list_b):
pass
if sys.version[0] == '2':
with self.assertRaisesRegexp(
TypeError,
'Expected non-empty argument for parameter list_b',
):
foo([1], [])
with self.assertRaisesRegexp(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
else:
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_b',
):
foo([1], [])
with self.assertRaisesRegex(
TypeError,
'Expected non-empty argument for parameter list_a',
):
foo([], [1])
|
7623966ac3962dfe871638b6804e056fa794ea60
|
api/webscripts/show_summary.py
|
api/webscripts/show_summary.py
|
from django import forms
from webscript import WebScript
from django.template.loader import render_to_string
import amcat.scripts.forms
import amcat.forms
from amcat.tools import keywordsearch
from amcat.scripts import script
#from amcat.scripts.searchscripts.articlelist import ArticleListScript, ArticleListSpecificForm
from amcat.scripts.searchscripts.articlelist import ArticleListScript
class ShowSummary(WebScript):
name = "Summary"
form_template = None
form = None
def run(self):
self.progress_monitor.update(1, "Creating summary")
if isinstance(self.data['projects'], (basestring, int)):
project_id = int(self.data['projects'])
else:
project_id = int(self.data['projects'][0])
n = keywordsearch.get_total_n(self.data)
self.progress_monitor.update(39, "Found {n} articles in total".format(**locals()))
articles = list(ArticleListScript(self.data).run())
for a in articles:
a.hack_project_id = project_id
self.output_template = 'api/webscripts/articlelist.html'
self.progress_monitor.update(40, "Created summary")
return self.outputResponse(dict(articlelist=articles, n=n, page=self.data.get('start')), ArticleListScript.output_type)
|
from webscript import WebScript
from amcat.tools import keywordsearch
from amcat.scripts.searchscripts.articlelist import ArticleListScript
from amcat.scripts.forms import SelectionForm
class ShowSummary(WebScript):
name = "Summary"
form_template = None
form = None
def run(self):
self.progress_monitor.update(1, "Creating summary")
if isinstance(self.data['projects'], (basestring, int)):
project_id = int(self.data['projects'])
else:
project_id = int(self.data['projects'][0])
sf = SelectionForm(self.project, self.data)
sf.full_clean()
n = keywordsearch.get_total_n(sf.cleaned_data)
self.progress_monitor.update(39, "Found {n} articles in total".format(**locals()))
articles = list(ArticleListScript(self.data).run())
for a in articles:
a.hack_project_id = project_id
self.output_template = 'api/webscripts/articlelist.html'
self.progress_monitor.update(40, "Created summary")
return self.outputResponse(dict(articlelist=articles, n=n, page=self.data.get('start')), ArticleListScript.output_type)
|
Clean user data before passing it to keywordsearch.get_total_n()
|
Clean user data before passing it to keywordsearch.get_total_n()
|
Python
|
agpl-3.0
|
amcat/amcat,tschmorleiz/amcat,tschmorleiz/amcat,tschmorleiz/amcat,amcat/amcat,amcat/amcat,tschmorleiz/amcat,amcat/amcat,tschmorleiz/amcat,amcat/amcat,amcat/amcat
|
from django import forms
from webscript import WebScript
from django.template.loader import render_to_string
import amcat.scripts.forms
import amcat.forms
from amcat.tools import keywordsearch
from amcat.scripts import script
#from amcat.scripts.searchscripts.articlelist import ArticleListScript, ArticleListSpecificForm
from amcat.scripts.searchscripts.articlelist import ArticleListScript
class ShowSummary(WebScript):
    """Web script rendering a summary of a selection: the article list plus total count."""
    name = "Summary"
    form_template = None  # no extra form beyond the generic selection form
    form = None

    def run(self):
        self.progress_monitor.update(1, "Creating summary")
        # 'projects' may arrive as a single id or as a sequence of ids;
        # only the first is used for the hack below.
        # NOTE(review): basestring implies Python 2 — confirm interpreter version.
        if isinstance(self.data['projects'], (basestring, int)):
            project_id = int(self.data['projects'])
        else:
            project_id = int(self.data['projects'][0])
        # NOTE(review): raw request data goes straight to get_total_n() here;
        # later revisions clean it through SelectionForm first.
        n = keywordsearch.get_total_n(self.data)
        self.progress_monitor.update(39, "Found {n} articles in total".format(**locals()))
        articles = list(ArticleListScript(self.data).run())
        for a in articles:
            # presumably consumed by the article list template — verify against caller
            a.hack_project_id = project_id
        self.output_template = 'api/webscripts/articlelist.html'
        self.progress_monitor.update(40, "Created summary")
        return self.outputResponse(dict(articlelist=articles, n=n, page=self.data.get('start')), ArticleListScript.output_type)
Clean user data before passing it to keywordsearch.get_total_n()
|
from webscript import WebScript
from amcat.tools import keywordsearch
from amcat.scripts.searchscripts.articlelist import ArticleListScript
from amcat.scripts.forms import SelectionForm
class ShowSummary(WebScript):
    """Web script rendering a summary of a selection: the article list plus total count."""
    name = "Summary"
    form_template = None  # no extra form beyond the generic selection form
    form = None

    def run(self):
        self.progress_monitor.update(1, "Creating summary")
        # 'projects' may arrive as a single id or as a sequence of ids;
        # only the first is used for the hack below.
        # NOTE(review): basestring implies Python 2 — confirm interpreter version.
        if isinstance(self.data['projects'], (basestring, int)):
            project_id = int(self.data['projects'])
        else:
            project_id = int(self.data['projects'][0])
        # Clean the raw request data through SelectionForm before counting,
        # so get_total_n() only ever sees validated values.
        sf = SelectionForm(self.project, self.data)
        sf.full_clean()
        n = keywordsearch.get_total_n(sf.cleaned_data)
        self.progress_monitor.update(39, "Found {n} articles in total".format(**locals()))
        articles = list(ArticleListScript(self.data).run())
        for a in articles:
            # presumably consumed by the article list template — verify against caller
            a.hack_project_id = project_id
        self.output_template = 'api/webscripts/articlelist.html'
        self.progress_monitor.update(40, "Created summary")
        return self.outputResponse(dict(articlelist=articles, n=n, page=self.data.get('start')), ArticleListScript.output_type)
|
<commit_before>from django import forms
from webscript import WebScript
from django.template.loader import render_to_string
import amcat.scripts.forms
import amcat.forms
from amcat.tools import keywordsearch
from amcat.scripts import script
#from amcat.scripts.searchscripts.articlelist import ArticleListScript, ArticleListSpecificForm
from amcat.scripts.searchscripts.articlelist import ArticleListScript
class ShowSummary(WebScript):
    """Web script rendering a summary of a selection: the article list plus total count."""
    name = "Summary"
    form_template = None  # no extra form beyond the generic selection form
    form = None

    def run(self):
        self.progress_monitor.update(1, "Creating summary")
        # 'projects' may arrive as a single id or as a sequence of ids;
        # only the first is used for the hack below.
        # NOTE(review): basestring implies Python 2 — confirm interpreter version.
        if isinstance(self.data['projects'], (basestring, int)):
            project_id = int(self.data['projects'])
        else:
            project_id = int(self.data['projects'][0])
        # NOTE(review): raw request data goes straight to get_total_n() here;
        # later revisions clean it through SelectionForm first.
        n = keywordsearch.get_total_n(self.data)
        self.progress_monitor.update(39, "Found {n} articles in total".format(**locals()))
        articles = list(ArticleListScript(self.data).run())
        for a in articles:
            # presumably consumed by the article list template — verify against caller
            a.hack_project_id = project_id
        self.output_template = 'api/webscripts/articlelist.html'
        self.progress_monitor.update(40, "Created summary")
        return self.outputResponse(dict(articlelist=articles, n=n, page=self.data.get('start')), ArticleListScript.output_type)
<commit_msg>Clean user data before passing it to keywordsearch.get_total_n()<commit_after>
|
from webscript import WebScript
from amcat.tools import keywordsearch
from amcat.scripts.searchscripts.articlelist import ArticleListScript
from amcat.scripts.forms import SelectionForm
class ShowSummary(WebScript):
    """Web script rendering a summary of a selection: the article list plus total count."""
    name = "Summary"
    form_template = None  # no extra form beyond the generic selection form
    form = None

    def run(self):
        self.progress_monitor.update(1, "Creating summary")
        # 'projects' may arrive as a single id or as a sequence of ids;
        # only the first is used for the hack below.
        # NOTE(review): basestring implies Python 2 — confirm interpreter version.
        if isinstance(self.data['projects'], (basestring, int)):
            project_id = int(self.data['projects'])
        else:
            project_id = int(self.data['projects'][0])
        # Clean the raw request data through SelectionForm before counting,
        # so get_total_n() only ever sees validated values.
        sf = SelectionForm(self.project, self.data)
        sf.full_clean()
        n = keywordsearch.get_total_n(sf.cleaned_data)
        self.progress_monitor.update(39, "Found {n} articles in total".format(**locals()))
        articles = list(ArticleListScript(self.data).run())
        for a in articles:
            # presumably consumed by the article list template — verify against caller
            a.hack_project_id = project_id
        self.output_template = 'api/webscripts/articlelist.html'
        self.progress_monitor.update(40, "Created summary")
        return self.outputResponse(dict(articlelist=articles, n=n, page=self.data.get('start')), ArticleListScript.output_type)
|
from django import forms
from webscript import WebScript
from django.template.loader import render_to_string
import amcat.scripts.forms
import amcat.forms
from amcat.tools import keywordsearch
from amcat.scripts import script
#from amcat.scripts.searchscripts.articlelist import ArticleListScript, ArticleListSpecificForm
from amcat.scripts.searchscripts.articlelist import ArticleListScript
class ShowSummary(WebScript):
    """Web script rendering a summary of a selection: the article list plus total count."""
    name = "Summary"
    form_template = None  # no extra form beyond the generic selection form
    form = None

    def run(self):
        self.progress_monitor.update(1, "Creating summary")
        # 'projects' may arrive as a single id or as a sequence of ids;
        # only the first is used for the hack below.
        # NOTE(review): basestring implies Python 2 — confirm interpreter version.
        if isinstance(self.data['projects'], (basestring, int)):
            project_id = int(self.data['projects'])
        else:
            project_id = int(self.data['projects'][0])
        # NOTE(review): raw request data goes straight to get_total_n() here;
        # later revisions clean it through SelectionForm first.
        n = keywordsearch.get_total_n(self.data)
        self.progress_monitor.update(39, "Found {n} articles in total".format(**locals()))
        articles = list(ArticleListScript(self.data).run())
        for a in articles:
            # presumably consumed by the article list template — verify against caller
            a.hack_project_id = project_id
        self.output_template = 'api/webscripts/articlelist.html'
        self.progress_monitor.update(40, "Created summary")
        return self.outputResponse(dict(articlelist=articles, n=n, page=self.data.get('start')), ArticleListScript.output_type)
Clean user data before passing it to keywordsearch.get_total_n()from webscript import WebScript
from amcat.tools import keywordsearch
from amcat.scripts.searchscripts.articlelist import ArticleListScript
from amcat.scripts.forms import SelectionForm
class ShowSummary(WebScript):
    """Web script rendering a summary of a selection: the article list plus total count."""
    name = "Summary"
    form_template = None  # no extra form beyond the generic selection form
    form = None

    def run(self):
        self.progress_monitor.update(1, "Creating summary")
        # 'projects' may arrive as a single id or as a sequence of ids;
        # only the first is used for the hack below.
        # NOTE(review): basestring implies Python 2 — confirm interpreter version.
        if isinstance(self.data['projects'], (basestring, int)):
            project_id = int(self.data['projects'])
        else:
            project_id = int(self.data['projects'][0])
        # Clean the raw request data through SelectionForm before counting,
        # so get_total_n() only ever sees validated values.
        sf = SelectionForm(self.project, self.data)
        sf.full_clean()
        n = keywordsearch.get_total_n(sf.cleaned_data)
        self.progress_monitor.update(39, "Found {n} articles in total".format(**locals()))
        articles = list(ArticleListScript(self.data).run())
        for a in articles:
            # presumably consumed by the article list template — verify against caller
            a.hack_project_id = project_id
        self.output_template = 'api/webscripts/articlelist.html'
        self.progress_monitor.update(40, "Created summary")
        return self.outputResponse(dict(articlelist=articles, n=n, page=self.data.get('start')), ArticleListScript.output_type)
|
<commit_before>from django import forms
from webscript import WebScript
from django.template.loader import render_to_string
import amcat.scripts.forms
import amcat.forms
from amcat.tools import keywordsearch
from amcat.scripts import script
#from amcat.scripts.searchscripts.articlelist import ArticleListScript, ArticleListSpecificForm
from amcat.scripts.searchscripts.articlelist import ArticleListScript
class ShowSummary(WebScript):
    """Web script rendering a summary of a selection: the article list plus total count."""
    name = "Summary"
    form_template = None  # no extra form beyond the generic selection form
    form = None

    def run(self):
        self.progress_monitor.update(1, "Creating summary")
        # 'projects' may arrive as a single id or as a sequence of ids;
        # only the first is used for the hack below.
        # NOTE(review): basestring implies Python 2 — confirm interpreter version.
        if isinstance(self.data['projects'], (basestring, int)):
            project_id = int(self.data['projects'])
        else:
            project_id = int(self.data['projects'][0])
        # NOTE(review): raw request data goes straight to get_total_n() here;
        # later revisions clean it through SelectionForm first.
        n = keywordsearch.get_total_n(self.data)
        self.progress_monitor.update(39, "Found {n} articles in total".format(**locals()))
        articles = list(ArticleListScript(self.data).run())
        for a in articles:
            # presumably consumed by the article list template — verify against caller
            a.hack_project_id = project_id
        self.output_template = 'api/webscripts/articlelist.html'
        self.progress_monitor.update(40, "Created summary")
        return self.outputResponse(dict(articlelist=articles, n=n, page=self.data.get('start')), ArticleListScript.output_type)
<commit_msg>Clean user data before passing it to keywordsearch.get_total_n()<commit_after>from webscript import WebScript
from amcat.tools import keywordsearch
from amcat.scripts.searchscripts.articlelist import ArticleListScript
from amcat.scripts.forms import SelectionForm
class ShowSummary(WebScript):
    """Web script rendering a summary of a selection: the article list plus total count."""
    name = "Summary"
    form_template = None  # no extra form beyond the generic selection form
    form = None

    def run(self):
        self.progress_monitor.update(1, "Creating summary")
        # 'projects' may arrive as a single id or as a sequence of ids;
        # only the first is used for the hack below.
        # NOTE(review): basestring implies Python 2 — confirm interpreter version.
        if isinstance(self.data['projects'], (basestring, int)):
            project_id = int(self.data['projects'])
        else:
            project_id = int(self.data['projects'][0])
        # Clean the raw request data through SelectionForm before counting,
        # so get_total_n() only ever sees validated values.
        sf = SelectionForm(self.project, self.data)
        sf.full_clean()
        n = keywordsearch.get_total_n(sf.cleaned_data)
        self.progress_monitor.update(39, "Found {n} articles in total".format(**locals()))
        articles = list(ArticleListScript(self.data).run())
        for a in articles:
            # presumably consumed by the article list template — verify against caller
            a.hack_project_id = project_id
        self.output_template = 'api/webscripts/articlelist.html'
        self.progress_monitor.update(40, "Created summary")
        return self.outputResponse(dict(articlelist=articles, n=n, page=self.data.get('start')), ArticleListScript.output_type)
|
b7fd2af25423847236b5d382aeb829b00c556485
|
alertaclient/auth/oidc.py
|
alertaclient/auth/oidc.py
|
import webbrowser
from uuid import uuid4
from alertaclient.auth.token import TokenHandler
def login(client, oidc_auth_url, client_id):
    """Perform an interactive OIDC authorization-code login.

    Opens the provider's authorization URL in a browser, waits for the
    redirect to be captured locally, then exchanges the code with the
    server via ``client.token``.
    """
    # Random state value to tie the redirect back to this request (CSRF guard).
    xsrf_token = str(uuid4())
    redirect_uri = 'http://127.0.0.1:9004'
    url = (
        '{oidc_auth_url}?'
        'response_type=code'
        '&client_id={client_id}'
        '&redirect_uri={redirect_uri}'
        '&scope=openid%20profile%20email'
        '&state={state}'
    ).format(
        oidc_auth_url=oidc_auth_url,
        client_id=client_id,
        redirect_uri=redirect_uri,
        state=xsrf_token
    )
    webbrowser.open(url, new=0, autoraise=True)
    # TokenHandler presumably listens on port 9004 for the provider
    # redirect carrying the code — verify against its implementation.
    auth = TokenHandler()
    access_token = auth.get_access_token(xsrf_token)
    data = {
        'code': access_token,
        'clientId': client_id,
        'redirectUri': redirect_uri
    }
    # Exchange the captured code for a server-side token.
    return client.token('openid', data)
|
import webbrowser
from uuid import uuid4
from alertaclient.auth.token import TokenHandler
def login(client, oidc_auth_url, client_id):
    """Perform an interactive OIDC authorization-code login.

    Opens the provider's authorization URL in a browser, waits for the
    redirect to be captured locally, then exchanges the code with the
    server via ``client.token``.
    """
    # Random state value to tie the redirect back to this request (CSRF guard).
    xsrf_token = str(uuid4())
    redirect_uri = 'http://localhost:9004'  # azure only supports 'localhost'
    url = (
        '{oidc_auth_url}?'
        'response_type=code'
        '&client_id={client_id}'
        '&redirect_uri={redirect_uri}'
        '&scope=openid%20profile%20email'
        '&state={state}'
    ).format(
        oidc_auth_url=oidc_auth_url,
        client_id=client_id,
        redirect_uri=redirect_uri,
        state=xsrf_token
    )
    webbrowser.open(url, new=0, autoraise=True)
    # TokenHandler presumably listens on port 9004 for the provider
    # redirect carrying the code — verify against its implementation.
    auth = TokenHandler()
    access_token = auth.get_access_token(xsrf_token)
    data = {
        'code': access_token,
        'clientId': client_id,
        'redirectUri': redirect_uri
    }
    # Exchange the captured code for a server-side token.
    return client.token('openid', data)
|
Use localhost instead of 127.0.0.1
|
Use localhost instead of 127.0.0.1
|
Python
|
apache-2.0
|
alerta/python-alerta,alerta/python-alerta-client,alerta/python-alerta-client
|
import webbrowser
from uuid import uuid4
from alertaclient.auth.token import TokenHandler
def login(client, oidc_auth_url, client_id):
    """Perform an interactive OIDC authorization-code login.

    Opens the provider's authorization URL in a browser, waits for the
    redirect to be captured locally, then exchanges the code with the
    server via ``client.token``.
    """
    # Random state value to tie the redirect back to this request (CSRF guard).
    xsrf_token = str(uuid4())
    redirect_uri = 'http://127.0.0.1:9004'
    url = (
        '{oidc_auth_url}?'
        'response_type=code'
        '&client_id={client_id}'
        '&redirect_uri={redirect_uri}'
        '&scope=openid%20profile%20email'
        '&state={state}'
    ).format(
        oidc_auth_url=oidc_auth_url,
        client_id=client_id,
        redirect_uri=redirect_uri,
        state=xsrf_token
    )
    webbrowser.open(url, new=0, autoraise=True)
    # TokenHandler presumably listens on port 9004 for the provider
    # redirect carrying the code — verify against its implementation.
    auth = TokenHandler()
    access_token = auth.get_access_token(xsrf_token)
    data = {
        'code': access_token,
        'clientId': client_id,
        'redirectUri': redirect_uri
    }
    # Exchange the captured code for a server-side token.
    return client.token('openid', data)
Use localhost instead of 127.0.0.1
|
import webbrowser
from uuid import uuid4
from alertaclient.auth.token import TokenHandler
def login(client, oidc_auth_url, client_id):
    """Perform an interactive OIDC authorization-code login.

    Opens the provider's authorization URL in a browser, waits for the
    redirect to be captured locally, then exchanges the code with the
    server via ``client.token``.
    """
    # Random state value to tie the redirect back to this request (CSRF guard).
    xsrf_token = str(uuid4())
    redirect_uri = 'http://localhost:9004'  # azure only supports 'localhost'
    url = (
        '{oidc_auth_url}?'
        'response_type=code'
        '&client_id={client_id}'
        '&redirect_uri={redirect_uri}'
        '&scope=openid%20profile%20email'
        '&state={state}'
    ).format(
        oidc_auth_url=oidc_auth_url,
        client_id=client_id,
        redirect_uri=redirect_uri,
        state=xsrf_token
    )
    webbrowser.open(url, new=0, autoraise=True)
    # TokenHandler presumably listens on port 9004 for the provider
    # redirect carrying the code — verify against its implementation.
    auth = TokenHandler()
    access_token = auth.get_access_token(xsrf_token)
    data = {
        'code': access_token,
        'clientId': client_id,
        'redirectUri': redirect_uri
    }
    # Exchange the captured code for a server-side token.
    return client.token('openid', data)
|
<commit_before>
import webbrowser
from uuid import uuid4
from alertaclient.auth.token import TokenHandler
def login(client, oidc_auth_url, client_id):
    """Perform an interactive OIDC authorization-code login.

    Opens the provider's authorization URL in a browser, waits for the
    redirect to be captured locally, then exchanges the code with the
    server via ``client.token``.
    """
    # Random state value to tie the redirect back to this request (CSRF guard).
    xsrf_token = str(uuid4())
    redirect_uri = 'http://127.0.0.1:9004'
    url = (
        '{oidc_auth_url}?'
        'response_type=code'
        '&client_id={client_id}'
        '&redirect_uri={redirect_uri}'
        '&scope=openid%20profile%20email'
        '&state={state}'
    ).format(
        oidc_auth_url=oidc_auth_url,
        client_id=client_id,
        redirect_uri=redirect_uri,
        state=xsrf_token
    )
    webbrowser.open(url, new=0, autoraise=True)
    # TokenHandler presumably listens on port 9004 for the provider
    # redirect carrying the code — verify against its implementation.
    auth = TokenHandler()
    access_token = auth.get_access_token(xsrf_token)
    data = {
        'code': access_token,
        'clientId': client_id,
        'redirectUri': redirect_uri
    }
    # Exchange the captured code for a server-side token.
    return client.token('openid', data)
<commit_msg>Use localhost instead of 127.0.0.1<commit_after>
|
import webbrowser
from uuid import uuid4
from alertaclient.auth.token import TokenHandler
def login(client, oidc_auth_url, client_id):
    """Perform an interactive OIDC authorization-code login.

    Opens the provider's authorization URL in a browser, waits for the
    redirect to be captured locally, then exchanges the code with the
    server via ``client.token``.
    """
    # Random state value to tie the redirect back to this request (CSRF guard).
    xsrf_token = str(uuid4())
    redirect_uri = 'http://localhost:9004'  # azure only supports 'localhost'
    url = (
        '{oidc_auth_url}?'
        'response_type=code'
        '&client_id={client_id}'
        '&redirect_uri={redirect_uri}'
        '&scope=openid%20profile%20email'
        '&state={state}'
    ).format(
        oidc_auth_url=oidc_auth_url,
        client_id=client_id,
        redirect_uri=redirect_uri,
        state=xsrf_token
    )
    webbrowser.open(url, new=0, autoraise=True)
    # TokenHandler presumably listens on port 9004 for the provider
    # redirect carrying the code — verify against its implementation.
    auth = TokenHandler()
    access_token = auth.get_access_token(xsrf_token)
    data = {
        'code': access_token,
        'clientId': client_id,
        'redirectUri': redirect_uri
    }
    # Exchange the captured code for a server-side token.
    return client.token('openid', data)
|
import webbrowser
from uuid import uuid4
from alertaclient.auth.token import TokenHandler
def login(client, oidc_auth_url, client_id):
    """Perform an interactive OIDC authorization-code login.

    Opens the provider's authorization URL in a browser, waits for the
    redirect to be captured locally, then exchanges the code with the
    server via ``client.token``.
    """
    # Random state value to tie the redirect back to this request (CSRF guard).
    xsrf_token = str(uuid4())
    redirect_uri = 'http://127.0.0.1:9004'
    url = (
        '{oidc_auth_url}?'
        'response_type=code'
        '&client_id={client_id}'
        '&redirect_uri={redirect_uri}'
        '&scope=openid%20profile%20email'
        '&state={state}'
    ).format(
        oidc_auth_url=oidc_auth_url,
        client_id=client_id,
        redirect_uri=redirect_uri,
        state=xsrf_token
    )
    webbrowser.open(url, new=0, autoraise=True)
    # TokenHandler presumably listens on port 9004 for the provider
    # redirect carrying the code — verify against its implementation.
    auth = TokenHandler()
    access_token = auth.get_access_token(xsrf_token)
    data = {
        'code': access_token,
        'clientId': client_id,
        'redirectUri': redirect_uri
    }
    # Exchange the captured code for a server-side token.
    return client.token('openid', data)
Use localhost instead of 127.0.0.1
import webbrowser
from uuid import uuid4
from alertaclient.auth.token import TokenHandler
def login(client, oidc_auth_url, client_id):
    """Perform an interactive OIDC authorization-code login.

    Opens the provider's authorization URL in a browser, waits for the
    redirect to be captured locally, then exchanges the code with the
    server via ``client.token``.
    """
    # Random state value to tie the redirect back to this request (CSRF guard).
    xsrf_token = str(uuid4())
    redirect_uri = 'http://localhost:9004'  # azure only supports 'localhost'
    url = (
        '{oidc_auth_url}?'
        'response_type=code'
        '&client_id={client_id}'
        '&redirect_uri={redirect_uri}'
        '&scope=openid%20profile%20email'
        '&state={state}'
    ).format(
        oidc_auth_url=oidc_auth_url,
        client_id=client_id,
        redirect_uri=redirect_uri,
        state=xsrf_token
    )
    webbrowser.open(url, new=0, autoraise=True)
    # TokenHandler presumably listens on port 9004 for the provider
    # redirect carrying the code — verify against its implementation.
    auth = TokenHandler()
    access_token = auth.get_access_token(xsrf_token)
    data = {
        'code': access_token,
        'clientId': client_id,
        'redirectUri': redirect_uri
    }
    # Exchange the captured code for a server-side token.
    return client.token('openid', data)
|
<commit_before>
import webbrowser
from uuid import uuid4
from alertaclient.auth.token import TokenHandler
def login(client, oidc_auth_url, client_id):
    """Perform an interactive OIDC authorization-code login.

    Opens the provider's authorization URL in a browser, waits for the
    redirect to be captured locally, then exchanges the code with the
    server via ``client.token``.
    """
    # Random state value to tie the redirect back to this request (CSRF guard).
    xsrf_token = str(uuid4())
    redirect_uri = 'http://127.0.0.1:9004'
    url = (
        '{oidc_auth_url}?'
        'response_type=code'
        '&client_id={client_id}'
        '&redirect_uri={redirect_uri}'
        '&scope=openid%20profile%20email'
        '&state={state}'
    ).format(
        oidc_auth_url=oidc_auth_url,
        client_id=client_id,
        redirect_uri=redirect_uri,
        state=xsrf_token
    )
    webbrowser.open(url, new=0, autoraise=True)
    # TokenHandler presumably listens on port 9004 for the provider
    # redirect carrying the code — verify against its implementation.
    auth = TokenHandler()
    access_token = auth.get_access_token(xsrf_token)
    data = {
        'code': access_token,
        'clientId': client_id,
        'redirectUri': redirect_uri
    }
    # Exchange the captured code for a server-side token.
    return client.token('openid', data)
<commit_msg>Use localhost instead of 127.0.0.1<commit_after>
import webbrowser
from uuid import uuid4
from alertaclient.auth.token import TokenHandler
def login(client, oidc_auth_url, client_id):
    """Perform an interactive OIDC authorization-code login.

    Opens the provider's authorization URL in a browser, waits for the
    redirect to be captured locally, then exchanges the code with the
    server via ``client.token``.
    """
    # Random state value to tie the redirect back to this request (CSRF guard).
    xsrf_token = str(uuid4())
    redirect_uri = 'http://localhost:9004'  # azure only supports 'localhost'
    url = (
        '{oidc_auth_url}?'
        'response_type=code'
        '&client_id={client_id}'
        '&redirect_uri={redirect_uri}'
        '&scope=openid%20profile%20email'
        '&state={state}'
    ).format(
        oidc_auth_url=oidc_auth_url,
        client_id=client_id,
        redirect_uri=redirect_uri,
        state=xsrf_token
    )
    webbrowser.open(url, new=0, autoraise=True)
    # TokenHandler presumably listens on port 9004 for the provider
    # redirect carrying the code — verify against its implementation.
    auth = TokenHandler()
    access_token = auth.get_access_token(xsrf_token)
    data = {
        'code': access_token,
        'clientId': client_id,
        'redirectUri': redirect_uri
    }
    # Exchange the captured code for a server-side token.
    return client.token('openid', data)
|
09cd2fb49950d654b6c30cb250f1f8acac39fc23
|
accelerator/migrations/0074_update_url_to_community.py
|
accelerator/migrations/0074_update_url_to_community.py
|
# Generated by Django 2.2.10 on 2021-11-05 12:29
from django.db import migrations
from django.db.models.query_utils import Q
def update_url_to_community(apps, schema_editor):
    """Point existing SiteRedirectPage rows at the new /community URLs.

    Rewrites redirects that targeted the old people/mentor directory pages
    (including the Israel-refined directory view) to /community equivalents.
    """
    people_url = ["/people", "/people/"]
    mentor_url = "/directory"
    community_url = "/community"
    # URL-encoded refinement querystrings; the facet name changed from
    # home_program_family to program_family_names.
    mentor_refinement_url = ("/directory/?refinementList%5B"
                             "home_program_family%5D%5B0%5D=Israel")
    community_refinement_url = ("/community/?refinementList%5B"
                                "program_family_names%5D%5B0%5D=Israel")
    # Use the historical model so the migration runs at this schema state.
    SiteRedirectPage = apps.get_model('accelerator', 'SiteRedirectPage')
    SiteRedirectPage.objects.filter(
        Q(new_url__in=people_url) | Q(new_url=mentor_url)
    ).update(new_url=community_url)
    SiteRedirectPage.objects.filter(
        new_url=mentor_refinement_url
    ).update(new_url=community_refinement_url)


class Migration(migrations.Migration):
    # Data-only migration; reverse is a no-op (old URLs are not restored).
    dependencies = [
        ('accelerator', '0073_auto_20210909_1706'),
    ]
    operations = [
        migrations.RunPython(update_url_to_community,
                             migrations.RunPython.noop)
    ]
|
# Generated by Django 2.2.10 on 2021-11-05 12:29
from django.db import migrations
from django.db.models.query_utils import Q
def update_url_to_community(apps, schema_editor):
    """Point existing SiteRedirectPage rows at the new /community URLs.

    Rewrites redirects that targeted the old people/mentor directory pages
    (including the Israel-refined directory view) to /community equivalents.
    """
    people_url = ["/people", "/people/"]
    mentor_url = "/directory"
    community_url = "/community"
    # URL-encoded refinement querystrings; the facet name changed from
    # home_program_family to program_family_names.
    mentor_refinement_url = ("/directory/?refinementList%5B"
                             "home_program_family%5D%5B0%5D=Israel")
    community_refinement_url = ("/community/?refinementList%5B"
                                "program_family_names%5D%5B0%5D=Israel")
    # Use the historical model so the migration runs at this schema state.
    SiteRedirectPage = apps.get_model('accelerator', 'SiteRedirectPage')
    SiteRedirectPage.objects.filter(
        Q(new_url__in=people_url) | Q(new_url=mentor_url)
    ).update(new_url=community_url)
    SiteRedirectPage.objects.filter(
        new_url=mentor_refinement_url
    ).update(new_url=community_refinement_url)


class Migration(migrations.Migration):
    # Data-only migration; reverse is a no-op (old URLs are not restored).
    dependencies = [
        ('accelerator', '0073_auto_20210909_1706'),
    ]
    operations = [
        migrations.RunPython(update_url_to_community,
                             migrations.RunPython.noop)
    ]
|
Remove unused import and fix linting issues
|
[AC-9046] Remove unused import and fix linting issues
|
Python
|
mit
|
masschallenge/django-accelerator,masschallenge/django-accelerator
|
# Generated by Django 2.2.10 on 2021-11-05 12:29
from django.db import migrations
from django.db.models.query_utils import Q
def update_url_to_community(apps, schema_editor):
    """Point existing SiteRedirectPage rows at the new /community URLs.

    Rewrites redirects that targeted the old people/mentor directory pages
    (including the Israel-refined directory view) to /community equivalents.
    """
    people_url = ["/people", "/people/"]
    mentor_url = "/directory"
    community_url = "/community"
    # URL-encoded refinement querystrings; the facet name changed from
    # home_program_family to program_family_names.
    mentor_refinement_url = ("/directory/?refinementList%5B"
                             "home_program_family%5D%5B0%5D=Israel")
    community_refinement_url = ("/community/?refinementList%5B"
                                "program_family_names%5D%5B0%5D=Israel")
    # Use the historical model so the migration runs at this schema state.
    SiteRedirectPage = apps.get_model('accelerator', 'SiteRedirectPage')
    SiteRedirectPage.objects.filter(
        Q(new_url__in=people_url) | Q(new_url=mentor_url)
    ).update(new_url=community_url)
    SiteRedirectPage.objects.filter(
        new_url=mentor_refinement_url
    ).update(new_url=community_refinement_url)


class Migration(migrations.Migration):
    # Data-only migration; reverse is a no-op (old URLs are not restored).
    dependencies = [
        ('accelerator', '0073_auto_20210909_1706'),
    ]
    operations = [
        migrations.RunPython(update_url_to_community,
                             migrations.RunPython.noop)
    ]
[AC-9046] Remove unused import and fix linting issues
|
# Generated by Django 2.2.10 on 2021-11-05 12:29
from django.db import migrations
from django.db.models.query_utils import Q
def update_url_to_community(apps, schema_editor):
    """Point existing SiteRedirectPage rows at the new /community URLs.

    Rewrites redirects that targeted the old people/mentor directory pages
    (including the Israel-refined directory view) to /community equivalents.
    """
    people_url = ["/people", "/people/"]
    mentor_url = "/directory"
    community_url = "/community"
    # URL-encoded refinement querystrings; the facet name changed from
    # home_program_family to program_family_names.
    mentor_refinement_url = ("/directory/?refinementList%5B"
                             "home_program_family%5D%5B0%5D=Israel")
    community_refinement_url = ("/community/?refinementList%5B"
                                "program_family_names%5D%5B0%5D=Israel")
    # Use the historical model so the migration runs at this schema state.
    SiteRedirectPage = apps.get_model('accelerator', 'SiteRedirectPage')
    SiteRedirectPage.objects.filter(
        Q(new_url__in=people_url) | Q(new_url=mentor_url)
    ).update(new_url=community_url)
    SiteRedirectPage.objects.filter(
        new_url=mentor_refinement_url
    ).update(new_url=community_refinement_url)


class Migration(migrations.Migration):
    # Data-only migration; reverse is a no-op (old URLs are not restored).
    dependencies = [
        ('accelerator', '0073_auto_20210909_1706'),
    ]
    operations = [
        migrations.RunPython(update_url_to_community,
                             migrations.RunPython.noop)
    ]
|
<commit_before># Generated by Django 2.2.10 on 2021-11-05 12:29
from django.db import migrations
from django.db.models.query_utils import Q
def update_url_to_community(apps, schema_editor):
    """Point existing SiteRedirectPage rows at the new /community URLs.

    Rewrites redirects that targeted the old people/mentor directory pages
    (including the Israel-refined directory view) to /community equivalents.
    """
    people_url = ["/people", "/people/"]
    mentor_url = "/directory"
    community_url = "/community"
    # URL-encoded refinement querystrings; the facet name changed from
    # home_program_family to program_family_names.
    mentor_refinement_url = ("/directory/?refinementList%5B"
                             "home_program_family%5D%5B0%5D=Israel")
    community_refinement_url = ("/community/?refinementList%5B"
                                "program_family_names%5D%5B0%5D=Israel")
    # Use the historical model so the migration runs at this schema state.
    SiteRedirectPage = apps.get_model('accelerator', 'SiteRedirectPage')
    SiteRedirectPage.objects.filter(
        Q(new_url__in=people_url) | Q(new_url=mentor_url)
    ).update(new_url=community_url)
    SiteRedirectPage.objects.filter(
        new_url=mentor_refinement_url
    ).update(new_url=community_refinement_url)


class Migration(migrations.Migration):
    # Data-only migration; reverse is a no-op (old URLs are not restored).
    dependencies = [
        ('accelerator', '0073_auto_20210909_1706'),
    ]
    operations = [
        migrations.RunPython(update_url_to_community,
                             migrations.RunPython.noop)
    ]
<commit_msg>[AC-9046] Remove unused import and fix linting issues<commit_after>
|
# Generated by Django 2.2.10 on 2021-11-05 12:29
from django.db import migrations
from django.db.models.query_utils import Q
def update_url_to_community(apps, schema_editor):
    """Point existing SiteRedirectPage rows at the new /community URLs.

    Rewrites redirects that targeted the old people/mentor directory pages
    (including the Israel-refined directory view) to /community equivalents.
    """
    people_url = ["/people", "/people/"]
    mentor_url = "/directory"
    community_url = "/community"
    # URL-encoded refinement querystrings; the facet name changed from
    # home_program_family to program_family_names.
    mentor_refinement_url = ("/directory/?refinementList%5B"
                             "home_program_family%5D%5B0%5D=Israel")
    community_refinement_url = ("/community/?refinementList%5B"
                                "program_family_names%5D%5B0%5D=Israel")
    # Use the historical model so the migration runs at this schema state.
    SiteRedirectPage = apps.get_model('accelerator', 'SiteRedirectPage')
    SiteRedirectPage.objects.filter(
        Q(new_url__in=people_url) | Q(new_url=mentor_url)
    ).update(new_url=community_url)
    SiteRedirectPage.objects.filter(
        new_url=mentor_refinement_url
    ).update(new_url=community_refinement_url)


class Migration(migrations.Migration):
    # Data-only migration; reverse is a no-op (old URLs are not restored).
    dependencies = [
        ('accelerator', '0073_auto_20210909_1706'),
    ]
    operations = [
        migrations.RunPython(update_url_to_community,
                             migrations.RunPython.noop)
    ]
|
# Generated by Django 2.2.10 on 2021-11-05 12:29
from django.db import migrations
from django.db.models.query_utils import Q
def update_url_to_community(apps, schema_editor):
    """Point existing SiteRedirectPage rows at the new /community URLs.

    Rewrites redirects that targeted the old people/mentor directory pages
    (including the Israel-refined directory view) to /community equivalents.
    """
    people_url = ["/people", "/people/"]
    mentor_url = "/directory"
    community_url = "/community"
    # URL-encoded refinement querystrings; the facet name changed from
    # home_program_family to program_family_names.
    mentor_refinement_url = ("/directory/?refinementList%5B"
                             "home_program_family%5D%5B0%5D=Israel")
    community_refinement_url = ("/community/?refinementList%5B"
                                "program_family_names%5D%5B0%5D=Israel")
    # Use the historical model so the migration runs at this schema state.
    SiteRedirectPage = apps.get_model('accelerator', 'SiteRedirectPage')
    SiteRedirectPage.objects.filter(
        Q(new_url__in=people_url) | Q(new_url=mentor_url)
    ).update(new_url=community_url)
    SiteRedirectPage.objects.filter(
        new_url=mentor_refinement_url
    ).update(new_url=community_refinement_url)


class Migration(migrations.Migration):
    # Data-only migration; reverse is a no-op (old URLs are not restored).
    dependencies = [
        ('accelerator', '0073_auto_20210909_1706'),
    ]
    operations = [
        migrations.RunPython(update_url_to_community,
                             migrations.RunPython.noop)
    ]
[AC-9046] Remove unused import and fix linting issues# Generated by Django 2.2.10 on 2021-11-05 12:29
from django.db import migrations
from django.db.models.query_utils import Q
def update_url_to_community(apps, schema_editor):
    """Point existing SiteRedirectPage rows at the new /community URLs.

    Rewrites redirects that targeted the old people/mentor directory pages
    (including the Israel-refined directory view) to /community equivalents.
    """
    people_url = ["/people", "/people/"]
    mentor_url = "/directory"
    community_url = "/community"
    # URL-encoded refinement querystrings; the facet name changed from
    # home_program_family to program_family_names.
    mentor_refinement_url = ("/directory/?refinementList%5B"
                             "home_program_family%5D%5B0%5D=Israel")
    community_refinement_url = ("/community/?refinementList%5B"
                                "program_family_names%5D%5B0%5D=Israel")
    # Use the historical model so the migration runs at this schema state.
    SiteRedirectPage = apps.get_model('accelerator', 'SiteRedirectPage')
    SiteRedirectPage.objects.filter(
        Q(new_url__in=people_url) | Q(new_url=mentor_url)
    ).update(new_url=community_url)
    SiteRedirectPage.objects.filter(
        new_url=mentor_refinement_url
    ).update(new_url=community_refinement_url)


class Migration(migrations.Migration):
    # Data-only migration; reverse is a no-op (old URLs are not restored).
    dependencies = [
        ('accelerator', '0073_auto_20210909_1706'),
    ]
    operations = [
        migrations.RunPython(update_url_to_community,
                             migrations.RunPython.noop)
    ]
|
<commit_before># Generated by Django 2.2.10 on 2021-11-05 12:29
from django.db import migrations
from django.db.models.query_utils import Q
def update_url_to_community(apps, schema_editor):
    """Point existing SiteRedirectPage rows at the new /community URLs.

    Rewrites redirects that targeted the old people/mentor directory pages
    (including the Israel-refined directory view) to /community equivalents.
    """
    people_url = ["/people", "/people/"]
    mentor_url = "/directory"
    community_url = "/community"
    # URL-encoded refinement querystrings; the facet name changed from
    # home_program_family to program_family_names.
    mentor_refinement_url = ("/directory/?refinementList%5B"
                             "home_program_family%5D%5B0%5D=Israel")
    community_refinement_url = ("/community/?refinementList%5B"
                                "program_family_names%5D%5B0%5D=Israel")
    # Use the historical model so the migration runs at this schema state.
    SiteRedirectPage = apps.get_model('accelerator', 'SiteRedirectPage')
    SiteRedirectPage.objects.filter(
        Q(new_url__in=people_url) | Q(new_url=mentor_url)
    ).update(new_url=community_url)
    SiteRedirectPage.objects.filter(
        new_url=mentor_refinement_url
    ).update(new_url=community_refinement_url)


class Migration(migrations.Migration):
    # Data-only migration; reverse is a no-op (old URLs are not restored).
    dependencies = [
        ('accelerator', '0073_auto_20210909_1706'),
    ]
    operations = [
        migrations.RunPython(update_url_to_community,
                             migrations.RunPython.noop)
    ]
<commit_msg>[AC-9046] Remove unused import and fix linting issues<commit_after># Generated by Django 2.2.10 on 2021-11-05 12:29
from django.db import migrations
from django.db.models.query_utils import Q
def update_url_to_community(apps, schema_editor):
    """Point existing SiteRedirectPage rows at the new /community URLs.

    Rewrites redirects that targeted the old people/mentor directory pages
    (including the Israel-refined directory view) to /community equivalents.
    """
    people_url = ["/people", "/people/"]
    mentor_url = "/directory"
    community_url = "/community"
    # URL-encoded refinement querystrings; the facet name changed from
    # home_program_family to program_family_names.
    mentor_refinement_url = ("/directory/?refinementList%5B"
                             "home_program_family%5D%5B0%5D=Israel")
    community_refinement_url = ("/community/?refinementList%5B"
                                "program_family_names%5D%5B0%5D=Israel")
    # Use the historical model so the migration runs at this schema state.
    SiteRedirectPage = apps.get_model('accelerator', 'SiteRedirectPage')
    SiteRedirectPage.objects.filter(
        Q(new_url__in=people_url) | Q(new_url=mentor_url)
    ).update(new_url=community_url)
    SiteRedirectPage.objects.filter(
        new_url=mentor_refinement_url
    ).update(new_url=community_refinement_url)


class Migration(migrations.Migration):
    # Data-only migration; reverse is a no-op (old URLs are not restored).
    dependencies = [
        ('accelerator', '0073_auto_20210909_1706'),
    ]
    operations = [
        migrations.RunPython(update_url_to_community,
                             migrations.RunPython.noop)
    ]
|
b3011d19e937694bca44a5677a12811188577084
|
docker_xylem/compat.py
|
docker_xylem/compat.py
|
"""
Very limited reimplementation of some of `twisted.logger.Logger`'s public
API so we can use older Twisted versions that don't have the new logging
features.
"""
try:
from twisted.logger import Logger
except ImportError:
import logging
from twisted.python import log
class Logger(object):
    """Minimal stand-in for twisted.logger.Logger on older Twisted.

    Implements only the format-string/keyword API used by this package,
    delegating to the legacy twisted.python.log system.
    """

    def info(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.INFO)

    def warn(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.WARNING)
|
"""
Very limited reimplementation of some of `twisted.logger.Logger`'s public
API so we can use older Twisted versions that don't have the new logging
features.
"""
try:
from twisted.logger import Logger
except ImportError:
import logging
from twisted.python import log
class Logger(object):
    """Minimal stand-in for twisted.logger.Logger on older Twisted.

    Implements only the format-string/keyword API used by this package,
    delegating to the legacy twisted.python.log system.
    """

    def debug(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.DEBUG)

    def info(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.INFO)

    def warn(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.WARNING)

    def error(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.ERROR)

    def critical(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.CRITICAL)
|
Add some more logging methods. (@JayH5)
|
Add some more logging methods. (@JayH5)
|
Python
|
mit
|
praekeltfoundation/docker-xylem,praekeltfoundation/docker-xylem
|
"""
Very limited reimplementation of some of `twisted.logger.Logger`'s public
API so we can use older Twisted versions that don't have the new logging
features.
"""
try:
from twisted.logger import Logger
except ImportError:
import logging
from twisted.python import log
class Logger(object):
    """Minimal stand-in for twisted.logger.Logger on older Twisted.

    Implements only the format-string/keyword API used by this package,
    delegating to the legacy twisted.python.log system.
    """

    def info(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.INFO)

    def warn(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.WARNING)
Add some more logging methods. (@JayH5)
|
"""
Very limited reimplementation of some of `twisted.logger.Logger`'s public
API so we can use older Twisted versions that don't have the new logging
features.
"""
try:
from twisted.logger import Logger
except ImportError:
import logging
from twisted.python import log
class Logger(object):
    """Minimal stand-in for twisted.logger.Logger on older Twisted.

    Implements only the format-string/keyword API used by this package,
    delegating to the legacy twisted.python.log system.
    """

    def debug(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.DEBUG)

    def info(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.INFO)

    def warn(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.WARNING)

    def error(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.ERROR)

    def critical(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.CRITICAL)
|
<commit_before>"""
Very limited reimplementation of some of `twisted.logger.Logger`'s public
API so we can use older Twisted versions that don't have the new logging
features.
"""
try:
from twisted.logger import Logger
except ImportError:
import logging
from twisted.python import log
class Logger(object):
    """Minimal stand-in for twisted.logger.Logger on older Twisted.

    Implements only the format-string/keyword API used by this package,
    delegating to the legacy twisted.python.log system.
    """

    def info(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.INFO)

    def warn(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.WARNING)
<commit_msg>Add some more logging methods. (@JayH5)<commit_after>
|
"""
Very limited reimplementation of some of `twisted.logger.Logger`'s public
API so we can use older Twisted versions that don't have the new logging
features.
"""
try:
from twisted.logger import Logger
except ImportError:
import logging
from twisted.python import log
class Logger(object):
    """Minimal stand-in for twisted.logger.Logger on older Twisted.

    Implements only the format-string/keyword API used by this package,
    delegating to the legacy twisted.python.log system.
    """

    def debug(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.DEBUG)

    def info(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.INFO)

    def warn(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.WARNING)

    def error(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.ERROR)

    def critical(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.CRITICAL)
|
"""
Very limited reimplementation of some of `twisted.logger.Logger`'s public
API so we can use older Twisted versions that don't have the new logging
features.
"""
try:
from twisted.logger import Logger
except ImportError:
import logging
from twisted.python import log
class Logger(object):
    """Minimal stand-in for twisted.logger.Logger on older Twisted.

    Implements only the format-string/keyword API used by this package,
    delegating to the legacy twisted.python.log system.
    """

    def info(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.INFO)

    def warn(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.WARNING)
Add some more logging methods. (@JayH5)"""
Very limited reimplementation of some of `twisted.logger.Logger`'s public
API so we can use older Twisted versions that don't have the new logging
features.
"""
try:
from twisted.logger import Logger
except ImportError:
import logging
from twisted.python import log
class Logger(object):
    """Minimal stand-in for twisted.logger.Logger on older Twisted.

    Implements only the format-string/keyword API used by this package,
    delegating to the legacy twisted.python.log system.
    """

    def debug(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.DEBUG)

    def info(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.INFO)

    def warn(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.WARNING)

    def error(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.ERROR)

    def critical(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.CRITICAL)
|
<commit_before>"""
Very limited reimplementation of some of `twisted.logger.Logger`'s public
API so we can use older Twisted versions that don't have the new logging
features.
"""
try:
from twisted.logger import Logger
except ImportError:
import logging
from twisted.python import log
class Logger(object):
    """Minimal stand-in for twisted.logger.Logger on older Twisted.

    Implements only the format-string/keyword API used by this package,
    delegating to the legacy twisted.python.log system.
    """

    def info(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.INFO)

    def warn(self, format, **kw):
        log.msg(format.format(**kw), logLevel=logging.WARNING)
<commit_msg>Add some more logging methods. (@JayH5)<commit_after>"""
Very limited reimplementation of some of `twisted.logger.Logger`'s public
API so we can use older Twisted versions that don't have the new logging
features.
"""
try:
from twisted.logger import Logger
except ImportError:
import logging
from twisted.python import log
class Logger(object):
def debug(self, format, **kw):
log.msg(format.format(**kw), logLevel=logging.DEBUG)
def info(self, format, **kw):
log.msg(format.format(**kw), logLevel=logging.INFO)
def warn(self, format, **kw):
log.msg(format.format(**kw), logLevel=logging.WARNING)
def error(self, format, **kw):
log.msg(format.format(**kw), logLevel=logging.ERROR)
def critical(self, format, **kw):
log.msg(format.format(**kw), logLevel=logging.CRITICAL)
|
77beb7f5a1503481e28179f1ea84531e1ece99ed
|
test/test_urlification.py
|
test/test_urlification.py
|
from tiddlywebplugins.markdown import render
from tiddlyweb.model.tiddler import Tiddler
def test_urlification():
tiddler = Tiddler('blah')
tiddler.text = """
lorem ipsum http://example.org dolor sit amet
... http://www.example.com/foo/bar ...
"""
environ = {'tiddlyweb.config': {'markdown.wiki_link_base': ''}}
output = render(tiddler, environ)
for url in ["http://example.org", "http://www.example.com/foo/bar"]:
assert '<a href="%(url)s">%(url)s</a>' % { "url": url } in output
def test_precedence():
tiddler = Tiddler('cow')
tiddler.text = """
* [Pry](http://www.philaquilina.com/2012/05/17/tossing-out-irb-for-pry/)
* [Rails console](http://37signals.com/svn/posts/3176-three-quick-rails-console-tips)
"""
environ = {'tiddlyweb.config': {'markdown.wiki_link_base': ''}}
output = render(tiddler, environ)
assert "http://<a href" not in output
|
from tiddlywebplugins.markdown import render
from tiddlyweb.model.tiddler import Tiddler
def test_urlification():
tiddler = Tiddler('blah')
tiddler.text = """
lorem ipsum http://example.org dolor sit amet
... http://www.example.com/foo/bar ...
"""
environ = {'tiddlyweb.config': {'markdown.wiki_link_base': ''}}
output = render(tiddler, environ)
for url in ["http://example.org", "http://www.example.com/foo/bar"]:
assert '<a href="%(url)s">%(url)s</a>' % { "url": url } in output
def test_precedence():
tiddler = Tiddler('cow')
tiddler.text = """
* [Pry](http://www.philaquilina.com/2012/05/17/tossing-out-irb-for-pry/)
* [Rails console](http://37signals.com/svn/posts/3176-three-quick-rails-console-tips)
"""
environ = {'tiddlyweb.config': {'markdown.wiki_link_base': ''}}
output = render(tiddler, environ)
assert "http://<a href" not in output
|
Make work with modern markdown2
|
Make work with modern markdown2
Within a pre block, links don't link, which is good.
|
Python
|
bsd-2-clause
|
tiddlyweb/tiddlywebplugins.markdown
|
from tiddlywebplugins.markdown import render
from tiddlyweb.model.tiddler import Tiddler
def test_urlification():
tiddler = Tiddler('blah')
tiddler.text = """
lorem ipsum http://example.org dolor sit amet
... http://www.example.com/foo/bar ...
"""
environ = {'tiddlyweb.config': {'markdown.wiki_link_base': ''}}
output = render(tiddler, environ)
for url in ["http://example.org", "http://www.example.com/foo/bar"]:
assert '<a href="%(url)s">%(url)s</a>' % { "url": url } in output
def test_precedence():
tiddler = Tiddler('cow')
tiddler.text = """
* [Pry](http://www.philaquilina.com/2012/05/17/tossing-out-irb-for-pry/)
* [Rails console](http://37signals.com/svn/posts/3176-three-quick-rails-console-tips)
"""
environ = {'tiddlyweb.config': {'markdown.wiki_link_base': ''}}
output = render(tiddler, environ)
assert "http://<a href" not in output
Make work with modern markdown2
Within a pre block, links don't link, which is good.
|
from tiddlywebplugins.markdown import render
from tiddlyweb.model.tiddler import Tiddler
def test_urlification():
tiddler = Tiddler('blah')
tiddler.text = """
lorem ipsum http://example.org dolor sit amet
... http://www.example.com/foo/bar ...
"""
environ = {'tiddlyweb.config': {'markdown.wiki_link_base': ''}}
output = render(tiddler, environ)
for url in ["http://example.org", "http://www.example.com/foo/bar"]:
assert '<a href="%(url)s">%(url)s</a>' % { "url": url } in output
def test_precedence():
tiddler = Tiddler('cow')
tiddler.text = """
* [Pry](http://www.philaquilina.com/2012/05/17/tossing-out-irb-for-pry/)
* [Rails console](http://37signals.com/svn/posts/3176-three-quick-rails-console-tips)
"""
environ = {'tiddlyweb.config': {'markdown.wiki_link_base': ''}}
output = render(tiddler, environ)
assert "http://<a href" not in output
|
<commit_before>from tiddlywebplugins.markdown import render
from tiddlyweb.model.tiddler import Tiddler
def test_urlification():
tiddler = Tiddler('blah')
tiddler.text = """
lorem ipsum http://example.org dolor sit amet
... http://www.example.com/foo/bar ...
"""
environ = {'tiddlyweb.config': {'markdown.wiki_link_base': ''}}
output = render(tiddler, environ)
for url in ["http://example.org", "http://www.example.com/foo/bar"]:
assert '<a href="%(url)s">%(url)s</a>' % { "url": url } in output
def test_precedence():
tiddler = Tiddler('cow')
tiddler.text = """
* [Pry](http://www.philaquilina.com/2012/05/17/tossing-out-irb-for-pry/)
* [Rails console](http://37signals.com/svn/posts/3176-three-quick-rails-console-tips)
"""
environ = {'tiddlyweb.config': {'markdown.wiki_link_base': ''}}
output = render(tiddler, environ)
assert "http://<a href" not in output
<commit_msg>Make work with modern markdown2
Within a pre block, links don't link, which is good.<commit_after>
|
from tiddlywebplugins.markdown import render
from tiddlyweb.model.tiddler import Tiddler
def test_urlification():
tiddler = Tiddler('blah')
tiddler.text = """
lorem ipsum http://example.org dolor sit amet
... http://www.example.com/foo/bar ...
"""
environ = {'tiddlyweb.config': {'markdown.wiki_link_base': ''}}
output = render(tiddler, environ)
for url in ["http://example.org", "http://www.example.com/foo/bar"]:
assert '<a href="%(url)s">%(url)s</a>' % { "url": url } in output
def test_precedence():
tiddler = Tiddler('cow')
tiddler.text = """
* [Pry](http://www.philaquilina.com/2012/05/17/tossing-out-irb-for-pry/)
* [Rails console](http://37signals.com/svn/posts/3176-three-quick-rails-console-tips)
"""
environ = {'tiddlyweb.config': {'markdown.wiki_link_base': ''}}
output = render(tiddler, environ)
assert "http://<a href" not in output
|
from tiddlywebplugins.markdown import render
from tiddlyweb.model.tiddler import Tiddler
def test_urlification():
tiddler = Tiddler('blah')
tiddler.text = """
lorem ipsum http://example.org dolor sit amet
... http://www.example.com/foo/bar ...
"""
environ = {'tiddlyweb.config': {'markdown.wiki_link_base': ''}}
output = render(tiddler, environ)
for url in ["http://example.org", "http://www.example.com/foo/bar"]:
assert '<a href="%(url)s">%(url)s</a>' % { "url": url } in output
def test_precedence():
tiddler = Tiddler('cow')
tiddler.text = """
* [Pry](http://www.philaquilina.com/2012/05/17/tossing-out-irb-for-pry/)
* [Rails console](http://37signals.com/svn/posts/3176-three-quick-rails-console-tips)
"""
environ = {'tiddlyweb.config': {'markdown.wiki_link_base': ''}}
output = render(tiddler, environ)
assert "http://<a href" not in output
Make work with modern markdown2
Within a pre block, links don't link, which is good.from tiddlywebplugins.markdown import render
from tiddlyweb.model.tiddler import Tiddler
def test_urlification():
tiddler = Tiddler('blah')
tiddler.text = """
lorem ipsum http://example.org dolor sit amet
... http://www.example.com/foo/bar ...
"""
environ = {'tiddlyweb.config': {'markdown.wiki_link_base': ''}}
output = render(tiddler, environ)
for url in ["http://example.org", "http://www.example.com/foo/bar"]:
assert '<a href="%(url)s">%(url)s</a>' % { "url": url } in output
def test_precedence():
tiddler = Tiddler('cow')
tiddler.text = """
* [Pry](http://www.philaquilina.com/2012/05/17/tossing-out-irb-for-pry/)
* [Rails console](http://37signals.com/svn/posts/3176-three-quick-rails-console-tips)
"""
environ = {'tiddlyweb.config': {'markdown.wiki_link_base': ''}}
output = render(tiddler, environ)
assert "http://<a href" not in output
|
<commit_before>from tiddlywebplugins.markdown import render
from tiddlyweb.model.tiddler import Tiddler
def test_urlification():
tiddler = Tiddler('blah')
tiddler.text = """
lorem ipsum http://example.org dolor sit amet
... http://www.example.com/foo/bar ...
"""
environ = {'tiddlyweb.config': {'markdown.wiki_link_base': ''}}
output = render(tiddler, environ)
for url in ["http://example.org", "http://www.example.com/foo/bar"]:
assert '<a href="%(url)s">%(url)s</a>' % { "url": url } in output
def test_precedence():
tiddler = Tiddler('cow')
tiddler.text = """
* [Pry](http://www.philaquilina.com/2012/05/17/tossing-out-irb-for-pry/)
* [Rails console](http://37signals.com/svn/posts/3176-three-quick-rails-console-tips)
"""
environ = {'tiddlyweb.config': {'markdown.wiki_link_base': ''}}
output = render(tiddler, environ)
assert "http://<a href" not in output
<commit_msg>Make work with modern markdown2
Within a pre block, links don't link, which is good.<commit_after>from tiddlywebplugins.markdown import render
from tiddlyweb.model.tiddler import Tiddler
def test_urlification():
tiddler = Tiddler('blah')
tiddler.text = """
lorem ipsum http://example.org dolor sit amet
... http://www.example.com/foo/bar ...
"""
environ = {'tiddlyweb.config': {'markdown.wiki_link_base': ''}}
output = render(tiddler, environ)
for url in ["http://example.org", "http://www.example.com/foo/bar"]:
assert '<a href="%(url)s">%(url)s</a>' % { "url": url } in output
def test_precedence():
tiddler = Tiddler('cow')
tiddler.text = """
* [Pry](http://www.philaquilina.com/2012/05/17/tossing-out-irb-for-pry/)
* [Rails console](http://37signals.com/svn/posts/3176-three-quick-rails-console-tips)
"""
environ = {'tiddlyweb.config': {'markdown.wiki_link_base': ''}}
output = render(tiddler, environ)
assert "http://<a href" not in output
|
2a6f0f7fbb655c568a42493e1181aeef9fa1ead1
|
test_setup.py
|
test_setup.py
|
"""Test setup.py."""
import os
import subprocess
import sys
def test_setup():
"""Run setup.py check."""
command = [sys.executable, 'setup.py', 'check', '--metadata', '--strict']
assert subprocess.run(command).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert any(
os.path.isfile(os.path.join(directory, 'backlog'))
for directory in sys.path
)
|
"""Test setup.py."""
import os
import subprocess
import sys
def test_setup():
"""Run setup.py check."""
command = [sys.executable, 'setup.py', 'check', '--metadata', '--strict']
assert subprocess.run(command).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert any(
os.path.isfile(os.path.join(directory, 'backlog'))
for directory in os.environ['PATH'].split(':')
)
|
Use $PATH instead of sys.path
|
Use $PATH instead of sys.path
|
Python
|
lgpl-2.1
|
dmtucker/backlog
|
"""Test setup.py."""
import os
import subprocess
import sys
def test_setup():
"""Run setup.py check."""
command = [sys.executable, 'setup.py', 'check', '--metadata', '--strict']
assert subprocess.run(command).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert any(
os.path.isfile(os.path.join(directory, 'backlog'))
for directory in sys.path
)
Use $PATH instead of sys.path
|
"""Test setup.py."""
import os
import subprocess
import sys
def test_setup():
"""Run setup.py check."""
command = [sys.executable, 'setup.py', 'check', '--metadata', '--strict']
assert subprocess.run(command).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert any(
os.path.isfile(os.path.join(directory, 'backlog'))
for directory in os.environ['PATH'].split(':')
)
|
<commit_before>"""Test setup.py."""
import os
import subprocess
import sys
def test_setup():
"""Run setup.py check."""
command = [sys.executable, 'setup.py', 'check', '--metadata', '--strict']
assert subprocess.run(command).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert any(
os.path.isfile(os.path.join(directory, 'backlog'))
for directory in sys.path
)
<commit_msg>Use $PATH instead of sys.path<commit_after>
|
"""Test setup.py."""
import os
import subprocess
import sys
def test_setup():
"""Run setup.py check."""
command = [sys.executable, 'setup.py', 'check', '--metadata', '--strict']
assert subprocess.run(command).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert any(
os.path.isfile(os.path.join(directory, 'backlog'))
for directory in os.environ['PATH'].split(':')
)
|
"""Test setup.py."""
import os
import subprocess
import sys
def test_setup():
"""Run setup.py check."""
command = [sys.executable, 'setup.py', 'check', '--metadata', '--strict']
assert subprocess.run(command).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert any(
os.path.isfile(os.path.join(directory, 'backlog'))
for directory in sys.path
)
Use $PATH instead of sys.path"""Test setup.py."""
import os
import subprocess
import sys
def test_setup():
"""Run setup.py check."""
command = [sys.executable, 'setup.py', 'check', '--metadata', '--strict']
assert subprocess.run(command).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert any(
os.path.isfile(os.path.join(directory, 'backlog'))
for directory in os.environ['PATH'].split(':')
)
|
<commit_before>"""Test setup.py."""
import os
import subprocess
import sys
def test_setup():
"""Run setup.py check."""
command = [sys.executable, 'setup.py', 'check', '--metadata', '--strict']
assert subprocess.run(command).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert any(
os.path.isfile(os.path.join(directory, 'backlog'))
for directory in sys.path
)
<commit_msg>Use $PATH instead of sys.path<commit_after>"""Test setup.py."""
import os
import subprocess
import sys
def test_setup():
"""Run setup.py check."""
command = [sys.executable, 'setup.py', 'check', '--metadata', '--strict']
assert subprocess.run(command).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert any(
os.path.isfile(os.path.join(directory, 'backlog'))
for directory in os.environ['PATH'].split(':')
)
|
a4507b7dcd5d2dfc1e56497040cfca6607b6de71
|
edpwd/random_string.py
|
edpwd/random_string.py
|
# -*- coding: utf-8
from random import choice
import string
def random_string(length,
letters=True,
digits=True,
punctuation=False,
whitespace=False):
""" Returns a random string """
chars = ''
if letters:
chars += string.ascii_letters
if digits:
chars += string.digits
if punctuation:
chars += string.punctuation
if whitespace:
chars += string.whitespace
return ''.join([choice(chars) for i in range(length)])
|
# -*- coding: utf-8
import random, string
def random_string(length,
letters=True,
digits=True,
punctuation=False,
whitespace=False):
""" Returns a random string """
chars = ''
if letters:
chars += string.ascii_letters
if digits:
chars += string.digits
if punctuation:
chars += string.punctuation
if whitespace:
chars += string.whitespace
return ''.join(random.sample(chars, length))
|
Use random.sample() rather than reinventing it.
|
Use random.sample() rather than reinventing it.
|
Python
|
bsd-2-clause
|
tampakrap/edpwd
|
# -*- coding: utf-8
from random import choice
import string
def random_string(length,
letters=True,
digits=True,
punctuation=False,
whitespace=False):
""" Returns a random string """
chars = ''
if letters:
chars += string.ascii_letters
if digits:
chars += string.digits
if punctuation:
chars += string.punctuation
if whitespace:
chars += string.whitespace
return ''.join([choice(chars) for i in range(length)])
Use random.sample() rather than reinventing it.
|
# -*- coding: utf-8
import random, string
def random_string(length,
letters=True,
digits=True,
punctuation=False,
whitespace=False):
""" Returns a random string """
chars = ''
if letters:
chars += string.ascii_letters
if digits:
chars += string.digits
if punctuation:
chars += string.punctuation
if whitespace:
chars += string.whitespace
return ''.join(random.sample(chars, length))
|
<commit_before># -*- coding: utf-8
from random import choice
import string
def random_string(length,
letters=True,
digits=True,
punctuation=False,
whitespace=False):
""" Returns a random string """
chars = ''
if letters:
chars += string.ascii_letters
if digits:
chars += string.digits
if punctuation:
chars += string.punctuation
if whitespace:
chars += string.whitespace
return ''.join([choice(chars) for i in range(length)])
<commit_msg>Use random.sample() rather than reinventing it.<commit_after>
|
# -*- coding: utf-8
import random, string
def random_string(length,
letters=True,
digits=True,
punctuation=False,
whitespace=False):
""" Returns a random string """
chars = ''
if letters:
chars += string.ascii_letters
if digits:
chars += string.digits
if punctuation:
chars += string.punctuation
if whitespace:
chars += string.whitespace
return ''.join(random.sample(chars, length))
|
# -*- coding: utf-8
from random import choice
import string
def random_string(length,
letters=True,
digits=True,
punctuation=False,
whitespace=False):
""" Returns a random string """
chars = ''
if letters:
chars += string.ascii_letters
if digits:
chars += string.digits
if punctuation:
chars += string.punctuation
if whitespace:
chars += string.whitespace
return ''.join([choice(chars) for i in range(length)])
Use random.sample() rather than reinventing it.# -*- coding: utf-8
import random, string
def random_string(length,
letters=True,
digits=True,
punctuation=False,
whitespace=False):
""" Returns a random string """
chars = ''
if letters:
chars += string.ascii_letters
if digits:
chars += string.digits
if punctuation:
chars += string.punctuation
if whitespace:
chars += string.whitespace
return ''.join(random.sample(chars, length))
|
<commit_before># -*- coding: utf-8
from random import choice
import string
def random_string(length,
letters=True,
digits=True,
punctuation=False,
whitespace=False):
""" Returns a random string """
chars = ''
if letters:
chars += string.ascii_letters
if digits:
chars += string.digits
if punctuation:
chars += string.punctuation
if whitespace:
chars += string.whitespace
return ''.join([choice(chars) for i in range(length)])
<commit_msg>Use random.sample() rather than reinventing it.<commit_after># -*- coding: utf-8
import random, string
def random_string(length,
letters=True,
digits=True,
punctuation=False,
whitespace=False):
""" Returns a random string """
chars = ''
if letters:
chars += string.ascii_letters
if digits:
chars += string.digits
if punctuation:
chars += string.punctuation
if whitespace:
chars += string.whitespace
return ''.join(random.sample(chars, length))
|
166a78061059ad57189365d1cf56c81b513b7d9e
|
tests/test_ultrametric.py
|
tests/test_ultrametric.py
|
from viridis import tree
from six.moves import range
import pytest
@pytest.fixture
def base_tree():
t = tree.Ultrametric(list(range(6)))
t.merge(0, 1, 0.1) # 6
t.merge(6, 2, 0.2) # 7
t.merge(3, 4, 0.3) # 8
t.merge(8, 5, 0.4) # 9
t.merge(7, 9, 0.5) # 10
return t
def test_split(base_tree):
t = base_tree
t.split(0, 2)
assert t.node[10]['num_leaves'] == 3
t.split(0, 4) # nothing to do
assert tree.num_leaves(t, 10) == 3
def test_children(base_tree):
t = base_tree
assert t.children(6) == [0, 1]
def test_leaves(base_tree):
t = base_tree
assert set(t.leaves(10)) == set(range(6))
assert set(t.leaves(6)) == set([0, 1])
assert set(t.leaves(9)) == set(range(3, 6))
|
from viridis import tree
from six.moves import range
import pytest
@pytest.fixture
def base_tree():
t = tree.Ultrametric(list(range(6)))
t.merge(0, 1, 0.1) # 6
t.merge(6, 2, 0.2) # 7
t.merge(3, 4, 0.3) # 8
t.merge(8, 5, 0.4) # 9
t.merge(7, 9, 0.5) # 10
return t
def test_split(base_tree):
t = base_tree
t.split(0, 2)
assert t.node[10]['num_leaves'] == 3
t.split(0, 4) # nothing to do
assert tree.num_leaves(t, 10) == 3
def test_children(base_tree):
t = base_tree
assert t.children(6) == [0, 1]
def test_leaves(base_tree):
t = base_tree
assert set(t.leaves(10)) == set(range(6))
assert set(t.leaves(6)) == set([0, 1])
assert set(t.leaves(9)) == set(range(3, 6))
def test_highest(base_tree):
t = base_tree
for i in range(t.number_of_nodes()):
assert t.highest_ancestor(i) == 10
t.remove_node(10)
t.remove_node(9)
assert t.highest_ancestor(4) == 8
|
Add test for highest_ancestor function
|
Add test for highest_ancestor function
|
Python
|
mit
|
jni/viridis
|
from viridis import tree
from six.moves import range
import pytest
@pytest.fixture
def base_tree():
t = tree.Ultrametric(list(range(6)))
t.merge(0, 1, 0.1) # 6
t.merge(6, 2, 0.2) # 7
t.merge(3, 4, 0.3) # 8
t.merge(8, 5, 0.4) # 9
t.merge(7, 9, 0.5) # 10
return t
def test_split(base_tree):
t = base_tree
t.split(0, 2)
assert t.node[10]['num_leaves'] == 3
t.split(0, 4) # nothing to do
assert tree.num_leaves(t, 10) == 3
def test_children(base_tree):
t = base_tree
assert t.children(6) == [0, 1]
def test_leaves(base_tree):
t = base_tree
assert set(t.leaves(10)) == set(range(6))
assert set(t.leaves(6)) == set([0, 1])
assert set(t.leaves(9)) == set(range(3, 6))
Add test for highest_ancestor function
|
from viridis import tree
from six.moves import range
import pytest
@pytest.fixture
def base_tree():
t = tree.Ultrametric(list(range(6)))
t.merge(0, 1, 0.1) # 6
t.merge(6, 2, 0.2) # 7
t.merge(3, 4, 0.3) # 8
t.merge(8, 5, 0.4) # 9
t.merge(7, 9, 0.5) # 10
return t
def test_split(base_tree):
t = base_tree
t.split(0, 2)
assert t.node[10]['num_leaves'] == 3
t.split(0, 4) # nothing to do
assert tree.num_leaves(t, 10) == 3
def test_children(base_tree):
t = base_tree
assert t.children(6) == [0, 1]
def test_leaves(base_tree):
t = base_tree
assert set(t.leaves(10)) == set(range(6))
assert set(t.leaves(6)) == set([0, 1])
assert set(t.leaves(9)) == set(range(3, 6))
def test_highest(base_tree):
t = base_tree
for i in range(t.number_of_nodes()):
assert t.highest_ancestor(i) == 10
t.remove_node(10)
t.remove_node(9)
assert t.highest_ancestor(4) == 8
|
<commit_before>from viridis import tree
from six.moves import range
import pytest
@pytest.fixture
def base_tree():
t = tree.Ultrametric(list(range(6)))
t.merge(0, 1, 0.1) # 6
t.merge(6, 2, 0.2) # 7
t.merge(3, 4, 0.3) # 8
t.merge(8, 5, 0.4) # 9
t.merge(7, 9, 0.5) # 10
return t
def test_split(base_tree):
t = base_tree
t.split(0, 2)
assert t.node[10]['num_leaves'] == 3
t.split(0, 4) # nothing to do
assert tree.num_leaves(t, 10) == 3
def test_children(base_tree):
t = base_tree
assert t.children(6) == [0, 1]
def test_leaves(base_tree):
t = base_tree
assert set(t.leaves(10)) == set(range(6))
assert set(t.leaves(6)) == set([0, 1])
assert set(t.leaves(9)) == set(range(3, 6))
<commit_msg>Add test for highest_ancestor function<commit_after>
|
from viridis import tree
from six.moves import range
import pytest
@pytest.fixture
def base_tree():
t = tree.Ultrametric(list(range(6)))
t.merge(0, 1, 0.1) # 6
t.merge(6, 2, 0.2) # 7
t.merge(3, 4, 0.3) # 8
t.merge(8, 5, 0.4) # 9
t.merge(7, 9, 0.5) # 10
return t
def test_split(base_tree):
t = base_tree
t.split(0, 2)
assert t.node[10]['num_leaves'] == 3
t.split(0, 4) # nothing to do
assert tree.num_leaves(t, 10) == 3
def test_children(base_tree):
t = base_tree
assert t.children(6) == [0, 1]
def test_leaves(base_tree):
t = base_tree
assert set(t.leaves(10)) == set(range(6))
assert set(t.leaves(6)) == set([0, 1])
assert set(t.leaves(9)) == set(range(3, 6))
def test_highest(base_tree):
t = base_tree
for i in range(t.number_of_nodes()):
assert t.highest_ancestor(i) == 10
t.remove_node(10)
t.remove_node(9)
assert t.highest_ancestor(4) == 8
|
from viridis import tree
from six.moves import range
import pytest
@pytest.fixture
def base_tree():
t = tree.Ultrametric(list(range(6)))
t.merge(0, 1, 0.1) # 6
t.merge(6, 2, 0.2) # 7
t.merge(3, 4, 0.3) # 8
t.merge(8, 5, 0.4) # 9
t.merge(7, 9, 0.5) # 10
return t
def test_split(base_tree):
t = base_tree
t.split(0, 2)
assert t.node[10]['num_leaves'] == 3
t.split(0, 4) # nothing to do
assert tree.num_leaves(t, 10) == 3
def test_children(base_tree):
t = base_tree
assert t.children(6) == [0, 1]
def test_leaves(base_tree):
t = base_tree
assert set(t.leaves(10)) == set(range(6))
assert set(t.leaves(6)) == set([0, 1])
assert set(t.leaves(9)) == set(range(3, 6))
Add test for highest_ancestor functionfrom viridis import tree
from six.moves import range
import pytest
@pytest.fixture
def base_tree():
t = tree.Ultrametric(list(range(6)))
t.merge(0, 1, 0.1) # 6
t.merge(6, 2, 0.2) # 7
t.merge(3, 4, 0.3) # 8
t.merge(8, 5, 0.4) # 9
t.merge(7, 9, 0.5) # 10
return t
def test_split(base_tree):
t = base_tree
t.split(0, 2)
assert t.node[10]['num_leaves'] == 3
t.split(0, 4) # nothing to do
assert tree.num_leaves(t, 10) == 3
def test_children(base_tree):
t = base_tree
assert t.children(6) == [0, 1]
def test_leaves(base_tree):
t = base_tree
assert set(t.leaves(10)) == set(range(6))
assert set(t.leaves(6)) == set([0, 1])
assert set(t.leaves(9)) == set(range(3, 6))
def test_highest(base_tree):
t = base_tree
for i in range(t.number_of_nodes()):
assert t.highest_ancestor(i) == 10
t.remove_node(10)
t.remove_node(9)
assert t.highest_ancestor(4) == 8
|
<commit_before>from viridis import tree
from six.moves import range
import pytest
@pytest.fixture
def base_tree():
t = tree.Ultrametric(list(range(6)))
t.merge(0, 1, 0.1) # 6
t.merge(6, 2, 0.2) # 7
t.merge(3, 4, 0.3) # 8
t.merge(8, 5, 0.4) # 9
t.merge(7, 9, 0.5) # 10
return t
def test_split(base_tree):
t = base_tree
t.split(0, 2)
assert t.node[10]['num_leaves'] == 3
t.split(0, 4) # nothing to do
assert tree.num_leaves(t, 10) == 3
def test_children(base_tree):
t = base_tree
assert t.children(6) == [0, 1]
def test_leaves(base_tree):
t = base_tree
assert set(t.leaves(10)) == set(range(6))
assert set(t.leaves(6)) == set([0, 1])
assert set(t.leaves(9)) == set(range(3, 6))
<commit_msg>Add test for highest_ancestor function<commit_after>from viridis import tree
from six.moves import range
import pytest
@pytest.fixture
def base_tree():
t = tree.Ultrametric(list(range(6)))
t.merge(0, 1, 0.1) # 6
t.merge(6, 2, 0.2) # 7
t.merge(3, 4, 0.3) # 8
t.merge(8, 5, 0.4) # 9
t.merge(7, 9, 0.5) # 10
return t
def test_split(base_tree):
t = base_tree
t.split(0, 2)
assert t.node[10]['num_leaves'] == 3
t.split(0, 4) # nothing to do
assert tree.num_leaves(t, 10) == 3
def test_children(base_tree):
t = base_tree
assert t.children(6) == [0, 1]
def test_leaves(base_tree):
t = base_tree
assert set(t.leaves(10)) == set(range(6))
assert set(t.leaves(6)) == set([0, 1])
assert set(t.leaves(9)) == set(range(3, 6))
def test_highest(base_tree):
t = base_tree
for i in range(t.number_of_nodes()):
assert t.highest_ancestor(i) == 10
t.remove_node(10)
t.remove_node(9)
assert t.highest_ancestor(4) == 8
|
3c95ba7e4eda0762d735503b718119e361eb7295
|
tests/basics/try-finally-return.py
|
tests/basics/try-finally-return.py
|
def func1():
try:
return "it worked"
finally:
print("finally 1")
print(func1())
|
def func1():
try:
return "it worked"
finally:
print("finally 1")
print(func1())
def func2():
try:
return "it worked"
finally:
print("finally 2")
def func3():
try:
s = func2()
return s + ", did this work?"
finally:
print("finally 3")
print(func3())
|
Add additional testcase for finally/return.
|
Add additional testcase for finally/return.
|
Python
|
mit
|
heisewangluo/micropython,noahchense/micropython,henriknelson/micropython,alex-robbins/micropython,aethaniel/micropython,pramasoul/micropython,jlillest/micropython,noahwilliamsson/micropython,henriknelson/micropython,warner83/micropython,ruffy91/micropython,adafruit/circuitpython,mianos/micropython,dhylands/micropython,tralamazza/micropython,swegener/micropython,tralamazza/micropython,oopy/micropython,toolmacher/micropython,paul-xxx/micropython,orionrobots/micropython,oopy/micropython,ceramos/micropython,matthewelse/micropython,hosaka/micropython,EcmaXp/micropython,alex-robbins/micropython,xyb/micropython,galenhz/micropython,neilh10/micropython,drrk/micropython,cloudformdesign/micropython,Peetz0r/micropython-esp32,Timmenem/micropython,jimkmc/micropython,swegener/micropython,skybird6672/micropython,pfalcon/micropython,warner83/micropython,drrk/micropython,danicampora/micropython,lowRISC/micropython,ernesto-g/micropython,kostyll/micropython,HenrikSolver/micropython,cwyark/micropython,methoxid/micropystat,vriera/micropython,Timmenem/micropython,vitiral/micropython,supergis/micropython,MrSurly/micropython-esp32,stonegithubs/micropython,omtinez/micropython,martinribelotta/micropython,firstval/micropython,heisewangluo/micropython,supergis/micropython,emfcamp/micropython,utopiaprince/micropython,noahwilliamsson/micropython,KISSMonX/micropython,rubencabrera/micropython,hiway/micropython,hiway/micropython,chrisdearman/micropython,torwag/micropython,methoxid/micropystat,SungEun-Steve-Kim/test-mp,TDAbboud/micropython,mgyenik/micropython,neilh10/micropython,infinnovation/micropython,praemdonck/micropython,supergis/micropython,adamkh/micropython,mpalomer/micropython,PappaPeppar/micropython,dhylands/micropython,TDAbboud/micropython,turbinenreiter/micropython,rubencabrera/micropython,dinau/micropython,Vogtinator/micropython,EcmaXp/micropython,bvernoux/micropython,ChuckM/micropython,PappaPeppar/micropython,redbear/micropython,turbinenreiter/micropython,torwag/micropython,infinnovati
on/micropython,PappaPeppar/micropython,mianos/micropython,SHA2017-badge/micropython-esp32,AriZuu/micropython,hosaka/micropython,supergis/micropython,turbinenreiter/micropython,torwag/micropython,ahotam/micropython,drrk/micropython,deshipu/micropython,methoxid/micropystat,alex-march/micropython,hosaka/micropython,praemdonck/micropython,stonegithubs/micropython,xuxiaoxin/micropython,slzatz/micropython,selste/micropython,swegener/micropython,adafruit/micropython,jimkmc/micropython,Vogtinator/micropython,toolmacher/micropython,stonegithubs/micropython,toolmacher/micropython,galenhz/micropython,adafruit/micropython,mianos/micropython,alex-robbins/micropython,matthewelse/micropython,HenrikSolver/micropython,tuc-osg/micropython,praemdonck/micropython,firstval/micropython,blazewicz/micropython,feilongfl/micropython,pramasoul/micropython,dxxb/micropython,cwyark/micropython,martinribelotta/micropython,adafruit/circuitpython,TDAbboud/micropython,blazewicz/micropython,ceramos/micropython,adamkh/micropython,utopiaprince/micropython,tdautc19841202/micropython,dinau/micropython,lbattraw/micropython,xhat/micropython,MrSurly/micropython-esp32,ahotam/micropython,galenhz/micropython,dinau/micropython,jmarcelino/pycom-micropython,ryannathans/micropython,SungEun-Steve-Kim/test-mp,ChuckM/micropython,mhoffma/micropython,ruffy91/micropython,micropython/micropython-esp32,dxxb/micropython,martinribelotta/micropython,rubencabrera/micropython,ericsnowcurrently/micropython,vriera/micropython,danicampora/micropython,xuxiaoxin/micropython,vriera/micropython,kostyll/micropython,firstval/micropython,dhylands/micropython,pfalcon/micropython,supergis/micropython,redbear/micropython,pfalcon/micropython,MrSurly/micropython,alex-robbins/micropython,jlillest/micropython,pramasoul/micropython,cnoviello/micropython,pozetroninc/micropython,ryannathans/micropython,adafruit/circuitpython,galenhz/micropython,bvernoux/micropython,Timmenem/micropython,tdautc19841202/micropython,danicampora/micropython,noahchense
/micropython,suda/micropython,cnoviello/micropython,hosaka/micropython,torwag/micropython,SHA2017-badge/micropython-esp32,tobbad/micropython,blmorris/micropython,xyb/micropython,noahchense/micropython,kerneltask/micropython,ruffy91/micropython,deshipu/micropython,cloudformdesign/micropython,firstval/micropython,SungEun-Steve-Kim/test-mp,orionrobots/micropython,dinau/micropython,warner83/micropython,aethaniel/micropython,jlillest/micropython,blazewicz/micropython,jimkmc/micropython,neilh10/micropython,stonegithubs/micropython,KISSMonX/micropython,pfalcon/micropython,ceramos/micropython,TDAbboud/micropython,deshipu/micropython,feilongfl/micropython,SHA2017-badge/micropython-esp32,cnoviello/micropython,mianos/micropython,trezor/micropython,swegener/micropython,blazewicz/micropython,alex-robbins/micropython,skybird6672/micropython,blmorris/micropython,Peetz0r/micropython-esp32,KISSMonX/micropython,aitjcize/micropython,dinau/micropython,mianos/micropython,chrisdearman/micropython,warner83/micropython,micropython/micropython-esp32,ruffy91/micropython,kerneltask/micropython,orionrobots/micropython,jlillest/micropython,MrSurly/micropython,infinnovation/micropython,bvernoux/micropython,adafruit/micropython,danicampora/micropython,ganshun666/micropython,tobbad/micropython,trezor/micropython,omtinez/micropython,misterdanb/micropython,mhoffma/micropython,ChuckM/micropython,adafruit/circuitpython,tdautc19841202/micropython,vriera/micropython,cnoviello/micropython,lbattraw/micropython,xhat/micropython,cwyark/micropython,tdautc19841202/micropython,mhoffma/micropython,pramasoul/micropython,mhoffma/micropython,ceramos/micropython,adafruit/micropython,misterdanb/micropython,infinnovation/micropython,vriera/micropython,PappaPeppar/micropython,ryannathans/micropython,swegener/micropython,kostyll/micropython,pfalcon/micropython,AriZuu/micropython,toolmacher/micropython,trezor/micropython,micropython/micropython-esp32,xyb/micropython,selste/micropython,rubencabrera/micropython,vitiral/mi
cropython,toolmacher/micropython,blmorris/micropython,kostyll/micropython,martinribelotta/micropython,redbear/micropython,dmazzella/micropython,ruffy91/micropython,tdautc19841202/micropython,xuxiaoxin/micropython,praemdonck/micropython,tralamazza/micropython,emfcamp/micropython,drrk/micropython,vitiral/micropython,emfcamp/micropython,ernesto-g/micropython,emfcamp/micropython,misterdanb/micropython,skybird6672/micropython,noahchense/micropython,misterdanb/micropython,MrSurly/micropython-esp32,mpalomer/micropython,henriknelson/micropython,omtinez/micropython,pozetroninc/micropython,redbear/micropython,jimkmc/micropython,cloudformdesign/micropython,ganshun666/micropython,methoxid/micropystat,alex-march/micropython,micropython/micropython-esp32,xyb/micropython,noahwilliamsson/micropython,Timmenem/micropython,SHA2017-badge/micropython-esp32,feilongfl/micropython,selste/micropython,pozetroninc/micropython,xhat/micropython,lowRISC/micropython,TDAbboud/micropython,skybird6672/micropython,hiway/micropython,selste/micropython,slzatz/micropython,pramasoul/micropython,tuc-osg/micropython,mpalomer/micropython,ganshun666/micropython,paul-xxx/micropython,utopiaprince/micropython,matthewelse/micropython,hosaka/micropython,oopy/micropython,jimkmc/micropython,puuu/micropython,xyb/micropython,galenhz/micropython,adamkh/micropython,lbattraw/micropython,feilongfl/micropython,ahotam/micropython,mgyenik/micropython,matthewelse/micropython,paul-xxx/micropython,cnoviello/micropython,ChuckM/micropython,MrSurly/micropython,bvernoux/micropython,Vogtinator/micropython,trezor/micropython,Peetz0r/micropython-esp32,alex-march/micropython,utopiaprince/micropython,lowRISC/micropython,adamkh/micropython,suda/micropython,noahwilliamsson/micropython,chrisdearman/micropython,SHA2017-badge/micropython-esp32,slzatz/micropython,chrisdearman/micropython,Timmenem/micropython,vitiral/micropython,lbattraw/micropython,matthewelse/micropython,utopiaprince/micropython,jlillest/micropython,lowRISC/micropython,adam
kh/micropython,mgyenik/micropython,ryannathans/micropython,KISSMonX/micropython,emfcamp/micropython,MrSurly/micropython,aethaniel/micropython,xhat/micropython,skybird6672/micropython,noahwilliamsson/micropython,EcmaXp/micropython,danicampora/micropython,redbear/micropython,Vogtinator/micropython,AriZuu/micropython,dhylands/micropython,tobbad/micropython,blmorris/micropython,dmazzella/micropython,SungEun-Steve-Kim/test-mp,ChuckM/micropython,drrk/micropython,HenrikSolver/micropython,trezor/micropython,jmarcelino/pycom-micropython,cwyark/micropython,oopy/micropython,EcmaXp/micropython,blazewicz/micropython,ericsnowcurrently/micropython,lowRISC/micropython,SungEun-Steve-Kim/test-mp,ahotam/micropython,orionrobots/micropython,adafruit/micropython,MrSurly/micropython-esp32,cloudformdesign/micropython,mhoffma/micropython,oopy/micropython,dmazzella/micropython,vitiral/micropython,tralamazza/micropython,Peetz0r/micropython-esp32,chrisdearman/micropython,slzatz/micropython,PappaPeppar/micropython,suda/micropython,tuc-osg/micropython,tobbad/micropython,aitjcize/micropython,mpalomer/micropython,pozetroninc/micropython,ahotam/micropython,micropython/micropython-esp32,misterdanb/micropython,AriZuu/micropython,ganshun666/micropython,kerneltask/micropython,deshipu/micropython,warner83/micropython,ceramos/micropython,dxxb/micropython,KISSMonX/micropython,ericsnowcurrently/micropython,jmarcelino/pycom-micropython,tobbad/micropython,omtinez/micropython,kerneltask/micropython,kerneltask/micropython,adafruit/circuitpython,HenrikSolver/micropython,blmorris/micropython,henriknelson/micropython,EcmaXp/micropython,xuxiaoxin/micropython,tuc-osg/micropython,bvernoux/micropython,turbinenreiter/micropython,pozetroninc/micropython,feilongfl/micropython,firstval/micropython,jmarcelino/pycom-micropython,martinribelotta/micropython,ernesto-g/micropython,jmarcelino/pycom-micropython,aitjcize/micropython,adafruit/circuitpython,suda/micropython,omtinez/micropython,tuc-osg/micropython,aitjcize/micropyth
on,aethaniel/micropython,puuu/micropython,methoxid/micropystat,Vogtinator/micropython,paul-xxx/micropython,ericsnowcurrently/micropython,ernesto-g/micropython,paul-xxx/micropython,ericsnowcurrently/micropython,dxxb/micropython,ryannathans/micropython,AriZuu/micropython,HenrikSolver/micropython,praemdonck/micropython,suda/micropython,heisewangluo/micropython,slzatz/micropython,puuu/micropython,puuu/micropython,noahchense/micropython,cwyark/micropython,mgyenik/micropython,neilh10/micropython,torwag/micropython,rubencabrera/micropython,matthewelse/micropython,aethaniel/micropython,puuu/micropython,dhylands/micropython,dxxb/micropython,MrSurly/micropython-esp32,turbinenreiter/micropython,hiway/micropython,xhat/micropython,alex-march/micropython,cloudformdesign/micropython,deshipu/micropython,xuxiaoxin/micropython,stonegithubs/micropython,infinnovation/micropython,lbattraw/micropython,heisewangluo/micropython,mgyenik/micropython,alex-march/micropython,ernesto-g/micropython,henriknelson/micropython,orionrobots/micropython,Peetz0r/micropython-esp32,ganshun666/micropython,heisewangluo/micropython,selste/micropython,mpalomer/micropython,dmazzella/micropython,hiway/micropython,kostyll/micropython,neilh10/micropython,MrSurly/micropython
|
def func1():
try:
return "it worked"
finally:
print("finally 1")
print(func1())
Add additional testcase for finally/return.
|
def func1():
try:
return "it worked"
finally:
print("finally 1")
print(func1())
def func2():
try:
return "it worked"
finally:
print("finally 2")
def func3():
try:
s = func2()
return s + ", did this work?"
finally:
print("finally 3")
print(func3())
|
<commit_before>def func1():
try:
return "it worked"
finally:
print("finally 1")
print(func1())
<commit_msg>Add additional testcase for finally/return.<commit_after>
|
def func1():
try:
return "it worked"
finally:
print("finally 1")
print(func1())
def func2():
try:
return "it worked"
finally:
print("finally 2")
def func3():
try:
s = func2()
return s + ", did this work?"
finally:
print("finally 3")
print(func3())
|
def func1():
try:
return "it worked"
finally:
print("finally 1")
print(func1())
Add additional testcase for finally/return.def func1():
try:
return "it worked"
finally:
print("finally 1")
print(func1())
def func2():
try:
return "it worked"
finally:
print("finally 2")
def func3():
try:
s = func2()
return s + ", did this work?"
finally:
print("finally 3")
print(func3())
|
<commit_before>def func1():
try:
return "it worked"
finally:
print("finally 1")
print(func1())
<commit_msg>Add additional testcase for finally/return.<commit_after>def func1():
try:
return "it worked"
finally:
print("finally 1")
print(func1())
def func2():
try:
return "it worked"
finally:
print("finally 2")
def func3():
try:
s = func2()
return s + ", did this work?"
finally:
print("finally 3")
print(func3())
|
cf6034fc62cc97a5655b371fdef4a4728707fdea
|
changes/utils/locking.py
|
changes/utils/locking.py
|
from flask import current_app
from functools import wraps
from hashlib import md5
from changes.ext.redis import UnableToGetLock
from changes.config import redis
def lock(func):
@wraps(func)
def wrapped(**kwargs):
key = '{0}:{1}'.format(
func.__name__,
md5(
'&'.join('{0}={1}'.format(k, repr(v))
for k, v in sorted(kwargs.iteritems()))
).hexdigest()
)
try:
with redis.lock(key, timeout=1, expire=300, nowait=True):
return func(**kwargs)
except UnableToGetLock:
current_app.logger.warn('Unable to get lock for %s', key)
return wrapped
|
from flask import current_app
from functools import wraps
from hashlib import md5
from changes.ext.redis import UnableToGetLock
from changes.config import redis
def lock(func):
@wraps(func)
def wrapped(**kwargs):
key = '{0}:{1}:{2}'.format(
func.__module__,
func.__name__,
md5(
'&'.join('{0}={1}'.format(k, repr(v))
for k, v in sorted(kwargs.iteritems()))
).hexdigest()
)
try:
with redis.lock(key, timeout=1, expire=300, nowait=True):
return func(**kwargs)
except UnableToGetLock:
current_app.logger.warn('Unable to get lock for %s', key)
return wrapped
|
Use __module__ to make @lock unique
|
Use __module__ to make @lock unique
Summary: Fixes T49428.
Test Plan:
Hard to test on changes_dev because it can't run both handlers (no
place to send notifications to), but this seems simple enough...
Reviewers: armooo, kylec
Reviewed By: kylec
Subscribers: changesbot, mkedia, jukka, vishal
Maniphest Tasks: T49428
Differential Revision: https://tails.corp.dropbox.com/D122408
|
Python
|
apache-2.0
|
bowlofstew/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,wfxiang08/changes,bowlofstew/changes,dropbox/changes,dropbox/changes,bowlofstew/changes
|
from flask import current_app
from functools import wraps
from hashlib import md5
from changes.ext.redis import UnableToGetLock
from changes.config import redis
def lock(func):
@wraps(func)
def wrapped(**kwargs):
key = '{0}:{1}'.format(
func.__name__,
md5(
'&'.join('{0}={1}'.format(k, repr(v))
for k, v in sorted(kwargs.iteritems()))
).hexdigest()
)
try:
with redis.lock(key, timeout=1, expire=300, nowait=True):
return func(**kwargs)
except UnableToGetLock:
current_app.logger.warn('Unable to get lock for %s', key)
return wrapped
Use __module__ to make @lock unique
Summary: Fixes T49428.
Test Plan:
Hard to test on changes_dev because it can't run both handlers (no
place to send notifications to), but this seems simple enough...
Reviewers: armooo, kylec
Reviewed By: kylec
Subscribers: changesbot, mkedia, jukka, vishal
Maniphest Tasks: T49428
Differential Revision: https://tails.corp.dropbox.com/D122408
|
from flask import current_app
from functools import wraps
from hashlib import md5
from changes.ext.redis import UnableToGetLock
from changes.config import redis
def lock(func):
@wraps(func)
def wrapped(**kwargs):
key = '{0}:{1}:{2}'.format(
func.__module__,
func.__name__,
md5(
'&'.join('{0}={1}'.format(k, repr(v))
for k, v in sorted(kwargs.iteritems()))
).hexdigest()
)
try:
with redis.lock(key, timeout=1, expire=300, nowait=True):
return func(**kwargs)
except UnableToGetLock:
current_app.logger.warn('Unable to get lock for %s', key)
return wrapped
|
<commit_before>from flask import current_app
from functools import wraps
from hashlib import md5
from changes.ext.redis import UnableToGetLock
from changes.config import redis
def lock(func):
@wraps(func)
def wrapped(**kwargs):
key = '{0}:{1}'.format(
func.__name__,
md5(
'&'.join('{0}={1}'.format(k, repr(v))
for k, v in sorted(kwargs.iteritems()))
).hexdigest()
)
try:
with redis.lock(key, timeout=1, expire=300, nowait=True):
return func(**kwargs)
except UnableToGetLock:
current_app.logger.warn('Unable to get lock for %s', key)
return wrapped
<commit_msg>Use __module__ to make @lock unique
Summary: Fixes T49428.
Test Plan:
Hard to test on changes_dev because it can't run both handlers (no
place to send notifications to), but this seems simple enough...
Reviewers: armooo, kylec
Reviewed By: kylec
Subscribers: changesbot, mkedia, jukka, vishal
Maniphest Tasks: T49428
Differential Revision: https://tails.corp.dropbox.com/D122408<commit_after>
|
from flask import current_app
from functools import wraps
from hashlib import md5
from changes.ext.redis import UnableToGetLock
from changes.config import redis
def lock(func):
@wraps(func)
def wrapped(**kwargs):
key = '{0}:{1}:{2}'.format(
func.__module__,
func.__name__,
md5(
'&'.join('{0}={1}'.format(k, repr(v))
for k, v in sorted(kwargs.iteritems()))
).hexdigest()
)
try:
with redis.lock(key, timeout=1, expire=300, nowait=True):
return func(**kwargs)
except UnableToGetLock:
current_app.logger.warn('Unable to get lock for %s', key)
return wrapped
|
from flask import current_app
from functools import wraps
from hashlib import md5
from changes.ext.redis import UnableToGetLock
from changes.config import redis
def lock(func):
@wraps(func)
def wrapped(**kwargs):
key = '{0}:{1}'.format(
func.__name__,
md5(
'&'.join('{0}={1}'.format(k, repr(v))
for k, v in sorted(kwargs.iteritems()))
).hexdigest()
)
try:
with redis.lock(key, timeout=1, expire=300, nowait=True):
return func(**kwargs)
except UnableToGetLock:
current_app.logger.warn('Unable to get lock for %s', key)
return wrapped
Use __module__ to make @lock unique
Summary: Fixes T49428.
Test Plan:
Hard to test on changes_dev because it can't run both handlers (no
place to send notifications to), but this seems simple enough...
Reviewers: armooo, kylec
Reviewed By: kylec
Subscribers: changesbot, mkedia, jukka, vishal
Maniphest Tasks: T49428
Differential Revision: https://tails.corp.dropbox.com/D122408from flask import current_app
from functools import wraps
from hashlib import md5
from changes.ext.redis import UnableToGetLock
from changes.config import redis
def lock(func):
@wraps(func)
def wrapped(**kwargs):
key = '{0}:{1}:{2}'.format(
func.__module__,
func.__name__,
md5(
'&'.join('{0}={1}'.format(k, repr(v))
for k, v in sorted(kwargs.iteritems()))
).hexdigest()
)
try:
with redis.lock(key, timeout=1, expire=300, nowait=True):
return func(**kwargs)
except UnableToGetLock:
current_app.logger.warn('Unable to get lock for %s', key)
return wrapped
|
<commit_before>from flask import current_app
from functools import wraps
from hashlib import md5
from changes.ext.redis import UnableToGetLock
from changes.config import redis
def lock(func):
@wraps(func)
def wrapped(**kwargs):
key = '{0}:{1}'.format(
func.__name__,
md5(
'&'.join('{0}={1}'.format(k, repr(v))
for k, v in sorted(kwargs.iteritems()))
).hexdigest()
)
try:
with redis.lock(key, timeout=1, expire=300, nowait=True):
return func(**kwargs)
except UnableToGetLock:
current_app.logger.warn('Unable to get lock for %s', key)
return wrapped
<commit_msg>Use __module__ to make @lock unique
Summary: Fixes T49428.
Test Plan:
Hard to test on changes_dev because it can't run both handlers (no
place to send notifications to), but this seems simple enough...
Reviewers: armooo, kylec
Reviewed By: kylec
Subscribers: changesbot, mkedia, jukka, vishal
Maniphest Tasks: T49428
Differential Revision: https://tails.corp.dropbox.com/D122408<commit_after>from flask import current_app
from functools import wraps
from hashlib import md5
from changes.ext.redis import UnableToGetLock
from changes.config import redis
def lock(func):
@wraps(func)
def wrapped(**kwargs):
key = '{0}:{1}:{2}'.format(
func.__module__,
func.__name__,
md5(
'&'.join('{0}={1}'.format(k, repr(v))
for k, v in sorted(kwargs.iteritems()))
).hexdigest()
)
try:
with redis.lock(key, timeout=1, expire=300, nowait=True):
return func(**kwargs)
except UnableToGetLock:
current_app.logger.warn('Unable to get lock for %s', key)
return wrapped
|
f3c78eff85efda94915fd3c432d5c0485b5e302c
|
benchexec/tools/korn.py
|
benchexec/tools/korn.py
|
# This file is part of BenchExec, a framework for reliable benchmarking:
# https://github.com/sosy-lab/benchexec
#
# SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org>
#
# SPDX-License-Identifier: Apache-2.0
import re
import benchexec.util as util
import benchexec.result as result
import benchexec.tools.template
class Tool(benchexec.tools.template.BaseTool):
"""
Tool info for Korn, a software verifier based on Horn-clauses.
URL: https://github.com/gernst/korn
"""
REQUIRED_PATHS = [
"run",
"korn.jar",
"z3",
"eld",
"eld.jar",
]
def executable(self):
return util.find_executable("run")
def version(self, executable):
return self._version_from_tool(executable)
def name(self):
return "Korn"
def determine_result(self, returncode, returnsignal, output, isTimeout):
"""
This is literally the output from the underlying CHC solver
"""
for line in output:
line = line.strip()
if line == "unsat":
return result.RESULT_FALSE_PROP
elif line == "sat":
return result.RESULT_TRUE_PROP
elif "error" in line:
return "ERROR"
return result.RESULT_UNKNOWN
|
Add tool info for Korn
|
Add tool info for Korn
For more information, see https://github.com/gernst/korn
|
Python
|
apache-2.0
|
sosy-lab/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,dbeyer/benchexec,dbeyer/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,dbeyer/benchexec,sosy-lab/benchexec,dbeyer/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,sosy-lab/benchexec
|
Add tool info for Korn
For more information, see https://github.com/gernst/korn
|
# This file is part of BenchExec, a framework for reliable benchmarking:
# https://github.com/sosy-lab/benchexec
#
# SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org>
#
# SPDX-License-Identifier: Apache-2.0
import re
import benchexec.util as util
import benchexec.result as result
import benchexec.tools.template
class Tool(benchexec.tools.template.BaseTool):
"""
Tool info for Korn, a software verifier based on Horn-clauses.
URL: https://github.com/gernst/korn
"""
REQUIRED_PATHS = [
"run",
"korn.jar",
"z3",
"eld",
"eld.jar",
]
def executable(self):
return util.find_executable("run")
def version(self, executable):
return self._version_from_tool(executable)
def name(self):
return "Korn"
def determine_result(self, returncode, returnsignal, output, isTimeout):
"""
This is literally the output from the underlying CHC solver
"""
for line in output:
line = line.strip()
if line == "unsat":
return result.RESULT_FALSE_PROP
elif line == "sat":
return result.RESULT_TRUE_PROP
elif "error" in line:
return "ERROR"
return result.RESULT_UNKNOWN
|
<commit_before><commit_msg>Add tool info for Korn
For more information, see https://github.com/gernst/korn<commit_after>
|
# This file is part of BenchExec, a framework for reliable benchmarking:
# https://github.com/sosy-lab/benchexec
#
# SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org>
#
# SPDX-License-Identifier: Apache-2.0
import re
import benchexec.util as util
import benchexec.result as result
import benchexec.tools.template
class Tool(benchexec.tools.template.BaseTool):
"""
Tool info for Korn, a software verifier based on Horn-clauses.
URL: https://github.com/gernst/korn
"""
REQUIRED_PATHS = [
"run",
"korn.jar",
"z3",
"eld",
"eld.jar",
]
def executable(self):
return util.find_executable("run")
def version(self, executable):
return self._version_from_tool(executable)
def name(self):
return "Korn"
def determine_result(self, returncode, returnsignal, output, isTimeout):
"""
This is literally the output from the underlying CHC solver
"""
for line in output:
line = line.strip()
if line == "unsat":
return result.RESULT_FALSE_PROP
elif line == "sat":
return result.RESULT_TRUE_PROP
elif "error" in line:
return "ERROR"
return result.RESULT_UNKNOWN
|
Add tool info for Korn
For more information, see https://github.com/gernst/korn# This file is part of BenchExec, a framework for reliable benchmarking:
# https://github.com/sosy-lab/benchexec
#
# SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org>
#
# SPDX-License-Identifier: Apache-2.0
import re
import benchexec.util as util
import benchexec.result as result
import benchexec.tools.template
class Tool(benchexec.tools.template.BaseTool):
"""
Tool info for Korn, a software verifier based on Horn-clauses.
URL: https://github.com/gernst/korn
"""
REQUIRED_PATHS = [
"run",
"korn.jar",
"z3",
"eld",
"eld.jar",
]
def executable(self):
return util.find_executable("run")
def version(self, executable):
return self._version_from_tool(executable)
def name(self):
return "Korn"
def determine_result(self, returncode, returnsignal, output, isTimeout):
"""
This is literally the output from the underlying CHC solver
"""
for line in output:
line = line.strip()
if line == "unsat":
return result.RESULT_FALSE_PROP
elif line == "sat":
return result.RESULT_TRUE_PROP
elif "error" in line:
return "ERROR"
return result.RESULT_UNKNOWN
|
<commit_before><commit_msg>Add tool info for Korn
For more information, see https://github.com/gernst/korn<commit_after># This file is part of BenchExec, a framework for reliable benchmarking:
# https://github.com/sosy-lab/benchexec
#
# SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org>
#
# SPDX-License-Identifier: Apache-2.0
import re
import benchexec.util as util
import benchexec.result as result
import benchexec.tools.template
class Tool(benchexec.tools.template.BaseTool):
"""
Tool info for Korn, a software verifier based on Horn-clauses.
URL: https://github.com/gernst/korn
"""
REQUIRED_PATHS = [
"run",
"korn.jar",
"z3",
"eld",
"eld.jar",
]
def executable(self):
return util.find_executable("run")
def version(self, executable):
return self._version_from_tool(executable)
def name(self):
return "Korn"
def determine_result(self, returncode, returnsignal, output, isTimeout):
"""
This is literally the output from the underlying CHC solver
"""
for line in output:
line = line.strip()
if line == "unsat":
return result.RESULT_FALSE_PROP
elif line == "sat":
return result.RESULT_TRUE_PROP
elif "error" in line:
return "ERROR"
return result.RESULT_UNKNOWN
|
|
06b547057e5822bfdff1272c1f8209f12c66bf2a
|
openedx/core/djangoapps/site_configuration/migrations/0005_populate_siteconfig_history_site_values.py
|
openedx/core/djangoapps/site_configuration/migrations/0005_populate_siteconfig_history_site_values.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
forward_sql = """
UPDATE
site_configuration_siteconfigurationhistory
SET
site_values = '{}';
"""
reverse_sql = """
UPDATE
site_configuration_siteconfigurationhistory
SET
site_values = '';
"""
class Migration(migrations.Migration):
dependencies = [
('site_configuration', '0004_add_site_values_field'),
]
operations = [
migrations.RunSQL(forward_sql, reverse_sql=reverse_sql),
]
|
Add migration to populate site_values in SiteConfigurationHistory
|
Add migration to populate site_values in SiteConfigurationHistory
Right now the ORM is very unhappy about the JSONField `site_values`
in SiteConfigurationHistory containing non-JSON (empty strings). We
cannot even write a data migration using the ORM to populate the field
because that causes a JSONDeserializationError. Therefore, we must
bypass the ORM and populate the values with raw SQL.
DENG-18
|
Python
|
agpl-3.0
|
angelapper/edx-platform,edx-solutions/edx-platform,appsembler/edx-platform,angelapper/edx-platform,msegado/edx-platform,eduNEXT/edx-platform,eduNEXT/edx-platform,EDUlib/edx-platform,appsembler/edx-platform,cpennington/edx-platform,EDUlib/edx-platform,mitocw/edx-platform,stvstnfrd/edx-platform,eduNEXT/edunext-platform,stvstnfrd/edx-platform,edx-solutions/edx-platform,arbrandes/edx-platform,msegado/edx-platform,edx-solutions/edx-platform,eduNEXT/edunext-platform,stvstnfrd/edx-platform,appsembler/edx-platform,EDUlib/edx-platform,eduNEXT/edx-platform,mitocw/edx-platform,arbrandes/edx-platform,edx/edx-platform,edx/edx-platform,edx/edx-platform,angelapper/edx-platform,cpennington/edx-platform,msegado/edx-platform,arbrandes/edx-platform,edx/edx-platform,eduNEXT/edx-platform,msegado/edx-platform,cpennington/edx-platform,edx-solutions/edx-platform,eduNEXT/edunext-platform,appsembler/edx-platform,arbrandes/edx-platform,cpennington/edx-platform,EDUlib/edx-platform,mitocw/edx-platform,angelapper/edx-platform,mitocw/edx-platform,msegado/edx-platform,stvstnfrd/edx-platform,eduNEXT/edunext-platform
|
Add migration to populate site_values in SiteConfigurationHistory
Right now the ORM is very unhappy about the JSONField `site_values`
in SiteConfigurationHistory containing non-JSON (empty strings). We
cannot even write a data migration using the ORM to populate the field
because that causes a JSONDeserializationError. Therefore, we must
bypass the ORM and populate the values with raw SQL.
DENG-18
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
forward_sql = """
UPDATE
site_configuration_siteconfigurationhistory
SET
site_values = '{}';
"""
reverse_sql = """
UPDATE
site_configuration_siteconfigurationhistory
SET
site_values = '';
"""
class Migration(migrations.Migration):
dependencies = [
('site_configuration', '0004_add_site_values_field'),
]
operations = [
migrations.RunSQL(forward_sql, reverse_sql=reverse_sql),
]
|
<commit_before><commit_msg>Add migration to populate site_values in SiteConfigurationHistory
Right now the ORM is very unhappy about the JSONField `site_values`
in SiteConfigurationHistory containing non-JSON (empty strings). We
cannot even write a data migration using the ORM to populate the field
because that causes a JSONDeserializationError. Therefore, we must
bypass the ORM and populate the values with raw SQL.
DENG-18<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
forward_sql = """
UPDATE
site_configuration_siteconfigurationhistory
SET
site_values = '{}';
"""
reverse_sql = """
UPDATE
site_configuration_siteconfigurationhistory
SET
site_values = '';
"""
class Migration(migrations.Migration):
dependencies = [
('site_configuration', '0004_add_site_values_field'),
]
operations = [
migrations.RunSQL(forward_sql, reverse_sql=reverse_sql),
]
|
Add migration to populate site_values in SiteConfigurationHistory
Right now the ORM is very unhappy about the JSONField `site_values`
in SiteConfigurationHistory containing non-JSON (empty strings). We
cannot even write a data migration using the ORM to populate the field
because that causes a JSONDeserializationError. Therefore, we must
bypass the ORM and populate the values with raw SQL.
DENG-18# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
forward_sql = """
UPDATE
site_configuration_siteconfigurationhistory
SET
site_values = '{}';
"""
reverse_sql = """
UPDATE
site_configuration_siteconfigurationhistory
SET
site_values = '';
"""
class Migration(migrations.Migration):
dependencies = [
('site_configuration', '0004_add_site_values_field'),
]
operations = [
migrations.RunSQL(forward_sql, reverse_sql=reverse_sql),
]
|
<commit_before><commit_msg>Add migration to populate site_values in SiteConfigurationHistory
Right now the ORM is very unhappy about the JSONField `site_values`
in SiteConfigurationHistory containing non-JSON (empty strings). We
cannot even write a data migration using the ORM to populate the field
because that causes a JSONDeserializationError. Therefore, we must
bypass the ORM and populate the values with raw SQL.
DENG-18<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
forward_sql = """
UPDATE
site_configuration_siteconfigurationhistory
SET
site_values = '{}';
"""
reverse_sql = """
UPDATE
site_configuration_siteconfigurationhistory
SET
site_values = '';
"""
class Migration(migrations.Migration):
dependencies = [
('site_configuration', '0004_add_site_values_field'),
]
operations = [
migrations.RunSQL(forward_sql, reverse_sql=reverse_sql),
]
|
|
2963909063e434936ba095ba9532782e7e3fd518
|
tests/QtDeclarative/qdeclarativeview_test.py
|
tests/QtDeclarative/qdeclarativeview_test.py
|
'''Test cases for QDeclarativeView'''
import unittest
from PySide.QtCore import QUrl, QStringList, QVariant
from PySide.QtGui import QPushButton
from PySide.QtDeclarative import QDeclarativeView
from helper import adjust_filename, TimedQApplication
class TestQDeclarativeView(TimedQApplication):
def testQDeclarativeViewList(self):
view = QDeclarativeView()
dataList = QStringList(["Item 1", "Item 2", "Item 3", "Item 4"])
ctxt = view.rootContext()
ctxt.setContextProperty("myModel", dataList)
url = QUrl.fromLocalFile(adjust_filename('view.qml', __file__))
view.setSource(url)
view.show()
self.assertEqual(view.status(), QDeclarativeView.Ready)
self.app.exec_()
if __name__ == '__main__':
unittest.main()
|
'''Test cases for QDeclarativeView'''
import unittest
from PySide.QtCore import QUrl
from PySide.QtDeclarative import QDeclarativeView
from helper import adjust_filename, TimedQApplication
class TestQDeclarativeView(TimedQApplication):
def testQDeclarativeViewList(self):
view = QDeclarativeView()
dataList = ["Item 1", "Item 2", "Item 3", "Item 4"]
ctxt = view.rootContext()
ctxt.setContextProperty("myModel", dataList)
url = QUrl.fromLocalFile(adjust_filename('view.qml', __file__))
view.setSource(url)
view.show()
self.assertEqual(view.status(), QDeclarativeView.Ready)
self.app.exec_()
if __name__ == '__main__':
unittest.main()
|
Remove use of deprecated types.
|
Remove use of deprecated types.
Reviewer: Hugo Parente Lima <e250cbdf6b5a11059e9d944a6e5e9282be80d14c@openbossa.org>,
Luciano Wolf <luciano.wolf@openbossa.org>
|
Python
|
lgpl-2.1
|
RobinD42/pyside,pankajp/pyside,RobinD42/pyside,BadSingleton/pyside2,IronManMark20/pyside2,M4rtinK/pyside-android,enthought/pyside,PySide/PySide,PySide/PySide,PySide/PySide,pankajp/pyside,PySide/PySide,M4rtinK/pyside-bb10,M4rtinK/pyside-android,M4rtinK/pyside-bb10,M4rtinK/pyside-bb10,RobinD42/pyside,IronManMark20/pyside2,M4rtinK/pyside-bb10,gbaty/pyside2,M4rtinK/pyside-android,IronManMark20/pyside2,qtproject/pyside-pyside,enthought/pyside,gbaty/pyside2,pankajp/pyside,gbaty/pyside2,RobinD42/pyside,pankajp/pyside,enthought/pyside,M4rtinK/pyside-bb10,RobinD42/pyside,enthought/pyside,IronManMark20/pyside2,RobinD42/pyside,RobinD42/pyside,qtproject/pyside-pyside,qtproject/pyside-pyside,M4rtinK/pyside-android,BadSingleton/pyside2,enthought/pyside,BadSingleton/pyside2,M4rtinK/pyside-android,gbaty/pyside2,BadSingleton/pyside2,enthought/pyside,PySide/PySide,M4rtinK/pyside-android,qtproject/pyside-pyside,M4rtinK/pyside-bb10,qtproject/pyside-pyside,gbaty/pyside2,pankajp/pyside,IronManMark20/pyside2,BadSingleton/pyside2,enthought/pyside
|
'''Test cases for QDeclarativeView'''
import unittest
from PySide.QtCore import QUrl, QStringList, QVariant
from PySide.QtGui import QPushButton
from PySide.QtDeclarative import QDeclarativeView
from helper import adjust_filename, TimedQApplication
class TestQDeclarativeView(TimedQApplication):
def testQDeclarativeViewList(self):
view = QDeclarativeView()
dataList = QStringList(["Item 1", "Item 2", "Item 3", "Item 4"])
ctxt = view.rootContext()
ctxt.setContextProperty("myModel", dataList)
url = QUrl.fromLocalFile(adjust_filename('view.qml', __file__))
view.setSource(url)
view.show()
self.assertEqual(view.status(), QDeclarativeView.Ready)
self.app.exec_()
if __name__ == '__main__':
unittest.main()
Remove use of deprecated types.
Reviewer: Hugo Parente Lima <e250cbdf6b5a11059e9d944a6e5e9282be80d14c@openbossa.org>,
Luciano Wolf <luciano.wolf@openbossa.org>
|
'''Test cases for QDeclarativeView'''
import unittest
from PySide.QtCore import QUrl
from PySide.QtDeclarative import QDeclarativeView
from helper import adjust_filename, TimedQApplication
class TestQDeclarativeView(TimedQApplication):
def testQDeclarativeViewList(self):
view = QDeclarativeView()
dataList = ["Item 1", "Item 2", "Item 3", "Item 4"]
ctxt = view.rootContext()
ctxt.setContextProperty("myModel", dataList)
url = QUrl.fromLocalFile(adjust_filename('view.qml', __file__))
view.setSource(url)
view.show()
self.assertEqual(view.status(), QDeclarativeView.Ready)
self.app.exec_()
if __name__ == '__main__':
unittest.main()
|
<commit_before>'''Test cases for QDeclarativeView'''
import unittest
from PySide.QtCore import QUrl, QStringList, QVariant
from PySide.QtGui import QPushButton
from PySide.QtDeclarative import QDeclarativeView
from helper import adjust_filename, TimedQApplication
class TestQDeclarativeView(TimedQApplication):
def testQDeclarativeViewList(self):
view = QDeclarativeView()
dataList = QStringList(["Item 1", "Item 2", "Item 3", "Item 4"])
ctxt = view.rootContext()
ctxt.setContextProperty("myModel", dataList)
url = QUrl.fromLocalFile(adjust_filename('view.qml', __file__))
view.setSource(url)
view.show()
self.assertEqual(view.status(), QDeclarativeView.Ready)
self.app.exec_()
if __name__ == '__main__':
unittest.main()
<commit_msg>Remove use of deprecated types.
Reviewer: Hugo Parente Lima <e250cbdf6b5a11059e9d944a6e5e9282be80d14c@openbossa.org>,
Luciano Wolf <luciano.wolf@openbossa.org><commit_after>
|
'''Test cases for QDeclarativeView'''
import unittest
from PySide.QtCore import QUrl
from PySide.QtDeclarative import QDeclarativeView
from helper import adjust_filename, TimedQApplication
class TestQDeclarativeView(TimedQApplication):
def testQDeclarativeViewList(self):
view = QDeclarativeView()
dataList = ["Item 1", "Item 2", "Item 3", "Item 4"]
ctxt = view.rootContext()
ctxt.setContextProperty("myModel", dataList)
url = QUrl.fromLocalFile(adjust_filename('view.qml', __file__))
view.setSource(url)
view.show()
self.assertEqual(view.status(), QDeclarativeView.Ready)
self.app.exec_()
if __name__ == '__main__':
unittest.main()
|
'''Test cases for QDeclarativeView'''
import unittest
from PySide.QtCore import QUrl, QStringList, QVariant
from PySide.QtGui import QPushButton
from PySide.QtDeclarative import QDeclarativeView
from helper import adjust_filename, TimedQApplication
class TestQDeclarativeView(TimedQApplication):
def testQDeclarativeViewList(self):
view = QDeclarativeView()
dataList = QStringList(["Item 1", "Item 2", "Item 3", "Item 4"])
ctxt = view.rootContext()
ctxt.setContextProperty("myModel", dataList)
url = QUrl.fromLocalFile(adjust_filename('view.qml', __file__))
view.setSource(url)
view.show()
self.assertEqual(view.status(), QDeclarativeView.Ready)
self.app.exec_()
if __name__ == '__main__':
unittest.main()
Remove use of deprecated types.
Reviewer: Hugo Parente Lima <e250cbdf6b5a11059e9d944a6e5e9282be80d14c@openbossa.org>,
Luciano Wolf <luciano.wolf@openbossa.org>'''Test cases for QDeclarativeView'''
import unittest
from PySide.QtCore import QUrl
from PySide.QtDeclarative import QDeclarativeView
from helper import adjust_filename, TimedQApplication
class TestQDeclarativeView(TimedQApplication):
def testQDeclarativeViewList(self):
view = QDeclarativeView()
dataList = ["Item 1", "Item 2", "Item 3", "Item 4"]
ctxt = view.rootContext()
ctxt.setContextProperty("myModel", dataList)
url = QUrl.fromLocalFile(adjust_filename('view.qml', __file__))
view.setSource(url)
view.show()
self.assertEqual(view.status(), QDeclarativeView.Ready)
self.app.exec_()
if __name__ == '__main__':
unittest.main()
|
<commit_before>'''Test cases for QDeclarativeView'''
import unittest
from PySide.QtCore import QUrl, QStringList, QVariant
from PySide.QtGui import QPushButton
from PySide.QtDeclarative import QDeclarativeView
from helper import adjust_filename, TimedQApplication
class TestQDeclarativeView(TimedQApplication):
def testQDeclarativeViewList(self):
view = QDeclarativeView()
dataList = QStringList(["Item 1", "Item 2", "Item 3", "Item 4"])
ctxt = view.rootContext()
ctxt.setContextProperty("myModel", dataList)
url = QUrl.fromLocalFile(adjust_filename('view.qml', __file__))
view.setSource(url)
view.show()
self.assertEqual(view.status(), QDeclarativeView.Ready)
self.app.exec_()
if __name__ == '__main__':
unittest.main()
<commit_msg>Remove use of deprecated types.
Reviewer: Hugo Parente Lima <e250cbdf6b5a11059e9d944a6e5e9282be80d14c@openbossa.org>,
Luciano Wolf <luciano.wolf@openbossa.org><commit_after>'''Test cases for QDeclarativeView'''
import unittest
from PySide.QtCore import QUrl
from PySide.QtDeclarative import QDeclarativeView
from helper import adjust_filename, TimedQApplication
class TestQDeclarativeView(TimedQApplication):
def testQDeclarativeViewList(self):
view = QDeclarativeView()
dataList = ["Item 1", "Item 2", "Item 3", "Item 4"]
ctxt = view.rootContext()
ctxt.setContextProperty("myModel", dataList)
url = QUrl.fromLocalFile(adjust_filename('view.qml', __file__))
view.setSource(url)
view.show()
self.assertEqual(view.status(), QDeclarativeView.Ready)
self.app.exec_()
if __name__ == '__main__':
unittest.main()
|
de00ac22c8becefa8b2538416e4e5cc8b36ecc40
|
utils/exceptions.py
|
utils/exceptions.py
|
import json
from werkzeug.exceptions import HTTPException
from werkzeug.utils import escape
from utils.views import serialize_response, get_request_type
class BaseHttpException(HTTPException):
_template = {
'json': {
'description': lambda description: description,
'headers': ('Content-Type', 'application/json'),
'body': lambda code, name, description: json.dumps({'message': description, 'status': 'error'}),
},
'xml': {
'description': lambda description: description,
'headers': ('Content-Type', 'application/xml'),
'body': lambda code, name, description: description,
'': lambda code, name, description: description,
},
'html': {
'description': lambda description: u'<p>%s</p>' % escape(description),
'headers': ('Content-Type', 'application/json'),
'body': lambda code, name, description: (
u'<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">\n'
u'<title>%(code)s %(name)s</title>\n'
u'<h1>%(name)s</h1>\n'
u'%(description)s\n'
) % {
'code': code,
'name': name,
'description': description
}
}
}
def __init__(self, description=None, response=None):
self.request_type = get_request_type()
description = serialize_response(self.request_type, description)
super(BaseHttpException, self).__init__(description, response)
def get_description(self, environ=None):
"""Get the description."""
return self._template[self.request_type]['description'](self.description)
def get_body(self, environ=None):
"""Get the body"""
return self._template[self.request_type]['body'](self.code, escape(self.name), self.get_description(environ))
def get_headers(self, environ=None):
"""Get a list of headers."""
return [self._template[self.request_type]['headers']]
class HttpBadRequest(BaseHttpException):
code = 400
class HttpUnauthorized(BaseHttpException):
code = 401
class HttpPaymentRequired(BaseHttpException):
code = 402
class HttpForbidden(BaseHttpException):
code = 403
class HttpNotFound(BaseHttpException):
code = 404
class HttpMethodNotAllowed(BaseHttpException):
code = 405
class HttpServiceUnavaible(BaseHttpException):
code = 503
|
Raise exception depending request type
|
Raise exception depending request type
|
Python
|
apache-2.0
|
vtemian/kruncher
|
Raise exception depending request type
|
import json
from werkzeug.exceptions import HTTPException
from werkzeug.utils import escape
from utils.views import serialize_response, get_request_type
class BaseHttpException(HTTPException):
_template = {
'json': {
'description': lambda description: description,
'headers': ('Content-Type', 'application/json'),
'body': lambda code, name, description: json.dumps({'message': description, 'status': 'error'}),
},
'xml': {
'description': lambda description: description,
'headers': ('Content-Type', 'application/xml'),
'body': lambda code, name, description: description,
'': lambda code, name, description: description,
},
'html': {
'description': lambda description: u'<p>%s</p>' % escape(description),
'headers': ('Content-Type', 'application/json'),
'body': lambda code, name, description: (
u'<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">\n'
u'<title>%(code)s %(name)s</title>\n'
u'<h1>%(name)s</h1>\n'
u'%(description)s\n'
) % {
'code': code,
'name': name,
'description': description
}
}
}
def __init__(self, description=None, response=None):
self.request_type = get_request_type()
description = serialize_response(self.request_type, description)
super(BaseHttpException, self).__init__(description, response)
def get_description(self, environ=None):
"""Get the description."""
return self._template[self.request_type]['description'](self.description)
def get_body(self, environ=None):
"""Get the body"""
return self._template[self.request_type]['body'](self.code, escape(self.name), self.get_description(environ))
def get_headers(self, environ=None):
"""Get a list of headers."""
return [self._template[self.request_type]['headers']]
class HttpBadRequest(BaseHttpException):
code = 400
class HttpUnauthorized(BaseHttpException):
code = 401
class HttpPaymentRequired(BaseHttpException):
code = 402
class HttpForbidden(BaseHttpException):
code = 403
class HttpNotFound(BaseHttpException):
code = 404
class HttpMethodNotAllowed(BaseHttpException):
code = 405
class HttpServiceUnavaible(BaseHttpException):
code = 503
|
<commit_before><commit_msg>Raise exception depending request type<commit_after>
|
import json
from werkzeug.exceptions import HTTPException
from werkzeug.utils import escape
from utils.views import serialize_response, get_request_type
class BaseHttpException(HTTPException):
_template = {
'json': {
'description': lambda description: description,
'headers': ('Content-Type', 'application/json'),
'body': lambda code, name, description: json.dumps({'message': description, 'status': 'error'}),
},
'xml': {
'description': lambda description: description,
'headers': ('Content-Type', 'application/xml'),
'body': lambda code, name, description: description,
'': lambda code, name, description: description,
},
'html': {
'description': lambda description: u'<p>%s</p>' % escape(description),
'headers': ('Content-Type', 'application/json'),
'body': lambda code, name, description: (
u'<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">\n'
u'<title>%(code)s %(name)s</title>\n'
u'<h1>%(name)s</h1>\n'
u'%(description)s\n'
) % {
'code': code,
'name': name,
'description': description
}
}
}
def __init__(self, description=None, response=None):
self.request_type = get_request_type()
description = serialize_response(self.request_type, description)
super(BaseHttpException, self).__init__(description, response)
def get_description(self, environ=None):
"""Get the description."""
return self._template[self.request_type]['description'](self.description)
def get_body(self, environ=None):
"""Get the body"""
return self._template[self.request_type]['body'](self.code, escape(self.name), self.get_description(environ))
def get_headers(self, environ=None):
"""Get a list of headers."""
return [self._template[self.request_type]['headers']]
class HttpBadRequest(BaseHttpException):
code = 400
class HttpUnauthorized(BaseHttpException):
code = 401
class HttpPaymentRequired(BaseHttpException):
code = 402
class HttpForbidden(BaseHttpException):
code = 403
class HttpNotFound(BaseHttpException):
code = 404
class HttpMethodNotAllowed(BaseHttpException):
code = 405
class HttpServiceUnavaible(BaseHttpException):
code = 503
|
Raise exception depending request typeimport json
from werkzeug.exceptions import HTTPException
from werkzeug.utils import escape
from utils.views import serialize_response, get_request_type
class BaseHttpException(HTTPException):
_template = {
'json': {
'description': lambda description: description,
'headers': ('Content-Type', 'application/json'),
'body': lambda code, name, description: json.dumps({'message': description, 'status': 'error'}),
},
'xml': {
'description': lambda description: description,
'headers': ('Content-Type', 'application/xml'),
'body': lambda code, name, description: description,
'': lambda code, name, description: description,
},
'html': {
'description': lambda description: u'<p>%s</p>' % escape(description),
'headers': ('Content-Type', 'application/json'),
'body': lambda code, name, description: (
u'<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">\n'
u'<title>%(code)s %(name)s</title>\n'
u'<h1>%(name)s</h1>\n'
u'%(description)s\n'
) % {
'code': code,
'name': name,
'description': description
}
}
}
def __init__(self, description=None, response=None):
self.request_type = get_request_type()
description = serialize_response(self.request_type, description)
super(BaseHttpException, self).__init__(description, response)
def get_description(self, environ=None):
"""Get the description."""
return self._template[self.request_type]['description'](self.description)
def get_body(self, environ=None):
"""Get the body"""
return self._template[self.request_type]['body'](self.code, escape(self.name), self.get_description(environ))
def get_headers(self, environ=None):
"""Get a list of headers."""
return [self._template[self.request_type]['headers']]
class HttpBadRequest(BaseHttpException):
code = 400
class HttpUnauthorized(BaseHttpException):
code = 401
class HttpPaymentRequired(BaseHttpException):
code = 402
class HttpForbidden(BaseHttpException):
code = 403
class HttpNotFound(BaseHttpException):
code = 404
class HttpMethodNotAllowed(BaseHttpException):
code = 405
class HttpServiceUnavaible(BaseHttpException):
code = 503
|
<commit_before><commit_msg>Raise exception depending request type<commit_after>import json
from werkzeug.exceptions import HTTPException
from werkzeug.utils import escape
from utils.views import serialize_response, get_request_type
class BaseHttpException(HTTPException):
_template = {
'json': {
'description': lambda description: description,
'headers': ('Content-Type', 'application/json'),
'body': lambda code, name, description: json.dumps({'message': description, 'status': 'error'}),
},
'xml': {
'description': lambda description: description,
'headers': ('Content-Type', 'application/xml'),
'body': lambda code, name, description: description,
'': lambda code, name, description: description,
},
'html': {
'description': lambda description: u'<p>%s</p>' % escape(description),
'headers': ('Content-Type', 'application/json'),
'body': lambda code, name, description: (
u'<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">\n'
u'<title>%(code)s %(name)s</title>\n'
u'<h1>%(name)s</h1>\n'
u'%(description)s\n'
) % {
'code': code,
'name': name,
'description': description
}
}
}
def __init__(self, description=None, response=None):
self.request_type = get_request_type()
description = serialize_response(self.request_type, description)
super(BaseHttpException, self).__init__(description, response)
def get_description(self, environ=None):
"""Get the description."""
return self._template[self.request_type]['description'](self.description)
def get_body(self, environ=None):
"""Get the body"""
return self._template[self.request_type]['body'](self.code, escape(self.name), self.get_description(environ))
def get_headers(self, environ=None):
"""Get a list of headers."""
return [self._template[self.request_type]['headers']]
class HttpBadRequest(BaseHttpException):
code = 400
class HttpUnauthorized(BaseHttpException):
code = 401
class HttpPaymentRequired(BaseHttpException):
code = 402
class HttpForbidden(BaseHttpException):
code = 403
class HttpNotFound(BaseHttpException):
code = 404
class HttpMethodNotAllowed(BaseHttpException):
code = 405
class HttpServiceUnavaible(BaseHttpException):
code = 503
|
|
ab5fd972b0fcd6d1e418ab00058b6fd31014d38f
|
migrations/versions/0256_set_postage_tmplt_hstr.py
|
migrations/versions/0256_set_postage_tmplt_hstr.py
|
"""
Revision ID: 0256_set_postage_tmplt_hstr
Revises: 0255_another_letter_org
Create Date: 2019-02-05 14:51:30.808067
"""
from alembic import op
import sqlalchemy as sa
revision = '0256_set_postage_tmplt_hstr'
down_revision = '0255_another_letter_org'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.execute(
"""UPDATE templates_history SET postage = services.postage
FROM services WHERE template_type = 'letter' AND service_id = services.id"""
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.execute("UPDATE templates_history SET postage = null WHERE template_type = 'letter'")
# ### end Alembic commands ###
|
Migrate postage into templates_history table
|
Migrate postage into templates_history table
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
Migrate postage into templates_history table
|
"""
Revision ID: 0256_set_postage_tmplt_hstr
Revises: 0255_another_letter_org
Create Date: 2019-02-05 14:51:30.808067
"""
from alembic import op
import sqlalchemy as sa
revision = '0256_set_postage_tmplt_hstr'
down_revision = '0255_another_letter_org'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.execute(
"""UPDATE templates_history SET postage = services.postage
FROM services WHERE template_type = 'letter' AND service_id = services.id"""
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.execute("UPDATE templates_history SET postage = null WHERE template_type = 'letter'")
# ### end Alembic commands ###
|
<commit_before><commit_msg>Migrate postage into templates_history table<commit_after>
|
"""
Revision ID: 0256_set_postage_tmplt_hstr
Revises: 0255_another_letter_org
Create Date: 2019-02-05 14:51:30.808067
"""
from alembic import op
import sqlalchemy as sa
revision = '0256_set_postage_tmplt_hstr'
down_revision = '0255_another_letter_org'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.execute(
"""UPDATE templates_history SET postage = services.postage
FROM services WHERE template_type = 'letter' AND service_id = services.id"""
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.execute("UPDATE templates_history SET postage = null WHERE template_type = 'letter'")
# ### end Alembic commands ###
|
Migrate postage into templates_history table"""
Revision ID: 0256_set_postage_tmplt_hstr
Revises: 0255_another_letter_org
Create Date: 2019-02-05 14:51:30.808067
"""
from alembic import op
import sqlalchemy as sa
revision = '0256_set_postage_tmplt_hstr'
down_revision = '0255_another_letter_org'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.execute(
"""UPDATE templates_history SET postage = services.postage
FROM services WHERE template_type = 'letter' AND service_id = services.id"""
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.execute("UPDATE templates_history SET postage = null WHERE template_type = 'letter'")
# ### end Alembic commands ###
|
<commit_before><commit_msg>Migrate postage into templates_history table<commit_after>"""
Revision ID: 0256_set_postage_tmplt_hstr
Revises: 0255_another_letter_org
Create Date: 2019-02-05 14:51:30.808067
"""
from alembic import op
import sqlalchemy as sa
revision = '0256_set_postage_tmplt_hstr'
down_revision = '0255_another_letter_org'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.execute(
"""UPDATE templates_history SET postage = services.postage
FROM services WHERE template_type = 'letter' AND service_id = services.id"""
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.execute("UPDATE templates_history SET postage = null WHERE template_type = 'letter'")
# ### end Alembic commands ###
|
|
8113eefe3e80e472ee706c17306bf692c40115df
|
tests/dftModel_test.py
|
tests/dftModel_test.py
|
import numpy as np
from scipy.signal import get_window
import smst.models.dftModel as DFT
def test_simple_sinusoid():
window_size = 1024
t = np.linspace(0, 1, window_size)
x = np.cos(4 * 2 * np.pi * t)
window = get_window('hamming', window_size)
mag_spectrum, phase_spectrum = DFT.dftAnal(x, window, window_size)
x_reconstructed = DFT.dftSynth(mag_spectrum, phase_spectrum, window_size) * sum(window)
assert mag_spectrum.argmax() == 4
assert round(mag_spectrum.max()) == -6
assert round(mag_spectrum.mean()) == -147
assert np.allclose(x_reconstructed, x * window)
|
Add a very simple test for dftModel analysis + synthesis.
|
Add a very simple test for dftModel analysis + synthesis.
|
Python
|
agpl-3.0
|
bzamecnik/sms-tools,bzamecnik/sms-tools,bzamecnik/sms-tools
|
Add a very simple test for dftModel analysis + synthesis.
|
import numpy as np
from scipy.signal import get_window
import smst.models.dftModel as DFT
def test_simple_sinusoid():
window_size = 1024
t = np.linspace(0, 1, window_size)
x = np.cos(4 * 2 * np.pi * t)
window = get_window('hamming', window_size)
mag_spectrum, phase_spectrum = DFT.dftAnal(x, window, window_size)
x_reconstructed = DFT.dftSynth(mag_spectrum, phase_spectrum, window_size) * sum(window)
assert mag_spectrum.argmax() == 4
assert round(mag_spectrum.max()) == -6
assert round(mag_spectrum.mean()) == -147
assert np.allclose(x_reconstructed, x * window)
|
<commit_before><commit_msg>Add a very simple test for dftModel analysis + synthesis.<commit_after>
|
import numpy as np
from scipy.signal import get_window
import smst.models.dftModel as DFT
def test_simple_sinusoid():
window_size = 1024
t = np.linspace(0, 1, window_size)
x = np.cos(4 * 2 * np.pi * t)
window = get_window('hamming', window_size)
mag_spectrum, phase_spectrum = DFT.dftAnal(x, window, window_size)
x_reconstructed = DFT.dftSynth(mag_spectrum, phase_spectrum, window_size) * sum(window)
assert mag_spectrum.argmax() == 4
assert round(mag_spectrum.max()) == -6
assert round(mag_spectrum.mean()) == -147
assert np.allclose(x_reconstructed, x * window)
|
Add a very simple test for dftModel analysis + synthesis.import numpy as np
from scipy.signal import get_window
import smst.models.dftModel as DFT
def test_simple_sinusoid():
window_size = 1024
t = np.linspace(0, 1, window_size)
x = np.cos(4 * 2 * np.pi * t)
window = get_window('hamming', window_size)
mag_spectrum, phase_spectrum = DFT.dftAnal(x, window, window_size)
x_reconstructed = DFT.dftSynth(mag_spectrum, phase_spectrum, window_size) * sum(window)
assert mag_spectrum.argmax() == 4
assert round(mag_spectrum.max()) == -6
assert round(mag_spectrum.mean()) == -147
assert np.allclose(x_reconstructed, x * window)
|
<commit_before><commit_msg>Add a very simple test for dftModel analysis + synthesis.<commit_after>import numpy as np
from scipy.signal import get_window
import smst.models.dftModel as DFT
def test_simple_sinusoid():
window_size = 1024
t = np.linspace(0, 1, window_size)
x = np.cos(4 * 2 * np.pi * t)
window = get_window('hamming', window_size)
mag_spectrum, phase_spectrum = DFT.dftAnal(x, window, window_size)
x_reconstructed = DFT.dftSynth(mag_spectrum, phase_spectrum, window_size) * sum(window)
assert mag_spectrum.argmax() == 4
assert round(mag_spectrum.max()) == -6
assert round(mag_spectrum.mean()) == -147
assert np.allclose(x_reconstructed, x * window)
|
|
295fd3cf2c8e7a37b300798ee96462ab9d3e7cd9
|
flstats/flstats_tests.py
|
flstats/flstats_tests.py
|
# -*- coding: utf-8 -*-
"""
flstats test script
~~~~~~~~~~~~~~~~~~~
This script is intended to test the flstats module.
"""
import json
import random
import unittest
from flstats import statistics, webstatistics
from flask import Flask
class FlstatsTestCase(unittest.TestCase):
def setUp(self):
"""Creates a Flask test app and registers two routes
together with the flstats blueprint.
"""
self.app = Flask(__name__)
self.app.register_blueprint(webstatistics)
self.client = self.app.test_client()
self.urls = ['http://localhost/url1', 'http://localhost/url2']
@self.app.route('/url1')
@statistics
def url1():
return random.choice(range(0, 1000))
@self.app.route('/url2')
@statistics
def url2():
return random.choice(range(0, 1000))
def test_url1(self):
"""Sends a unique request to one URL and tests the
returned statistics.
"""
self.client.get('/url1')
response = self.client.get('/flstats/')
data = json.loads(response.data)
stats = data['stats']
# /flstats access tests
self.assertEqual(response.status, '200 OK')
# Statistics tests
self.assertEqual(len(stats), 1)
stat = stats.pop()
self.assertEqual(stat['url'], 'http://localhost/url1')
self.assertEqual(stat['count'], 1)
self.assertTrue(stat['min'] == stat['avg'] == stat['max'])
def test_url2(self):
"""Sends requests to both URLs and tests the
returned statistics.
"""
for i in range(1, 10):
self.client.get('/url1')
for i in range(0, 10):
self.client.get('/url2')
response = self.client.get('/flstats/')
data = json.loads(response.data)
stats = data['stats']
# /flstats access tests
self.assertEqual(response.status, '200 OK')
# Statistics tests
self.assertEqual(len(stats), 2)
for stat in stats:
self.assertTrue(stat['url'] in self.urls)
self.assertEqual(stat['count'], 10)
self.assertTrue(stat['min'] <= stat['avg'] <= stat['max'])
if __name__ == '__main__':
unittest.main()
|
Add a test script for the flstats module
|
Add a test script for the flstats module
|
Python
|
bsd-3-clause
|
yannlambret/flstats
|
Add a test script for the flstats module
|
# -*- coding: utf-8 -*-
"""
flstats test script
~~~~~~~~~~~~~~~~~~~
This script is intended to test the flstats module.
"""
import json
import random
import unittest
from flstats import statistics, webstatistics
from flask import Flask
class FlstatsTestCase(unittest.TestCase):
def setUp(self):
"""Creates a Flask test app and registers two routes
together with the flstats blueprint.
"""
self.app = Flask(__name__)
self.app.register_blueprint(webstatistics)
self.client = self.app.test_client()
self.urls = ['http://localhost/url1', 'http://localhost/url2']
@self.app.route('/url1')
@statistics
def url1():
return random.choice(range(0, 1000))
@self.app.route('/url2')
@statistics
def url2():
return random.choice(range(0, 1000))
def test_url1(self):
"""Sends a unique request to one URL and tests the
returned statistics.
"""
self.client.get('/url1')
response = self.client.get('/flstats/')
data = json.loads(response.data)
stats = data['stats']
# /flstats access tests
self.assertEqual(response.status, '200 OK')
# Statistics tests
self.assertEqual(len(stats), 1)
stat = stats.pop()
self.assertEqual(stat['url'], 'http://localhost/url1')
self.assertEqual(stat['count'], 1)
self.assertTrue(stat['min'] == stat['avg'] == stat['max'])
def test_url2(self):
"""Sends requests to both URLs and tests the
returned statistics.
"""
for i in range(1, 10):
self.client.get('/url1')
for i in range(0, 10):
self.client.get('/url2')
response = self.client.get('/flstats/')
data = json.loads(response.data)
stats = data['stats']
# /flstats access tests
self.assertEqual(response.status, '200 OK')
# Statistics tests
self.assertEqual(len(stats), 2)
for stat in stats:
self.assertTrue(stat['url'] in self.urls)
self.assertEqual(stat['count'], 10)
self.assertTrue(stat['min'] <= stat['avg'] <= stat['max'])
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add a test script for the flstats module<commit_after>
|
# -*- coding: utf-8 -*-
"""
flstats test script
~~~~~~~~~~~~~~~~~~~
This script is intended to test the flstats module.
"""
import json
import random
import unittest
from flstats import statistics, webstatistics
from flask import Flask
class FlstatsTestCase(unittest.TestCase):
def setUp(self):
"""Creates a Flask test app and registers two routes
together with the flstats blueprint.
"""
self.app = Flask(__name__)
self.app.register_blueprint(webstatistics)
self.client = self.app.test_client()
self.urls = ['http://localhost/url1', 'http://localhost/url2']
@self.app.route('/url1')
@statistics
def url1():
return random.choice(range(0, 1000))
@self.app.route('/url2')
@statistics
def url2():
return random.choice(range(0, 1000))
def test_url1(self):
"""Sends a unique request to one URL and tests the
returned statistics.
"""
self.client.get('/url1')
response = self.client.get('/flstats/')
data = json.loads(response.data)
stats = data['stats']
# /flstats access tests
self.assertEqual(response.status, '200 OK')
# Statistics tests
self.assertEqual(len(stats), 1)
stat = stats.pop()
self.assertEqual(stat['url'], 'http://localhost/url1')
self.assertEqual(stat['count'], 1)
self.assertTrue(stat['min'] == stat['avg'] == stat['max'])
def test_url2(self):
"""Sends requests to both URLs and tests the
returned statistics.
"""
for i in range(1, 10):
self.client.get('/url1')
for i in range(0, 10):
self.client.get('/url2')
response = self.client.get('/flstats/')
data = json.loads(response.data)
stats = data['stats']
# /flstats access tests
self.assertEqual(response.status, '200 OK')
# Statistics tests
self.assertEqual(len(stats), 2)
for stat in stats:
self.assertTrue(stat['url'] in self.urls)
self.assertEqual(stat['count'], 10)
self.assertTrue(stat['min'] <= stat['avg'] <= stat['max'])
if __name__ == '__main__':
unittest.main()
|
Add a test script for the flstats module# -*- coding: utf-8 -*-
"""
flstats test script
~~~~~~~~~~~~~~~~~~~
This script is intended to test the flstats module.
"""
import json
import random
import unittest
from flstats import statistics, webstatistics
from flask import Flask
class FlstatsTestCase(unittest.TestCase):
def setUp(self):
"""Creates a Flask test app and registers two routes
together with the flstats blueprint.
"""
self.app = Flask(__name__)
self.app.register_blueprint(webstatistics)
self.client = self.app.test_client()
self.urls = ['http://localhost/url1', 'http://localhost/url2']
@self.app.route('/url1')
@statistics
def url1():
return random.choice(range(0, 1000))
@self.app.route('/url2')
@statistics
def url2():
return random.choice(range(0, 1000))
def test_url1(self):
"""Sends a unique request to one URL and tests the
returned statistics.
"""
self.client.get('/url1')
response = self.client.get('/flstats/')
data = json.loads(response.data)
stats = data['stats']
# /flstats access tests
self.assertEqual(response.status, '200 OK')
# Statistics tests
self.assertEqual(len(stats), 1)
stat = stats.pop()
self.assertEqual(stat['url'], 'http://localhost/url1')
self.assertEqual(stat['count'], 1)
self.assertTrue(stat['min'] == stat['avg'] == stat['max'])
def test_url2(self):
"""Sends requests to both URLs and tests the
returned statistics.
"""
for i in range(1, 10):
self.client.get('/url1')
for i in range(0, 10):
self.client.get('/url2')
response = self.client.get('/flstats/')
data = json.loads(response.data)
stats = data['stats']
# /flstats access tests
self.assertEqual(response.status, '200 OK')
# Statistics tests
self.assertEqual(len(stats), 2)
for stat in stats:
self.assertTrue(stat['url'] in self.urls)
self.assertEqual(stat['count'], 10)
self.assertTrue(stat['min'] <= stat['avg'] <= stat['max'])
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add a test script for the flstats module<commit_after># -*- coding: utf-8 -*-
"""
flstats test script
~~~~~~~~~~~~~~~~~~~
This script is intended to test the flstats module.
"""
import json
import random
import unittest
from flstats import statistics, webstatistics
from flask import Flask
class FlstatsTestCase(unittest.TestCase):
def setUp(self):
"""Creates a Flask test app and registers two routes
together with the flstats blueprint.
"""
self.app = Flask(__name__)
self.app.register_blueprint(webstatistics)
self.client = self.app.test_client()
self.urls = ['http://localhost/url1', 'http://localhost/url2']
@self.app.route('/url1')
@statistics
def url1():
return random.choice(range(0, 1000))
@self.app.route('/url2')
@statistics
def url2():
return random.choice(range(0, 1000))
def test_url1(self):
"""Sends a unique request to one URL and tests the
returned statistics.
"""
self.client.get('/url1')
response = self.client.get('/flstats/')
data = json.loads(response.data)
stats = data['stats']
# /flstats access tests
self.assertEqual(response.status, '200 OK')
# Statistics tests
self.assertEqual(len(stats), 1)
stat = stats.pop()
self.assertEqual(stat['url'], 'http://localhost/url1')
self.assertEqual(stat['count'], 1)
self.assertTrue(stat['min'] == stat['avg'] == stat['max'])
def test_url2(self):
"""Sends requests to both URLs and tests the
returned statistics.
"""
for i in range(1, 10):
self.client.get('/url1')
for i in range(0, 10):
self.client.get('/url2')
response = self.client.get('/flstats/')
data = json.loads(response.data)
stats = data['stats']
# /flstats access tests
self.assertEqual(response.status, '200 OK')
# Statistics tests
self.assertEqual(len(stats), 2)
for stat in stats:
self.assertTrue(stat['url'] in self.urls)
self.assertEqual(stat['count'], 10)
self.assertTrue(stat['min'] <= stat['avg'] <= stat['max'])
if __name__ == '__main__':
unittest.main()
|
|
1655a878393dcd1424927f5c3b27e5963769956e
|
motion_tracker/data_setup/move_bad_images.py
|
motion_tracker/data_setup/move_bad_images.py
|
import cv2
import numpy as np
import os
import shutil
import sys
from os.path import join
from tqdm import tqdm
if __name__ == "__main__":
source_dir = sys.argv[1]
dest_dir = sys.argv[2]
os.makedirs(dest_dir, exist_ok=True)
fnames = os.listdir(source_dir)
for fname in tqdm(fnames):
img_path = join(source_dir, fname)
try:
img = cv2.imread(img_path)
assert isinstance(img, np.ndarray)
except:
dest_path = join(dest_dir, fname)
shutil.move(img_path, dest_path)
n_files_in_dest = len(os.listdir(dest_dir))
print('Files in destination: ', n_files_in_dest)
|
Move bad images to new dir.
|
Move bad images to new dir.
|
Python
|
mit
|
dansbecker/motion-tracking
|
Move bad images to new dir.
|
import cv2
import numpy as np
import os
import shutil
import sys
from os.path import join
from tqdm import tqdm
if __name__ == "__main__":
source_dir = sys.argv[1]
dest_dir = sys.argv[2]
os.makedirs(dest_dir, exist_ok=True)
fnames = os.listdir(source_dir)
for fname in tqdm(fnames):
img_path = join(source_dir, fname)
try:
img = cv2.imread(img_path)
assert isinstance(img, np.ndarray)
except:
dest_path = join(dest_dir, fname)
shutil.move(img_path, dest_path)
n_files_in_dest = len(os.listdir(dest_dir))
print('Files in destination: ', n_files_in_dest)
|
<commit_before><commit_msg>Move bad images to new dir.<commit_after>
|
import cv2
import numpy as np
import os
import shutil
import sys
from os.path import join
from tqdm import tqdm
if __name__ == "__main__":
source_dir = sys.argv[1]
dest_dir = sys.argv[2]
os.makedirs(dest_dir, exist_ok=True)
fnames = os.listdir(source_dir)
for fname in tqdm(fnames):
img_path = join(source_dir, fname)
try:
img = cv2.imread(img_path)
assert isinstance(img, np.ndarray)
except:
dest_path = join(dest_dir, fname)
shutil.move(img_path, dest_path)
n_files_in_dest = len(os.listdir(dest_dir))
print('Files in destination: ', n_files_in_dest)
|
Move bad images to new dir.import cv2
import numpy as np
import os
import shutil
import sys
from os.path import join
from tqdm import tqdm
if __name__ == "__main__":
source_dir = sys.argv[1]
dest_dir = sys.argv[2]
os.makedirs(dest_dir, exist_ok=True)
fnames = os.listdir(source_dir)
for fname in tqdm(fnames):
img_path = join(source_dir, fname)
try:
img = cv2.imread(img_path)
assert isinstance(img, np.ndarray)
except:
dest_path = join(dest_dir, fname)
shutil.move(img_path, dest_path)
n_files_in_dest = len(os.listdir(dest_dir))
print('Files in destination: ', n_files_in_dest)
|
<commit_before><commit_msg>Move bad images to new dir.<commit_after>import cv2
import numpy as np
import os
import shutil
import sys
from os.path import join
from tqdm import tqdm
if __name__ == "__main__":
source_dir = sys.argv[1]
dest_dir = sys.argv[2]
os.makedirs(dest_dir, exist_ok=True)
fnames = os.listdir(source_dir)
for fname in tqdm(fnames):
img_path = join(source_dir, fname)
try:
img = cv2.imread(img_path)
assert isinstance(img, np.ndarray)
except:
dest_path = join(dest_dir, fname)
shutil.move(img_path, dest_path)
n_files_in_dest = len(os.listdir(dest_dir))
print('Files in destination: ', n_files_in_dest)
|
|
42ea5a982216285d9844c1405a37eb8c14b18c12
|
mrequests/examples/parse_response_headers.py
|
mrequests/examples/parse_response_headers.py
|
import mrequests
class MyResponse(mrequests.Response):
def __init__(self, *args, **kw):
super().__init__(*args, **kw)
self.headers = {}
def add_header(self, data):
# let base class handle headers, which influence response parsing
self._parse_header(data)
name, value = data.decode('utf-8').rstrip('\r\n').split(':', 1)
self.headers[name.lower()] = value.strip()
def request(*args, **kw):
kw.setdefault('response_class', MyResponse)
return mrequests.request(*args, **kw)
host = "http://httpbin.org/"
#host = "http://localhost/"
url = host + "get"
r = request("GET", url)
if r.status_code == 200:
print("Response headers:")
print("=================\n")
for name, value in r.headers.items():
print("{}: {}".format(name, value))
print()
print("Response body:")
print("==============\n")
print(r.json())
else:
print("Request failed. Status: {}".format(resp.status_code))
|
Add example for custom response header parsing
|
Add example for custom response header parsing
Signed-off-by: Christopher Arndt <711c73f64afdce07b7e38039a96d2224209e9a6c@chrisarndt.de>
|
Python
|
mit
|
SpotlightKid/micropython-stm-lib
|
Add example for custom response header parsing
Signed-off-by: Christopher Arndt <711c73f64afdce07b7e38039a96d2224209e9a6c@chrisarndt.de>
|
import mrequests
class MyResponse(mrequests.Response):
def __init__(self, *args, **kw):
super().__init__(*args, **kw)
self.headers = {}
def add_header(self, data):
# let base class handle headers, which influence response parsing
self._parse_header(data)
name, value = data.decode('utf-8').rstrip('\r\n').split(':', 1)
self.headers[name.lower()] = value.strip()
def request(*args, **kw):
kw.setdefault('response_class', MyResponse)
return mrequests.request(*args, **kw)
host = "http://httpbin.org/"
#host = "http://localhost/"
url = host + "get"
r = request("GET", url)
if r.status_code == 200:
print("Response headers:")
print("=================\n")
for name, value in r.headers.items():
print("{}: {}".format(name, value))
print()
print("Response body:")
print("==============\n")
print(r.json())
else:
print("Request failed. Status: {}".format(resp.status_code))
|
<commit_before><commit_msg>Add example for custom response header parsing
Signed-off-by: Christopher Arndt <711c73f64afdce07b7e38039a96d2224209e9a6c@chrisarndt.de><commit_after>
|
import mrequests
class MyResponse(mrequests.Response):
def __init__(self, *args, **kw):
super().__init__(*args, **kw)
self.headers = {}
def add_header(self, data):
# let base class handle headers, which influence response parsing
self._parse_header(data)
name, value = data.decode('utf-8').rstrip('\r\n').split(':', 1)
self.headers[name.lower()] = value.strip()
def request(*args, **kw):
kw.setdefault('response_class', MyResponse)
return mrequests.request(*args, **kw)
host = "http://httpbin.org/"
#host = "http://localhost/"
url = host + "get"
r = request("GET", url)
if r.status_code == 200:
print("Response headers:")
print("=================\n")
for name, value in r.headers.items():
print("{}: {}".format(name, value))
print()
print("Response body:")
print("==============\n")
print(r.json())
else:
print("Request failed. Status: {}".format(resp.status_code))
|
Add example for custom response header parsing
Signed-off-by: Christopher Arndt <711c73f64afdce07b7e38039a96d2224209e9a6c@chrisarndt.de>import mrequests
class MyResponse(mrequests.Response):
def __init__(self, *args, **kw):
super().__init__(*args, **kw)
self.headers = {}
def add_header(self, data):
# let base class handle headers, which influence response parsing
self._parse_header(data)
name, value = data.decode('utf-8').rstrip('\r\n').split(':', 1)
self.headers[name.lower()] = value.strip()
def request(*args, **kw):
kw.setdefault('response_class', MyResponse)
return mrequests.request(*args, **kw)
host = "http://httpbin.org/"
#host = "http://localhost/"
url = host + "get"
r = request("GET", url)
if r.status_code == 200:
print("Response headers:")
print("=================\n")
for name, value in r.headers.items():
print("{}: {}".format(name, value))
print()
print("Response body:")
print("==============\n")
print(r.json())
else:
print("Request failed. Status: {}".format(resp.status_code))
|
<commit_before><commit_msg>Add example for custom response header parsing
Signed-off-by: Christopher Arndt <711c73f64afdce07b7e38039a96d2224209e9a6c@chrisarndt.de><commit_after>import mrequests
class MyResponse(mrequests.Response):
def __init__(self, *args, **kw):
super().__init__(*args, **kw)
self.headers = {}
def add_header(self, data):
# let base class handle headers, which influence response parsing
self._parse_header(data)
name, value = data.decode('utf-8').rstrip('\r\n').split(':', 1)
self.headers[name.lower()] = value.strip()
def request(*args, **kw):
kw.setdefault('response_class', MyResponse)
return mrequests.request(*args, **kw)
host = "http://httpbin.org/"
#host = "http://localhost/"
url = host + "get"
r = request("GET", url)
if r.status_code == 200:
print("Response headers:")
print("=================\n")
for name, value in r.headers.items():
print("{}: {}".format(name, value))
print()
print("Response body:")
print("==============\n")
print(r.json())
else:
print("Request failed. Status: {}".format(resp.status_code))
|
|
5913a413cf5f39001a82389337c4c9b8bea2c2b7
|
scripts/append_classified_to_lower_ranks.py
|
scripts/append_classified_to_lower_ranks.py
|
#!/usr/bin/env python
import pandas as pd, sys
def append_classified(df):
## Append classified name to each Unclassified rank in turn
p_unc = df[df.phylum=="Unclassified"]
df.loc[p_unc.index,"phylum"] += "."+df.loc[p_unc.index,"superkingdom"]
c_unc = df[df["class"]=="Unclassified"]
df.loc[c_unc.index,"class"] += "."+df.loc[c_unc.index,"phylum"]
o_unc = df[df["order"]=="Unclassified"]
df.loc[o_unc.index,"order"] += "."+df.loc[o_unc.index,"class"]
f_unc = df[df["family"]=="Unclassified"]
df.loc[f_unc.index,"family"] += "."+df.loc[f_unc.index,"order"]
g_unc = df[df["genus"]=="Unclassified"]
df.loc[g_unc.index,"genus"] += "."+df.loc[g_unc.index,"family"]
s_unc = df[df["species"]=="Unclassified"]
df.loc[s_unc.index,"species"] += "."+df.loc[s_unc.index,"genus"]
## Search and replace multiple "Unclassified."
df.replace(to_replace="Unclassified."*6,value="Unclassified.", inplace=True, regex=True)
df.replace(to_replace="Unclassified."*5,value="Unclassified.", inplace=True, regex=True)
df.replace(to_replace="Unclassified."*4,value="Unclassified.", inplace=True, regex=True)
df.replace(to_replace="Unclassified."*3,value="Unclassified.", inplace=True, regex=True)
df.replace(to_replace="Unclassified."*2,value="Unclassified.", inplace=True, regex=True)
df.replace(to_replace="Unclassified.Unclassified",value="Unclassified", inplace=True, regex=True)
return df
def main():
infile = sys.argv[1]
df = pd.read_csv(infile, header=0, sep=",", index_col=0)
df.fillna("Unclassified",inplace=True)
df = append_classified(df)
df.to_csv(sys.stdout, sep="\t")
if __name__ == '__main__':
main()
|
Append classified tax name to lower ranking unclassified names
|
Append classified tax name to lower ranking unclassified names
|
Python
|
mit
|
EnvGen/toolbox,EnvGen/toolbox
|
Append classified tax name to lower ranking unclassified names
|
#!/usr/bin/env python
import pandas as pd, sys
def append_classified(df):
## Append classified name to each Unclassified rank in turn
p_unc = df[df.phylum=="Unclassified"]
df.loc[p_unc.index,"phylum"] += "."+df.loc[p_unc.index,"superkingdom"]
c_unc = df[df["class"]=="Unclassified"]
df.loc[c_unc.index,"class"] += "."+df.loc[c_unc.index,"phylum"]
o_unc = df[df["order"]=="Unclassified"]
df.loc[o_unc.index,"order"] += "."+df.loc[o_unc.index,"class"]
f_unc = df[df["family"]=="Unclassified"]
df.loc[f_unc.index,"family"] += "."+df.loc[f_unc.index,"order"]
g_unc = df[df["genus"]=="Unclassified"]
df.loc[g_unc.index,"genus"] += "."+df.loc[g_unc.index,"family"]
s_unc = df[df["species"]=="Unclassified"]
df.loc[s_unc.index,"species"] += "."+df.loc[s_unc.index,"genus"]
## Search and replace multiple "Unclassified."
df.replace(to_replace="Unclassified."*6,value="Unclassified.", inplace=True, regex=True)
df.replace(to_replace="Unclassified."*5,value="Unclassified.", inplace=True, regex=True)
df.replace(to_replace="Unclassified."*4,value="Unclassified.", inplace=True, regex=True)
df.replace(to_replace="Unclassified."*3,value="Unclassified.", inplace=True, regex=True)
df.replace(to_replace="Unclassified."*2,value="Unclassified.", inplace=True, regex=True)
df.replace(to_replace="Unclassified.Unclassified",value="Unclassified", inplace=True, regex=True)
return df
def main():
infile = sys.argv[1]
df = pd.read_csv(infile, header=0, sep=",", index_col=0)
df.fillna("Unclassified",inplace=True)
df = append_classified(df)
df.to_csv(sys.stdout, sep="\t")
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Append classified tax name to lower ranking unclassified names<commit_after>
|
#!/usr/bin/env python
import pandas as pd, sys
def append_classified(df):
## Append classified name to each Unclassified rank in turn
p_unc = df[df.phylum=="Unclassified"]
df.loc[p_unc.index,"phylum"] += "."+df.loc[p_unc.index,"superkingdom"]
c_unc = df[df["class"]=="Unclassified"]
df.loc[c_unc.index,"class"] += "."+df.loc[c_unc.index,"phylum"]
o_unc = df[df["order"]=="Unclassified"]
df.loc[o_unc.index,"order"] += "."+df.loc[o_unc.index,"class"]
f_unc = df[df["family"]=="Unclassified"]
df.loc[f_unc.index,"family"] += "."+df.loc[f_unc.index,"order"]
g_unc = df[df["genus"]=="Unclassified"]
df.loc[g_unc.index,"genus"] += "."+df.loc[g_unc.index,"family"]
s_unc = df[df["species"]=="Unclassified"]
df.loc[s_unc.index,"species"] += "."+df.loc[s_unc.index,"genus"]
## Search and replace multiple "Unclassified."
df.replace(to_replace="Unclassified."*6,value="Unclassified.", inplace=True, regex=True)
df.replace(to_replace="Unclassified."*5,value="Unclassified.", inplace=True, regex=True)
df.replace(to_replace="Unclassified."*4,value="Unclassified.", inplace=True, regex=True)
df.replace(to_replace="Unclassified."*3,value="Unclassified.", inplace=True, regex=True)
df.replace(to_replace="Unclassified."*2,value="Unclassified.", inplace=True, regex=True)
df.replace(to_replace="Unclassified.Unclassified",value="Unclassified", inplace=True, regex=True)
return df
def main():
infile = sys.argv[1]
df = pd.read_csv(infile, header=0, sep=",", index_col=0)
df.fillna("Unclassified",inplace=True)
df = append_classified(df)
df.to_csv(sys.stdout, sep="\t")
if __name__ == '__main__':
main()
|
Append classified tax name to lower ranking unclassified names#!/usr/bin/env python
import pandas as pd, sys
def append_classified(df):
## Append classified name to each Unclassified rank in turn
p_unc = df[df.phylum=="Unclassified"]
df.loc[p_unc.index,"phylum"] += "."+df.loc[p_unc.index,"superkingdom"]
c_unc = df[df["class"]=="Unclassified"]
df.loc[c_unc.index,"class"] += "."+df.loc[c_unc.index,"phylum"]
o_unc = df[df["order"]=="Unclassified"]
df.loc[o_unc.index,"order"] += "."+df.loc[o_unc.index,"class"]
f_unc = df[df["family"]=="Unclassified"]
df.loc[f_unc.index,"family"] += "."+df.loc[f_unc.index,"order"]
g_unc = df[df["genus"]=="Unclassified"]
df.loc[g_unc.index,"genus"] += "."+df.loc[g_unc.index,"family"]
s_unc = df[df["species"]=="Unclassified"]
df.loc[s_unc.index,"species"] += "."+df.loc[s_unc.index,"genus"]
## Search and replace multiple "Unclassified."
df.replace(to_replace="Unclassified."*6,value="Unclassified.", inplace=True, regex=True)
df.replace(to_replace="Unclassified."*5,value="Unclassified.", inplace=True, regex=True)
df.replace(to_replace="Unclassified."*4,value="Unclassified.", inplace=True, regex=True)
df.replace(to_replace="Unclassified."*3,value="Unclassified.", inplace=True, regex=True)
df.replace(to_replace="Unclassified."*2,value="Unclassified.", inplace=True, regex=True)
df.replace(to_replace="Unclassified.Unclassified",value="Unclassified", inplace=True, regex=True)
return df
def main():
infile = sys.argv[1]
df = pd.read_csv(infile, header=0, sep=",", index_col=0)
df.fillna("Unclassified",inplace=True)
df = append_classified(df)
df.to_csv(sys.stdout, sep="\t")
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Append classified tax name to lower ranking unclassified names<commit_after>#!/usr/bin/env python
import pandas as pd, sys
def append_classified(df):
## Append classified name to each Unclassified rank in turn
p_unc = df[df.phylum=="Unclassified"]
df.loc[p_unc.index,"phylum"] += "."+df.loc[p_unc.index,"superkingdom"]
c_unc = df[df["class"]=="Unclassified"]
df.loc[c_unc.index,"class"] += "."+df.loc[c_unc.index,"phylum"]
o_unc = df[df["order"]=="Unclassified"]
df.loc[o_unc.index,"order"] += "."+df.loc[o_unc.index,"class"]
f_unc = df[df["family"]=="Unclassified"]
df.loc[f_unc.index,"family"] += "."+df.loc[f_unc.index,"order"]
g_unc = df[df["genus"]=="Unclassified"]
df.loc[g_unc.index,"genus"] += "."+df.loc[g_unc.index,"family"]
s_unc = df[df["species"]=="Unclassified"]
df.loc[s_unc.index,"species"] += "."+df.loc[s_unc.index,"genus"]
## Search and replace multiple "Unclassified."
df.replace(to_replace="Unclassified."*6,value="Unclassified.", inplace=True, regex=True)
df.replace(to_replace="Unclassified."*5,value="Unclassified.", inplace=True, regex=True)
df.replace(to_replace="Unclassified."*4,value="Unclassified.", inplace=True, regex=True)
df.replace(to_replace="Unclassified."*3,value="Unclassified.", inplace=True, regex=True)
df.replace(to_replace="Unclassified."*2,value="Unclassified.", inplace=True, regex=True)
df.replace(to_replace="Unclassified.Unclassified",value="Unclassified", inplace=True, regex=True)
return df
def main():
infile = sys.argv[1]
df = pd.read_csv(infile, header=0, sep=",", index_col=0)
df.fillna("Unclassified",inplace=True)
df = append_classified(df)
df.to_csv(sys.stdout, sep="\t")
if __name__ == '__main__':
main()
|
|
3295b30ba3e243801a520adff332663dbe490cf9
|
tools/mini_spectrum.py
|
tools/mini_spectrum.py
|
# -*- encoding: utf-8 -*-
# JN 2016-02-16
"""
Plot a spectrum from the first 1000 records of data
"""
import sys
import scipy.signal as sig
import matplotlib.pyplot as mpl
from combinato import NcsFile, DefaultFilter
def plot_spectrum(fname):
fid = NcsFile(fname)
rawdata = fid.read(0, 1000)
data = rawdata * (1e6 * fid.header['ADBitVolts'])
fs = 1/fid.timestep
my_filter = DefaultFilter(fid.timestep)
filt_data = my_filter.filter_extract(data)
[f, p] = sig.welch(data, fs, nperseg=32768)
[f_filt, p_filt] = sig.welch(filt_data, fs, nperseg=32768)
fig = mpl.figure()
plot = fig.add_subplot(1, 1, 1)
plot.plot(f, p, label='Unfiltered')
plot.plot(f_filt, p_filt, label='Filtered')
plot.set_yscale('log')
plot.legend()
plot.set_ylabel(r'$\mu\mathrm{V}^2/\mathrm{Hz}$')
plot.set_xlabel(r'$\mathrm{Hz}$')
def main():
plot_spectrum(sys.argv[1])
mpl.show()
if __name__ == '__main__':
main()
|
Add small plot of power spectral density
|
Add small plot of power spectral density
|
Python
|
mit
|
jniediek/combinato
|
Add small plot of power spectral density
|
# -*- encoding: utf-8 -*-
# JN 2016-02-16
"""
Plot a spectrum from the first 1000 records of data
"""
import sys
import scipy.signal as sig
import matplotlib.pyplot as mpl
from combinato import NcsFile, DefaultFilter
def plot_spectrum(fname):
fid = NcsFile(fname)
rawdata = fid.read(0, 1000)
data = rawdata * (1e6 * fid.header['ADBitVolts'])
fs = 1/fid.timestep
my_filter = DefaultFilter(fid.timestep)
filt_data = my_filter.filter_extract(data)
[f, p] = sig.welch(data, fs, nperseg=32768)
[f_filt, p_filt] = sig.welch(filt_data, fs, nperseg=32768)
fig = mpl.figure()
plot = fig.add_subplot(1, 1, 1)
plot.plot(f, p, label='Unfiltered')
plot.plot(f_filt, p_filt, label='Filtered')
plot.set_yscale('log')
plot.legend()
plot.set_ylabel(r'$\mu\mathrm{V}^2/\mathrm{Hz}$')
plot.set_xlabel(r'$\mathrm{Hz}$')
def main():
plot_spectrum(sys.argv[1])
mpl.show()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add small plot of power spectral density<commit_after>
|
# -*- encoding: utf-8 -*-
# JN 2016-02-16
"""
Plot a spectrum from the first 1000 records of data
"""
import sys
import scipy.signal as sig
import matplotlib.pyplot as mpl
from combinato import NcsFile, DefaultFilter
def plot_spectrum(fname):
fid = NcsFile(fname)
rawdata = fid.read(0, 1000)
data = rawdata * (1e6 * fid.header['ADBitVolts'])
fs = 1/fid.timestep
my_filter = DefaultFilter(fid.timestep)
filt_data = my_filter.filter_extract(data)
[f, p] = sig.welch(data, fs, nperseg=32768)
[f_filt, p_filt] = sig.welch(filt_data, fs, nperseg=32768)
fig = mpl.figure()
plot = fig.add_subplot(1, 1, 1)
plot.plot(f, p, label='Unfiltered')
plot.plot(f_filt, p_filt, label='Filtered')
plot.set_yscale('log')
plot.legend()
plot.set_ylabel(r'$\mu\mathrm{V}^2/\mathrm{Hz}$')
plot.set_xlabel(r'$\mathrm{Hz}$')
def main():
plot_spectrum(sys.argv[1])
mpl.show()
if __name__ == '__main__':
main()
|
Add small plot of power spectral density# -*- encoding: utf-8 -*-
# JN 2016-02-16
"""
Plot a spectrum from the first 1000 records of data
"""
import sys
import scipy.signal as sig
import matplotlib.pyplot as mpl
from combinato import NcsFile, DefaultFilter
def plot_spectrum(fname):
fid = NcsFile(fname)
rawdata = fid.read(0, 1000)
data = rawdata * (1e6 * fid.header['ADBitVolts'])
fs = 1/fid.timestep
my_filter = DefaultFilter(fid.timestep)
filt_data = my_filter.filter_extract(data)
[f, p] = sig.welch(data, fs, nperseg=32768)
[f_filt, p_filt] = sig.welch(filt_data, fs, nperseg=32768)
fig = mpl.figure()
plot = fig.add_subplot(1, 1, 1)
plot.plot(f, p, label='Unfiltered')
plot.plot(f_filt, p_filt, label='Filtered')
plot.set_yscale('log')
plot.legend()
plot.set_ylabel(r'$\mu\mathrm{V}^2/\mathrm{Hz}$')
plot.set_xlabel(r'$\mathrm{Hz}$')
def main():
plot_spectrum(sys.argv[1])
mpl.show()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add small plot of power spectral density<commit_after># -*- encoding: utf-8 -*-
# JN 2016-02-16
"""
Plot a spectrum from the first 1000 records of data
"""
import sys
import scipy.signal as sig
import matplotlib.pyplot as mpl
from combinato import NcsFile, DefaultFilter
def plot_spectrum(fname):
fid = NcsFile(fname)
rawdata = fid.read(0, 1000)
data = rawdata * (1e6 * fid.header['ADBitVolts'])
fs = 1/fid.timestep
my_filter = DefaultFilter(fid.timestep)
filt_data = my_filter.filter_extract(data)
[f, p] = sig.welch(data, fs, nperseg=32768)
[f_filt, p_filt] = sig.welch(filt_data, fs, nperseg=32768)
fig = mpl.figure()
plot = fig.add_subplot(1, 1, 1)
plot.plot(f, p, label='Unfiltered')
plot.plot(f_filt, p_filt, label='Filtered')
plot.set_yscale('log')
plot.legend()
plot.set_ylabel(r'$\mu\mathrm{V}^2/\mathrm{Hz}$')
plot.set_xlabel(r'$\mathrm{Hz}$')
def main():
plot_spectrum(sys.argv[1])
mpl.show()
if __name__ == '__main__':
main()
|
|
753803c79b1bc8b5457909a1d2f6779eb72fb36a
|
bindings/python/examples/coverart_fetch.py
|
bindings/python/examples/coverart_fetch.py
|
#!/usr/bin/python
## Copyright (C) 2005 Nick Piper <nick-gtkpod at nickpiper co uk>
## Part of the gtkpod project.
## URL: http://www.gtkpod.org/
## URL: http://gtkpod.sourceforge.net/
## The code contained in this file is free software; you can redistribute
## it and/or modify it under the terms of the GNU Lesser General Public
## License as published by the Free Software Foundation; either version
## 2.1 of the License, or (at your option) any later version.
## This file is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## Lesser General Public License for more details.
## You should have received a copy of the GNU Lesser General Public
## License along with this code; if not, write to the Free Software
## Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
## $Id$
import os, os.path
import gpod
import sys
import amazon
import urllib
import Image
import tempfile
ipod_mount = '/mnt/ipod'
itdb = gpod.itdb_parse(ipod_mount, None)
if not itdb:
print "Failed to read %s" % dbname
sys.exit(2)
# set your key here...
amazon.setLicense('')
for track in gpod.sw_get_tracks(itdb):
output = ("/tmp/%s %s.jpg" % (track.artist, track.album)).replace(' ','_')
if not os.path.exists(output):
print "Searching for %s %s" % (track.artist, track.album)
try:
albums = amazon.searchByKeyword("%s %s" % (track.artist, track.album),
type="lite",product_line="music")
except amazon.AmazonError, e:
print e
albums = []
if len(albums) == 0:
continue
album = albums[0]
i = urllib.urlopen(album.ImageUrlLarge)
o = open(output, "wb")
o.write(i.read())
o.close()
img = Image.open(output)
if not (img.size[0] > 10 or img.size[1] > 10):
os.unlink(output)
else:
print "Fetched image!"
if os.path.exists(output):
if gpod.itdb_track_set_thumbnail(track,output) != 0:
print "Failed to save image thumbnail"
gpod.itdb_write(itdb, None)
print "Saved db"
|
Add a toy script to fetch images from Amazon
|
Add a toy script to fetch images from Amazon
git-svn-id: 76cb8c96a56e2e269d2baf461dc2f0a164399ff5@1178 f01d2545-417e-4e96-918e-98f8d0dbbcb6
|
Python
|
lgpl-2.1
|
hyperair/libgpod,hyperair/libgpod,hyperair/libgpod,neuschaefer/libgpod,neuschaefer/libgpod,neuschaefer/libgpod,hyperair/libgpod,neuschaefer/libgpod,hyperair/libgpod,neuschaefer/libgpod
|
Add a toy script to fetch images from Amazon
git-svn-id: 76cb8c96a56e2e269d2baf461dc2f0a164399ff5@1178 f01d2545-417e-4e96-918e-98f8d0dbbcb6
|
#!/usr/bin/python
## Copyright (C) 2005 Nick Piper <nick-gtkpod at nickpiper co uk>
## Part of the gtkpod project.
## URL: http://www.gtkpod.org/
## URL: http://gtkpod.sourceforge.net/
## The code contained in this file is free software; you can redistribute
## it and/or modify it under the terms of the GNU Lesser General Public
## License as published by the Free Software Foundation; either version
## 2.1 of the License, or (at your option) any later version.
## This file is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## Lesser General Public License for more details.
## You should have received a copy of the GNU Lesser General Public
## License along with this code; if not, write to the Free Software
## Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
## $Id$
import os, os.path
import gpod
import sys
import amazon
import urllib
import Image
import tempfile
ipod_mount = '/mnt/ipod'
itdb = gpod.itdb_parse(ipod_mount, None)
if not itdb:
print "Failed to read %s" % dbname
sys.exit(2)
# set your key here...
amazon.setLicense('')
for track in gpod.sw_get_tracks(itdb):
output = ("/tmp/%s %s.jpg" % (track.artist, track.album)).replace(' ','_')
if not os.path.exists(output):
print "Searching for %s %s" % (track.artist, track.album)
try:
albums = amazon.searchByKeyword("%s %s" % (track.artist, track.album),
type="lite",product_line="music")
except amazon.AmazonError, e:
print e
albums = []
if len(albums) == 0:
continue
album = albums[0]
i = urllib.urlopen(album.ImageUrlLarge)
o = open(output, "wb")
o.write(i.read())
o.close()
img = Image.open(output)
if not (img.size[0] > 10 or img.size[1] > 10):
os.unlink(output)
else:
print "Fetched image!"
if os.path.exists(output):
if gpod.itdb_track_set_thumbnail(track,output) != 0:
print "Failed to save image thumbnail"
gpod.itdb_write(itdb, None)
print "Saved db"
|
<commit_before><commit_msg>Add a toy script to fetch images from Amazon
git-svn-id: 76cb8c96a56e2e269d2baf461dc2f0a164399ff5@1178 f01d2545-417e-4e96-918e-98f8d0dbbcb6<commit_after>
|
#!/usr/bin/python
## Copyright (C) 2005 Nick Piper <nick-gtkpod at nickpiper co uk>
## Part of the gtkpod project.
## URL: http://www.gtkpod.org/
## URL: http://gtkpod.sourceforge.net/
## The code contained in this file is free software; you can redistribute
## it and/or modify it under the terms of the GNU Lesser General Public
## License as published by the Free Software Foundation; either version
## 2.1 of the License, or (at your option) any later version.
## This file is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## Lesser General Public License for more details.
## You should have received a copy of the GNU Lesser General Public
## License along with this code; if not, write to the Free Software
## Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
## $Id$
import os, os.path
import gpod
import sys
import amazon
import urllib
import Image
import tempfile
ipod_mount = '/mnt/ipod'
itdb = gpod.itdb_parse(ipod_mount, None)
if not itdb:
print "Failed to read %s" % dbname
sys.exit(2)
# set your key here...
amazon.setLicense('')
for track in gpod.sw_get_tracks(itdb):
output = ("/tmp/%s %s.jpg" % (track.artist, track.album)).replace(' ','_')
if not os.path.exists(output):
print "Searching for %s %s" % (track.artist, track.album)
try:
albums = amazon.searchByKeyword("%s %s" % (track.artist, track.album),
type="lite",product_line="music")
except amazon.AmazonError, e:
print e
albums = []
if len(albums) == 0:
continue
album = albums[0]
i = urllib.urlopen(album.ImageUrlLarge)
o = open(output, "wb")
o.write(i.read())
o.close()
img = Image.open(output)
if not (img.size[0] > 10 or img.size[1] > 10):
os.unlink(output)
else:
print "Fetched image!"
if os.path.exists(output):
if gpod.itdb_track_set_thumbnail(track,output) != 0:
print "Failed to save image thumbnail"
gpod.itdb_write(itdb, None)
print "Saved db"
|
Add a toy script to fetch images from Amazon
git-svn-id: 76cb8c96a56e2e269d2baf461dc2f0a164399ff5@1178 f01d2545-417e-4e96-918e-98f8d0dbbcb6#!/usr/bin/python
## Copyright (C) 2005 Nick Piper <nick-gtkpod at nickpiper co uk>
## Part of the gtkpod project.
## URL: http://www.gtkpod.org/
## URL: http://gtkpod.sourceforge.net/
## The code contained in this file is free software; you can redistribute
## it and/or modify it under the terms of the GNU Lesser General Public
## License as published by the Free Software Foundation; either version
## 2.1 of the License, or (at your option) any later version.
## This file is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## Lesser General Public License for more details.
## You should have received a copy of the GNU Lesser General Public
## License along with this code; if not, write to the Free Software
## Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
## $Id$
import os, os.path
import gpod
import sys
import amazon
import urllib
import Image
import tempfile
ipod_mount = '/mnt/ipod'
itdb = gpod.itdb_parse(ipod_mount, None)
if not itdb:
print "Failed to read %s" % dbname
sys.exit(2)
# set your key here...
amazon.setLicense('')
for track in gpod.sw_get_tracks(itdb):
output = ("/tmp/%s %s.jpg" % (track.artist, track.album)).replace(' ','_')
if not os.path.exists(output):
print "Searching for %s %s" % (track.artist, track.album)
try:
albums = amazon.searchByKeyword("%s %s" % (track.artist, track.album),
type="lite",product_line="music")
except amazon.AmazonError, e:
print e
albums = []
if len(albums) == 0:
continue
album = albums[0]
i = urllib.urlopen(album.ImageUrlLarge)
o = open(output, "wb")
o.write(i.read())
o.close()
img = Image.open(output)
if not (img.size[0] > 10 or img.size[1] > 10):
os.unlink(output)
else:
print "Fetched image!"
if os.path.exists(output):
if gpod.itdb_track_set_thumbnail(track,output) != 0:
print "Failed to save image thumbnail"
gpod.itdb_write(itdb, None)
print "Saved db"
|
<commit_before><commit_msg>Add a toy script to fetch images from Amazon
git-svn-id: 76cb8c96a56e2e269d2baf461dc2f0a164399ff5@1178 f01d2545-417e-4e96-918e-98f8d0dbbcb6<commit_after>#!/usr/bin/python
## Copyright (C) 2005 Nick Piper <nick-gtkpod at nickpiper co uk>
## Part of the gtkpod project.
## URL: http://www.gtkpod.org/
## URL: http://gtkpod.sourceforge.net/
## The code contained in this file is free software; you can redistribute
## it and/or modify it under the terms of the GNU Lesser General Public
## License as published by the Free Software Foundation; either version
## 2.1 of the License, or (at your option) any later version.
## This file is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## Lesser General Public License for more details.
## You should have received a copy of the GNU Lesser General Public
## License along with this code; if not, write to the Free Software
## Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
## $Id$
import os, os.path
import gpod
import sys
import amazon
import urllib
import Image
import tempfile
ipod_mount = '/mnt/ipod'
itdb = gpod.itdb_parse(ipod_mount, None)
if not itdb:
print "Failed to read %s" % dbname
sys.exit(2)
# set your key here...
amazon.setLicense('')
for track in gpod.sw_get_tracks(itdb):
output = ("/tmp/%s %s.jpg" % (track.artist, track.album)).replace(' ','_')
if not os.path.exists(output):
print "Searching for %s %s" % (track.artist, track.album)
try:
albums = amazon.searchByKeyword("%s %s" % (track.artist, track.album),
type="lite",product_line="music")
except amazon.AmazonError, e:
print e
albums = []
if len(albums) == 0:
continue
album = albums[0]
i = urllib.urlopen(album.ImageUrlLarge)
o = open(output, "wb")
o.write(i.read())
o.close()
img = Image.open(output)
if not (img.size[0] > 10 or img.size[1] > 10):
os.unlink(output)
else:
print "Fetched image!"
if os.path.exists(output):
if gpod.itdb_track_set_thumbnail(track,output) != 0:
print "Failed to save image thumbnail"
gpod.itdb_write(itdb, None)
print "Saved db"
|
|
c078cc3fc0cf86b01fcec6c5ea6de9c4d3ee4ef5
|
CSVTODSS.py
|
CSVTODSS.py
|
from hec.script import MessageBox
from hec.heclib.dss import HecDss
from hec.heclib.util import HecTime
from hec.io import TimeSeriesContainer
import java
import csv
try :
try :
#print 'Jython version: ', sys.version
NUM_METADATA_LINES = 3;
DSS_FILE_PATH = './2008_2_Events/2008_2_Events_force.dss'
CSV_FILE_PATH = 'DailyRain.csv'
myDss = HecDss.open(DSS_FILE_PATH)
csvReader = csv.reader(open(CSV_FILE_PATH, 'r'), delimiter=',', quotechar='|')
csvList = list(csvReader)
numLocations = len(csvList[0]) - 1
numValues = len(csvList) - NUM_METADATA_LINES # Ignore Metadata
locationIds = csvList[1][1:]
print 'Start reading', numLocations, csvList[0][0], ':', ', '.join(csvList[0][1:])
print 'Period of ', numValues, 'values'
print 'Location Ids :', locationIds
for i in range(0, numLocations):
print '\n>>>>>>> Start processing ', locationIds[i], '<<<<<<<<<<<<'
precipitations = []
for j in range(NUM_METADATA_LINES, numValues + NUM_METADATA_LINES):
p = float(csvList[j][i+1])
precipitations.append(p)
print 'Precipitation of ', locationIds[i], precipitations[:10]
tsc = TimeSeriesContainer()
# tsc.fullName = "/BASIN/LOC/FLOW//1HOUR/OBS/"
tsc.fullName = '//' + locationIds[i].upper() + '/PRECIP-INC//1DAY/GAGE/'
print 'Start time : ', csvList[NUM_METADATA_LINES][0]
start = HecTime(csvList[NUM_METADATA_LINES][0])
tsc.interval = 24 * 60
times = []
for value in precipitations :
times.append(start.value())
start.add(tsc.interval)
tsc.times = times
tsc.values = precipitations
tsc.numberValues = len(precipitations)
tsc.units = "MM"
tsc.type = "PER-CUM"
myDss.put(tsc)
except Exception, e :
MessageBox.showError(' '.join(e.args), "Python Error")
except java.lang.Exception, e :
MessageBox.showError(e.getMessage(), "Error")
finally :
myDss.done()
print '\nCompleted converting ', CSV_FILE_PATH, ' to ', DSS_FILE_PATH
|
Store .csv daily precipitation in hec .dss database
|
Store .csv daily precipitation in hec .dss database
- Store precipitation data in .dss before running HEC-HMS model
- Also updated the Jython version of Hec-dssuve into 2.5
More details - http://resourceoptimism.blogspot.com/2017/03/store-csv-data-on-hec-dssuve-dss-for.html
|
Python
|
apache-2.0
|
gihankarunarathne/udp,gihankarunarathne/udp
|
Store .csv daily precipitation in hec .dss database
- Store precipitation data in .dss before running HEC-HMS model
- Also updated the Jython version of Hec-dssuve into 2.5
More details - http://resourceoptimism.blogspot.com/2017/03/store-csv-data-on-hec-dssuve-dss-for.html
|
from hec.script import MessageBox
from hec.heclib.dss import HecDss
from hec.heclib.util import HecTime
from hec.io import TimeSeriesContainer
import java
import csv
try :
try :
#print 'Jython version: ', sys.version
NUM_METADATA_LINES = 3;
DSS_FILE_PATH = './2008_2_Events/2008_2_Events_force.dss'
CSV_FILE_PATH = 'DailyRain.csv'
myDss = HecDss.open(DSS_FILE_PATH)
csvReader = csv.reader(open(CSV_FILE_PATH, 'r'), delimiter=',', quotechar='|')
csvList = list(csvReader)
numLocations = len(csvList[0]) - 1
numValues = len(csvList) - NUM_METADATA_LINES # Ignore Metadata
locationIds = csvList[1][1:]
print 'Start reading', numLocations, csvList[0][0], ':', ', '.join(csvList[0][1:])
print 'Period of ', numValues, 'values'
print 'Location Ids :', locationIds
for i in range(0, numLocations):
print '\n>>>>>>> Start processing ', locationIds[i], '<<<<<<<<<<<<'
precipitations = []
for j in range(NUM_METADATA_LINES, numValues + NUM_METADATA_LINES):
p = float(csvList[j][i+1])
precipitations.append(p)
print 'Precipitation of ', locationIds[i], precipitations[:10]
tsc = TimeSeriesContainer()
# tsc.fullName = "/BASIN/LOC/FLOW//1HOUR/OBS/"
tsc.fullName = '//' + locationIds[i].upper() + '/PRECIP-INC//1DAY/GAGE/'
print 'Start time : ', csvList[NUM_METADATA_LINES][0]
start = HecTime(csvList[NUM_METADATA_LINES][0])
tsc.interval = 24 * 60
times = []
for value in precipitations :
times.append(start.value())
start.add(tsc.interval)
tsc.times = times
tsc.values = precipitations
tsc.numberValues = len(precipitations)
tsc.units = "MM"
tsc.type = "PER-CUM"
myDss.put(tsc)
except Exception, e :
MessageBox.showError(' '.join(e.args), "Python Error")
except java.lang.Exception, e :
MessageBox.showError(e.getMessage(), "Error")
finally :
myDss.done()
print '\nCompleted converting ', CSV_FILE_PATH, ' to ', DSS_FILE_PATH
|
<commit_before><commit_msg>Store .csv daily precipitation in hec .dss database
- Store precipitation data in .dss before running HEC-HMS model
- Also updated the Jython version of Hec-dssuve into 2.5
More details - http://resourceoptimism.blogspot.com/2017/03/store-csv-data-on-hec-dssuve-dss-for.html<commit_after>
|
from hec.script import MessageBox
from hec.heclib.dss import HecDss
from hec.heclib.util import HecTime
from hec.io import TimeSeriesContainer
import java
import csv
try :
try :
#print 'Jython version: ', sys.version
NUM_METADATA_LINES = 3;
DSS_FILE_PATH = './2008_2_Events/2008_2_Events_force.dss'
CSV_FILE_PATH = 'DailyRain.csv'
myDss = HecDss.open(DSS_FILE_PATH)
csvReader = csv.reader(open(CSV_FILE_PATH, 'r'), delimiter=',', quotechar='|')
csvList = list(csvReader)
numLocations = len(csvList[0]) - 1
numValues = len(csvList) - NUM_METADATA_LINES # Ignore Metadata
locationIds = csvList[1][1:]
print 'Start reading', numLocations, csvList[0][0], ':', ', '.join(csvList[0][1:])
print 'Period of ', numValues, 'values'
print 'Location Ids :', locationIds
for i in range(0, numLocations):
print '\n>>>>>>> Start processing ', locationIds[i], '<<<<<<<<<<<<'
precipitations = []
for j in range(NUM_METADATA_LINES, numValues + NUM_METADATA_LINES):
p = float(csvList[j][i+1])
precipitations.append(p)
print 'Precipitation of ', locationIds[i], precipitations[:10]
tsc = TimeSeriesContainer()
# tsc.fullName = "/BASIN/LOC/FLOW//1HOUR/OBS/"
tsc.fullName = '//' + locationIds[i].upper() + '/PRECIP-INC//1DAY/GAGE/'
print 'Start time : ', csvList[NUM_METADATA_LINES][0]
start = HecTime(csvList[NUM_METADATA_LINES][0])
tsc.interval = 24 * 60
times = []
for value in precipitations :
times.append(start.value())
start.add(tsc.interval)
tsc.times = times
tsc.values = precipitations
tsc.numberValues = len(precipitations)
tsc.units = "MM"
tsc.type = "PER-CUM"
myDss.put(tsc)
except Exception, e :
MessageBox.showError(' '.join(e.args), "Python Error")
except java.lang.Exception, e :
MessageBox.showError(e.getMessage(), "Error")
finally :
myDss.done()
print '\nCompleted converting ', CSV_FILE_PATH, ' to ', DSS_FILE_PATH
|
Store .csv daily precipitation in hec .dss database
- Store precipitation data in .dss before running HEC-HMS model
- Also updated the Jython version of Hec-dssuve into 2.5
More details - http://resourceoptimism.blogspot.com/2017/03/store-csv-data-on-hec-dssuve-dss-for.htmlfrom hec.script import MessageBox
from hec.heclib.dss import HecDss
from hec.heclib.util import HecTime
from hec.io import TimeSeriesContainer
import java
import csv
try :
try :
#print 'Jython version: ', sys.version
NUM_METADATA_LINES = 3;
DSS_FILE_PATH = './2008_2_Events/2008_2_Events_force.dss'
CSV_FILE_PATH = 'DailyRain.csv'
myDss = HecDss.open(DSS_FILE_PATH)
csvReader = csv.reader(open(CSV_FILE_PATH, 'r'), delimiter=',', quotechar='|')
csvList = list(csvReader)
numLocations = len(csvList[0]) - 1
numValues = len(csvList) - NUM_METADATA_LINES # Ignore Metadata
locationIds = csvList[1][1:]
print 'Start reading', numLocations, csvList[0][0], ':', ', '.join(csvList[0][1:])
print 'Period of ', numValues, 'values'
print 'Location Ids :', locationIds
for i in range(0, numLocations):
print '\n>>>>>>> Start processing ', locationIds[i], '<<<<<<<<<<<<'
precipitations = []
for j in range(NUM_METADATA_LINES, numValues + NUM_METADATA_LINES):
p = float(csvList[j][i+1])
precipitations.append(p)
print 'Precipitation of ', locationIds[i], precipitations[:10]
tsc = TimeSeriesContainer()
# tsc.fullName = "/BASIN/LOC/FLOW//1HOUR/OBS/"
tsc.fullName = '//' + locationIds[i].upper() + '/PRECIP-INC//1DAY/GAGE/'
print 'Start time : ', csvList[NUM_METADATA_LINES][0]
start = HecTime(csvList[NUM_METADATA_LINES][0])
tsc.interval = 24 * 60
times = []
for value in precipitations :
times.append(start.value())
start.add(tsc.interval)
tsc.times = times
tsc.values = precipitations
tsc.numberValues = len(precipitations)
tsc.units = "MM"
tsc.type = "PER-CUM"
myDss.put(tsc)
except Exception, e :
MessageBox.showError(' '.join(e.args), "Python Error")
except java.lang.Exception, e :
MessageBox.showError(e.getMessage(), "Error")
finally :
myDss.done()
print '\nCompleted converting ', CSV_FILE_PATH, ' to ', DSS_FILE_PATH
|
<commit_before><commit_msg>Store .csv daily precipitation in hec .dss database
- Store precipitation data in .dss before running HEC-HMS model
- Also updated the Jython version of Hec-dssuve into 2.5
More details - http://resourceoptimism.blogspot.com/2017/03/store-csv-data-on-hec-dssuve-dss-for.html<commit_after>from hec.script import MessageBox
from hec.heclib.dss import HecDss
from hec.heclib.util import HecTime
from hec.io import TimeSeriesContainer
import java
import csv
try :
try :
#print 'Jython version: ', sys.version
NUM_METADATA_LINES = 3;
DSS_FILE_PATH = './2008_2_Events/2008_2_Events_force.dss'
CSV_FILE_PATH = 'DailyRain.csv'
myDss = HecDss.open(DSS_FILE_PATH)
csvReader = csv.reader(open(CSV_FILE_PATH, 'r'), delimiter=',', quotechar='|')
csvList = list(csvReader)
numLocations = len(csvList[0]) - 1
numValues = len(csvList) - NUM_METADATA_LINES # Ignore Metadata
locationIds = csvList[1][1:]
print 'Start reading', numLocations, csvList[0][0], ':', ', '.join(csvList[0][1:])
print 'Period of ', numValues, 'values'
print 'Location Ids :', locationIds
for i in range(0, numLocations):
print '\n>>>>>>> Start processing ', locationIds[i], '<<<<<<<<<<<<'
precipitations = []
for j in range(NUM_METADATA_LINES, numValues + NUM_METADATA_LINES):
p = float(csvList[j][i+1])
precipitations.append(p)
print 'Precipitation of ', locationIds[i], precipitations[:10]
tsc = TimeSeriesContainer()
# tsc.fullName = "/BASIN/LOC/FLOW//1HOUR/OBS/"
tsc.fullName = '//' + locationIds[i].upper() + '/PRECIP-INC//1DAY/GAGE/'
print 'Start time : ', csvList[NUM_METADATA_LINES][0]
start = HecTime(csvList[NUM_METADATA_LINES][0])
tsc.interval = 24 * 60
times = []
for value in precipitations :
times.append(start.value())
start.add(tsc.interval)
tsc.times = times
tsc.values = precipitations
tsc.numberValues = len(precipitations)
tsc.units = "MM"
tsc.type = "PER-CUM"
myDss.put(tsc)
except Exception, e :
MessageBox.showError(' '.join(e.args), "Python Error")
except java.lang.Exception, e :
MessageBox.showError(e.getMessage(), "Error")
finally :
myDss.done()
print '\nCompleted converting ', CSV_FILE_PATH, ' to ', DSS_FILE_PATH
|
|
eb0dacc8af287c35e851c88792e795a79afda238
|
tests/unit/states/test_loop.py
|
tests/unit/states/test_loop.py
|
# -*- coding: utf-8 -*-
'''
Tests for loop state(s)
'''
# Import Python Libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
patch,
MagicMock,
NO_MOCK,
NO_MOCK_REASON
)
# Import Salt Libs
import salt.states.loop as loop
@skipIf(NO_MOCK, NO_MOCK_REASON)
class LoopTestCase(TestCase, LoaderModuleMockMixin):
mock = MagicMock(return_value=True)
func = 'foo.bar'
m_args = ['foo', 'bar', 'baz']
m_kwargs = {'hello': 'world'}
condition = 'm_ret is True'
period = 1
timeout = 3
def setup_loader_modules(self):
return {
loop: {
'__opts__': {'test': False},
'__salt__': {self.func: self.mock},
}
}
def setUp(self):
self.mock.reset_mock()
def test_until(self):
ret = loop.until(
name=self.func,
m_args=self.m_args,
m_kwargs=self.m_kwargs,
condition=self.condition,
period=self.period,
timeout=self.timeout)
assert ret['result'] is True
self.mock.assert_called_once_with(*self.m_args, **self.m_kwargs)
def test_until_without_args(self):
ret = loop.until(
name=self.func,
m_kwargs=self.m_kwargs,
condition=self.condition,
period=self.period,
timeout=self.timeout)
assert ret['result'] is True
self.mock.assert_called_once_with(**self.m_kwargs)
def test_until_without_kwargs(self):
ret = loop.until(
name=self.func,
m_args=self.m_args,
condition=self.condition,
period=self.period,
timeout=self.timeout)
assert ret['result'] is True
self.mock.assert_called_once_with(*self.m_args)
def test_until_without_args_or_kwargs(self):
ret = loop.until(
name=self.func,
condition=self.condition,
period=self.period,
timeout=self.timeout)
assert ret['result'] is True
self.mock.assert_called_once_with()
|
Add unit tests for loop.until state
|
Add unit tests for loop.until state
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
Add unit tests for loop.until state
|
# -*- coding: utf-8 -*-
'''
Tests for loop state(s)
'''
# Import Python Libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
patch,
MagicMock,
NO_MOCK,
NO_MOCK_REASON
)
# Import Salt Libs
import salt.states.loop as loop
@skipIf(NO_MOCK, NO_MOCK_REASON)
class LoopTestCase(TestCase, LoaderModuleMockMixin):
mock = MagicMock(return_value=True)
func = 'foo.bar'
m_args = ['foo', 'bar', 'baz']
m_kwargs = {'hello': 'world'}
condition = 'm_ret is True'
period = 1
timeout = 3
def setup_loader_modules(self):
return {
loop: {
'__opts__': {'test': False},
'__salt__': {self.func: self.mock},
}
}
def setUp(self):
self.mock.reset_mock()
def test_until(self):
ret = loop.until(
name=self.func,
m_args=self.m_args,
m_kwargs=self.m_kwargs,
condition=self.condition,
period=self.period,
timeout=self.timeout)
assert ret['result'] is True
self.mock.assert_called_once_with(*self.m_args, **self.m_kwargs)
def test_until_without_args(self):
ret = loop.until(
name=self.func,
m_kwargs=self.m_kwargs,
condition=self.condition,
period=self.period,
timeout=self.timeout)
assert ret['result'] is True
self.mock.assert_called_once_with(**self.m_kwargs)
def test_until_without_kwargs(self):
ret = loop.until(
name=self.func,
m_args=self.m_args,
condition=self.condition,
period=self.period,
timeout=self.timeout)
assert ret['result'] is True
self.mock.assert_called_once_with(*self.m_args)
def test_until_without_args_or_kwargs(self):
ret = loop.until(
name=self.func,
condition=self.condition,
period=self.period,
timeout=self.timeout)
assert ret['result'] is True
self.mock.assert_called_once_with()
|
<commit_before><commit_msg>Add unit tests for loop.until state<commit_after>
|
# -*- coding: utf-8 -*-
'''
Tests for loop state(s)
'''
# Import Python Libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
patch,
MagicMock,
NO_MOCK,
NO_MOCK_REASON
)
# Import Salt Libs
import salt.states.loop as loop
@skipIf(NO_MOCK, NO_MOCK_REASON)
class LoopTestCase(TestCase, LoaderModuleMockMixin):
mock = MagicMock(return_value=True)
func = 'foo.bar'
m_args = ['foo', 'bar', 'baz']
m_kwargs = {'hello': 'world'}
condition = 'm_ret is True'
period = 1
timeout = 3
def setup_loader_modules(self):
return {
loop: {
'__opts__': {'test': False},
'__salt__': {self.func: self.mock},
}
}
def setUp(self):
self.mock.reset_mock()
def test_until(self):
ret = loop.until(
name=self.func,
m_args=self.m_args,
m_kwargs=self.m_kwargs,
condition=self.condition,
period=self.period,
timeout=self.timeout)
assert ret['result'] is True
self.mock.assert_called_once_with(*self.m_args, **self.m_kwargs)
def test_until_without_args(self):
ret = loop.until(
name=self.func,
m_kwargs=self.m_kwargs,
condition=self.condition,
period=self.period,
timeout=self.timeout)
assert ret['result'] is True
self.mock.assert_called_once_with(**self.m_kwargs)
def test_until_without_kwargs(self):
ret = loop.until(
name=self.func,
m_args=self.m_args,
condition=self.condition,
period=self.period,
timeout=self.timeout)
assert ret['result'] is True
self.mock.assert_called_once_with(*self.m_args)
def test_until_without_args_or_kwargs(self):
ret = loop.until(
name=self.func,
condition=self.condition,
period=self.period,
timeout=self.timeout)
assert ret['result'] is True
self.mock.assert_called_once_with()
|
Add unit tests for loop.until state# -*- coding: utf-8 -*-
'''
Tests for loop state(s)
'''
# Import Python Libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
patch,
MagicMock,
NO_MOCK,
NO_MOCK_REASON
)
# Import Salt Libs
import salt.states.loop as loop
@skipIf(NO_MOCK, NO_MOCK_REASON)
class LoopTestCase(TestCase, LoaderModuleMockMixin):
mock = MagicMock(return_value=True)
func = 'foo.bar'
m_args = ['foo', 'bar', 'baz']
m_kwargs = {'hello': 'world'}
condition = 'm_ret is True'
period = 1
timeout = 3
def setup_loader_modules(self):
return {
loop: {
'__opts__': {'test': False},
'__salt__': {self.func: self.mock},
}
}
def setUp(self):
self.mock.reset_mock()
def test_until(self):
ret = loop.until(
name=self.func,
m_args=self.m_args,
m_kwargs=self.m_kwargs,
condition=self.condition,
period=self.period,
timeout=self.timeout)
assert ret['result'] is True
self.mock.assert_called_once_with(*self.m_args, **self.m_kwargs)
def test_until_without_args(self):
ret = loop.until(
name=self.func,
m_kwargs=self.m_kwargs,
condition=self.condition,
period=self.period,
timeout=self.timeout)
assert ret['result'] is True
self.mock.assert_called_once_with(**self.m_kwargs)
def test_until_without_kwargs(self):
ret = loop.until(
name=self.func,
m_args=self.m_args,
condition=self.condition,
period=self.period,
timeout=self.timeout)
assert ret['result'] is True
self.mock.assert_called_once_with(*self.m_args)
def test_until_without_args_or_kwargs(self):
ret = loop.until(
name=self.func,
condition=self.condition,
period=self.period,
timeout=self.timeout)
assert ret['result'] is True
self.mock.assert_called_once_with()
|
<commit_before><commit_msg>Add unit tests for loop.until state<commit_after># -*- coding: utf-8 -*-
'''
Tests for loop state(s)
'''
# Import Python Libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
patch,
MagicMock,
NO_MOCK,
NO_MOCK_REASON
)
# Import Salt Libs
import salt.states.loop as loop
@skipIf(NO_MOCK, NO_MOCK_REASON)
class LoopTestCase(TestCase, LoaderModuleMockMixin):
mock = MagicMock(return_value=True)
func = 'foo.bar'
m_args = ['foo', 'bar', 'baz']
m_kwargs = {'hello': 'world'}
condition = 'm_ret is True'
period = 1
timeout = 3
def setup_loader_modules(self):
return {
loop: {
'__opts__': {'test': False},
'__salt__': {self.func: self.mock},
}
}
def setUp(self):
self.mock.reset_mock()
def test_until(self):
ret = loop.until(
name=self.func,
m_args=self.m_args,
m_kwargs=self.m_kwargs,
condition=self.condition,
period=self.period,
timeout=self.timeout)
assert ret['result'] is True
self.mock.assert_called_once_with(*self.m_args, **self.m_kwargs)
def test_until_without_args(self):
ret = loop.until(
name=self.func,
m_kwargs=self.m_kwargs,
condition=self.condition,
period=self.period,
timeout=self.timeout)
assert ret['result'] is True
self.mock.assert_called_once_with(**self.m_kwargs)
def test_until_without_kwargs(self):
ret = loop.until(
name=self.func,
m_args=self.m_args,
condition=self.condition,
period=self.period,
timeout=self.timeout)
assert ret['result'] is True
self.mock.assert_called_once_with(*self.m_args)
def test_until_without_args_or_kwargs(self):
ret = loop.until(
name=self.func,
condition=self.condition,
period=self.period,
timeout=self.timeout)
assert ret['result'] is True
self.mock.assert_called_once_with()
|
|
1f7b33d90844019b4ef23c9a871408e02f7a96eb
|
tools/verify_tempest_config.py
|
tools/verify_tempest_config.py
|
#!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
from tempest import clients
from tempest import config
CONF = config.TempestConfig()
#Dicts matching extension names to config options
NOVA_EXTENSIONS = {
'disk_config': 'DiskConfig',
'change_password': 'ServerPassword',
'flavor_extra': 'FlavorExtraSpecs'
}
def verify_extensions(os):
results = {}
extensions_client = os.extensions_client
__, resp = extensions_client.list_extensions()
resp = resp['extensions']
extensions = map(lambda x: x['name'], resp)
results['nova_features'] = {}
for extension in NOVA_EXTENSIONS.keys():
if NOVA_EXTENSIONS[extension] in extensions:
results['nova_features'][extension] = True
else:
results['nova_features'][extension] = False
return results
def display_results(results):
for option in NOVA_EXTENSIONS.keys():
config_value = getattr(CONF.compute_feature_enabled, option)
if config_value != results['nova_features'][option]:
print "Config option: %s should be changed to: %s" % (
option, not config_value)
def main(argv):
os = clients.ComputeAdminManager(interface='json')
results = verify_extensions(os)
display_results(results)
if __name__ == "__main__":
main(sys.argv)
|
Add config feature verification script
|
Add config feature verification script
This commit adds a new tool to tempest that will verify that API
queryable config options are set correctly. Right now the list of
options that are verified is very short. Later on additional checks
will be added to verify other services features when the tempest
clients for the other services get functions to query enabled
extensions.
Partially Implements: blueprint config-verification
Change-Id: Ie3b5fadd74460fb7199bff7046ec0efe37268b94
|
Python
|
apache-2.0
|
redhat-cip/tempest,tudorvio/tempest,LIS/lis-tempest,cloudbase/lis-tempest,vedujoshi/os_tempest,danielmellado/tempest,xbezdick/tempest,adkerr/tempest,openstack/tempest,rzarzynski/tempest,vedujoshi/os_tempest,BeenzSyed/tempest,LIS/lis-tempest,bigswitch/tempest,tudorvio/tempest,nunogt/tempest,rakeshmi/tempest,Tesora/tesora-tempest,alinbalutoiu/tempest,zsoltdudas/lis-tempest,rakeshmi/tempest,vmahuli/tempest,ebagdasa/tempest,hpcloud-mon/tempest,hayderimran7/tempest,cisco-openstack/tempest,neerja28/Tempest,FujitsuEnablingSoftwareTechnologyGmbH/tempest,CiscoSystems/tempest,FujitsuEnablingSoftwareTechnologyGmbH/tempest,adkerr/tempest,pandeyop/tempest,dkalashnik/tempest,ebagdasa/tempest,flyingfish007/tempest,Mirantis/tempest,izadorozhna/tempest,pczerkas/tempest,izadorozhna/tempest,afaheem88/tempest_neutron,armando-migliaccio/tempest,afaheem88/tempest,flyingfish007/tempest,vedujoshi/tempest,Tesora/tesora-tempest,alinbalutoiu/tempest,JioCloud/tempest,tonyli71/tempest,yamt/tempest,Vaidyanath/tempest,varunarya10/tempest,manasi24/tempest,xbezdick/tempest,vmahuli/tempest,roopali8/tempest,Vaidyanath/tempest,Lilywei123/tempest,eggmaster/tempest,afaheem88/tempest,Lilywei123/tempest,roopali8/tempest,zsoltdudas/lis-tempest,bigswitch/tempest,NexusIS/tempest,nunogt/tempest,openstack/tempest,ntymtsiv/tempest,Juniper/tempest,vedujoshi/tempest,eggmaster/tempest,pczerkas/tempest,cisco-openstack/tempest,armando-migliaccio/tempest,neerja28/Tempest,redhat-cip/tempest,Juraci/tempest,JioCloud/tempest,jaspreetw/tempest,queria/my-tempest,manasi24/jiocloud-tempest-qatempest,varunarya10/tempest,yamt/tempest,CiscoSystems/tempest,sebrandon1/tempest,NexusIS/tempest,rzarzynski/tempest,queria/my-tempest,dkalashnik/tempest,akash1808/tempest,Mirantis/tempest,akash1808/tempest,manasi24/jiocloud-tempest-qatempest,hpcloud-mon/tempest,masayukig/tempest,cloudbase/lis-tempest,pandeyop/tempest,BeenzSyed/tempest,jamielennox/tempest,Juniper/tempest,danielmellado/tempest,Juraci/tempest,masayukig/tempest,manasi24/temp
est,afaheem88/tempest_neutron,ntymtsiv/tempest,sebrandon1/tempest,tonyli71/tempest,jaspreetw/tempest,hayderimran7/tempest,jamielennox/tempest
|
Add config feature verification script
This commit adds a new tool to tempest that will verify that API
queryable config options are set correctly. Right now the list of
options that are verified is very short. Later on additional checks
will be added to verify other services features when the tempest
clients for the other services get functions to query enabled
extensions.
Partially Implements: blueprint config-verification
Change-Id: Ie3b5fadd74460fb7199bff7046ec0efe37268b94
|
#!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
from tempest import clients
from tempest import config
CONF = config.TempestConfig()
#Dicts matching extension names to config options
NOVA_EXTENSIONS = {
'disk_config': 'DiskConfig',
'change_password': 'ServerPassword',
'flavor_extra': 'FlavorExtraSpecs'
}
def verify_extensions(os):
results = {}
extensions_client = os.extensions_client
__, resp = extensions_client.list_extensions()
resp = resp['extensions']
extensions = map(lambda x: x['name'], resp)
results['nova_features'] = {}
for extension in NOVA_EXTENSIONS.keys():
if NOVA_EXTENSIONS[extension] in extensions:
results['nova_features'][extension] = True
else:
results['nova_features'][extension] = False
return results
def display_results(results):
for option in NOVA_EXTENSIONS.keys():
config_value = getattr(CONF.compute_feature_enabled, option)
if config_value != results['nova_features'][option]:
print "Config option: %s should be changed to: %s" % (
option, not config_value)
def main(argv):
os = clients.ComputeAdminManager(interface='json')
results = verify_extensions(os)
display_results(results)
if __name__ == "__main__":
main(sys.argv)
|
<commit_before><commit_msg>Add config feature verification script
This commit adds a new tool to tempest that will verify that API
queryable config options are set correctly. Right now the list of
options that are verified is very short. Later on additional checks
will be added to verify other services features when the tempest
clients for the other services get functions to query enabled
extensions.
Partially Implements: blueprint config-verification
Change-Id: Ie3b5fadd74460fb7199bff7046ec0efe37268b94<commit_after>
|
#!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
from tempest import clients
from tempest import config
CONF = config.TempestConfig()
#Dicts matching extension names to config options
NOVA_EXTENSIONS = {
'disk_config': 'DiskConfig',
'change_password': 'ServerPassword',
'flavor_extra': 'FlavorExtraSpecs'
}
def verify_extensions(os):
results = {}
extensions_client = os.extensions_client
__, resp = extensions_client.list_extensions()
resp = resp['extensions']
extensions = map(lambda x: x['name'], resp)
results['nova_features'] = {}
for extension in NOVA_EXTENSIONS.keys():
if NOVA_EXTENSIONS[extension] in extensions:
results['nova_features'][extension] = True
else:
results['nova_features'][extension] = False
return results
def display_results(results):
for option in NOVA_EXTENSIONS.keys():
config_value = getattr(CONF.compute_feature_enabled, option)
if config_value != results['nova_features'][option]:
print "Config option: %s should be changed to: %s" % (
option, not config_value)
def main(argv):
os = clients.ComputeAdminManager(interface='json')
results = verify_extensions(os)
display_results(results)
if __name__ == "__main__":
main(sys.argv)
|
Add config feature verification script
This commit adds a new tool to tempest that will verify that API
queryable config options are set correctly. Right now the list of
options that are verified is very short. Later on additional checks
will be added to verify other services features when the tempest
clients for the other services get functions to query enabled
extensions.
Partially Implements: blueprint config-verification
Change-Id: Ie3b5fadd74460fb7199bff7046ec0efe37268b94#!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
from tempest import clients
from tempest import config
CONF = config.TempestConfig()
#Dicts matching extension names to config options
NOVA_EXTENSIONS = {
'disk_config': 'DiskConfig',
'change_password': 'ServerPassword',
'flavor_extra': 'FlavorExtraSpecs'
}
def verify_extensions(os):
results = {}
extensions_client = os.extensions_client
__, resp = extensions_client.list_extensions()
resp = resp['extensions']
extensions = map(lambda x: x['name'], resp)
results['nova_features'] = {}
for extension in NOVA_EXTENSIONS.keys():
if NOVA_EXTENSIONS[extension] in extensions:
results['nova_features'][extension] = True
else:
results['nova_features'][extension] = False
return results
def display_results(results):
for option in NOVA_EXTENSIONS.keys():
config_value = getattr(CONF.compute_feature_enabled, option)
if config_value != results['nova_features'][option]:
print "Config option: %s should be changed to: %s" % (
option, not config_value)
def main(argv):
os = clients.ComputeAdminManager(interface='json')
results = verify_extensions(os)
display_results(results)
if __name__ == "__main__":
main(sys.argv)
|
<commit_before><commit_msg>Add config feature verification script
This commit adds a new tool to tempest that will verify that API
queryable config options are set correctly. Right now the list of
options that are verified is very short. Later on additional checks
will be added to verify other services features when the tempest
clients for the other services get functions to query enabled
extensions.
Partially Implements: blueprint config-verification
Change-Id: Ie3b5fadd74460fb7199bff7046ec0efe37268b94<commit_after>#!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
from tempest import clients
from tempest import config
CONF = config.TempestConfig()
#Dicts matching extension names to config options
NOVA_EXTENSIONS = {
'disk_config': 'DiskConfig',
'change_password': 'ServerPassword',
'flavor_extra': 'FlavorExtraSpecs'
}
def verify_extensions(os):
results = {}
extensions_client = os.extensions_client
__, resp = extensions_client.list_extensions()
resp = resp['extensions']
extensions = map(lambda x: x['name'], resp)
results['nova_features'] = {}
for extension in NOVA_EXTENSIONS.keys():
if NOVA_EXTENSIONS[extension] in extensions:
results['nova_features'][extension] = True
else:
results['nova_features'][extension] = False
return results
def display_results(results):
for option in NOVA_EXTENSIONS.keys():
config_value = getattr(CONF.compute_feature_enabled, option)
if config_value != results['nova_features'][option]:
print "Config option: %s should be changed to: %s" % (
option, not config_value)
def main(argv):
os = clients.ComputeAdminManager(interface='json')
results = verify_extensions(os)
display_results(results)
if __name__ == "__main__":
main(sys.argv)
|
|
ffd57de470f488793e0beda9ead552d43663f6b9
|
designate/tests/test_backend/test_bind9.py
|
designate/tests/test_backend/test_bind9.py
|
# Copyright 2015 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from designate import objects
from designate.tests.test_backend import BackendTestCase
from designate.backend.impl_bind9 import Bind9Backend
class Bind9BackendTestCase(BackendTestCase):
def setUp(self):
super(Bind9BackendTestCase, self).setUp()
self.zone = objects.Zone(id='cca7908b-dad4-4c50-adba-fb67d4c556e8',
name='example.com.',
email='example@example.com')
target = objects.PoolTarget.from_dict({
'id': '4588652b-50e7-46b9-b688-a9bad40a873e',
'type': 'powerdns',
'masters': [{'host': '192.0.2.1', 'port': 53},
{'host': '192.0.2.2', 'port': 35}],
'options': [{'key': 'host', 'value': '192.0.2.3'},
{'key': 'port', 'value': 53},
{'key': 'rndc_host', 'value': '192.0.2.4'},
{'key': 'rndc_port', 'value': 953},
{'key': 'rndc_config_file', 'value': '/etc/rndc.conf'},
{'key': 'rndc_key_file', 'value': '/etc/rndc.key'},
{'key': 'clean_zonefile', 'value': 'true'}],
})
self.backend = Bind9Backend(target)
@mock.patch('designate.utils.execute')
def test_create_zone(self, execute):
context = self.get_context()
self.backend.create_zone(context, self.zone)
@mock.patch('designate.utils.execute')
def test_delete_zone(self, execute):
context = self.get_context()
self.backend.delete_zone(context, self.zone)
|
Add test of BIND9 backend
|
Add test of BIND9 backend
Add test of the following source code.
designate/designate/backend/impl_bind9.py
Change-Id: If2c3292de483881d732d88397574de8e5a12f78a
|
Python
|
apache-2.0
|
grahamhayes/designate,grahamhayes/designate,openstack/designate,grahamhayes/designate,openstack/designate,openstack/designate
|
Add test of BIND9 backend
Add test of the following source code.
designate/designate/backend/impl_bind9.py
Change-Id: If2c3292de483881d732d88397574de8e5a12f78a
|
# Copyright 2015 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from designate import objects
from designate.tests.test_backend import BackendTestCase
from designate.backend.impl_bind9 import Bind9Backend
class Bind9BackendTestCase(BackendTestCase):
def setUp(self):
super(Bind9BackendTestCase, self).setUp()
self.zone = objects.Zone(id='cca7908b-dad4-4c50-adba-fb67d4c556e8',
name='example.com.',
email='example@example.com')
target = objects.PoolTarget.from_dict({
'id': '4588652b-50e7-46b9-b688-a9bad40a873e',
'type': 'powerdns',
'masters': [{'host': '192.0.2.1', 'port': 53},
{'host': '192.0.2.2', 'port': 35}],
'options': [{'key': 'host', 'value': '192.0.2.3'},
{'key': 'port', 'value': 53},
{'key': 'rndc_host', 'value': '192.0.2.4'},
{'key': 'rndc_port', 'value': 953},
{'key': 'rndc_config_file', 'value': '/etc/rndc.conf'},
{'key': 'rndc_key_file', 'value': '/etc/rndc.key'},
{'key': 'clean_zonefile', 'value': 'true'}],
})
self.backend = Bind9Backend(target)
@mock.patch('designate.utils.execute')
def test_create_zone(self, execute):
context = self.get_context()
self.backend.create_zone(context, self.zone)
@mock.patch('designate.utils.execute')
def test_delete_zone(self, execute):
context = self.get_context()
self.backend.delete_zone(context, self.zone)
|
<commit_before><commit_msg>Add test of BIND9 backend
Add test of the following source code.
designate/designate/backend/impl_bind9.py
Change-Id: If2c3292de483881d732d88397574de8e5a12f78a<commit_after>
|
# Copyright 2015 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from designate import objects
from designate.tests.test_backend import BackendTestCase
from designate.backend.impl_bind9 import Bind9Backend
class Bind9BackendTestCase(BackendTestCase):
def setUp(self):
super(Bind9BackendTestCase, self).setUp()
self.zone = objects.Zone(id='cca7908b-dad4-4c50-adba-fb67d4c556e8',
name='example.com.',
email='example@example.com')
target = objects.PoolTarget.from_dict({
'id': '4588652b-50e7-46b9-b688-a9bad40a873e',
'type': 'powerdns',
'masters': [{'host': '192.0.2.1', 'port': 53},
{'host': '192.0.2.2', 'port': 35}],
'options': [{'key': 'host', 'value': '192.0.2.3'},
{'key': 'port', 'value': 53},
{'key': 'rndc_host', 'value': '192.0.2.4'},
{'key': 'rndc_port', 'value': 953},
{'key': 'rndc_config_file', 'value': '/etc/rndc.conf'},
{'key': 'rndc_key_file', 'value': '/etc/rndc.key'},
{'key': 'clean_zonefile', 'value': 'true'}],
})
self.backend = Bind9Backend(target)
@mock.patch('designate.utils.execute')
def test_create_zone(self, execute):
context = self.get_context()
self.backend.create_zone(context, self.zone)
@mock.patch('designate.utils.execute')
def test_delete_zone(self, execute):
context = self.get_context()
self.backend.delete_zone(context, self.zone)
|
Add test of BIND9 backend
Add test of the following source code.
designate/designate/backend/impl_bind9.py
Change-Id: If2c3292de483881d732d88397574de8e5a12f78a# Copyright 2015 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from designate import objects
from designate.tests.test_backend import BackendTestCase
from designate.backend.impl_bind9 import Bind9Backend
class Bind9BackendTestCase(BackendTestCase):
def setUp(self):
super(Bind9BackendTestCase, self).setUp()
self.zone = objects.Zone(id='cca7908b-dad4-4c50-adba-fb67d4c556e8',
name='example.com.',
email='example@example.com')
target = objects.PoolTarget.from_dict({
'id': '4588652b-50e7-46b9-b688-a9bad40a873e',
'type': 'powerdns',
'masters': [{'host': '192.0.2.1', 'port': 53},
{'host': '192.0.2.2', 'port': 35}],
'options': [{'key': 'host', 'value': '192.0.2.3'},
{'key': 'port', 'value': 53},
{'key': 'rndc_host', 'value': '192.0.2.4'},
{'key': 'rndc_port', 'value': 953},
{'key': 'rndc_config_file', 'value': '/etc/rndc.conf'},
{'key': 'rndc_key_file', 'value': '/etc/rndc.key'},
{'key': 'clean_zonefile', 'value': 'true'}],
})
self.backend = Bind9Backend(target)
@mock.patch('designate.utils.execute')
def test_create_zone(self, execute):
context = self.get_context()
self.backend.create_zone(context, self.zone)
@mock.patch('designate.utils.execute')
def test_delete_zone(self, execute):
context = self.get_context()
self.backend.delete_zone(context, self.zone)
|
<commit_before><commit_msg>Add test of BIND9 backend
Add test of the following source code.
designate/designate/backend/impl_bind9.py
Change-Id: If2c3292de483881d732d88397574de8e5a12f78a<commit_after># Copyright 2015 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from designate import objects
from designate.tests.test_backend import BackendTestCase
from designate.backend.impl_bind9 import Bind9Backend
class Bind9BackendTestCase(BackendTestCase):
def setUp(self):
super(Bind9BackendTestCase, self).setUp()
self.zone = objects.Zone(id='cca7908b-dad4-4c50-adba-fb67d4c556e8',
name='example.com.',
email='example@example.com')
target = objects.PoolTarget.from_dict({
'id': '4588652b-50e7-46b9-b688-a9bad40a873e',
'type': 'powerdns',
'masters': [{'host': '192.0.2.1', 'port': 53},
{'host': '192.0.2.2', 'port': 35}],
'options': [{'key': 'host', 'value': '192.0.2.3'},
{'key': 'port', 'value': 53},
{'key': 'rndc_host', 'value': '192.0.2.4'},
{'key': 'rndc_port', 'value': 953},
{'key': 'rndc_config_file', 'value': '/etc/rndc.conf'},
{'key': 'rndc_key_file', 'value': '/etc/rndc.key'},
{'key': 'clean_zonefile', 'value': 'true'}],
})
self.backend = Bind9Backend(target)
@mock.patch('designate.utils.execute')
def test_create_zone(self, execute):
context = self.get_context()
self.backend.create_zone(context, self.zone)
@mock.patch('designate.utils.execute')
def test_delete_zone(self, execute):
context = self.get_context()
self.backend.delete_zone(context, self.zone)
|
|
d8e78b058239e7b23a1b69ffb25dc520b7487e1d
|
rayleigh-ritz-truss.py
|
rayleigh-ritz-truss.py
|
import string
import numpy as np
from scipy.optimize import fsolve
from scipy.optimize import minimize
letter_list=list(string.ascii_uppercase)
poly_size=input('Polynomial Size: ')
poly_helper=[]
helper_string=''
for x in range(0,int(poly_size)):
helper_string+=str(letter_list[x])+'*x^'+str(int(poly_size)-x-1)+' + '
helper_string=helper_string[0:len(poly_helper)-3]
print(helper_string)
u_coeffs=[]
for x in range(0,int(poly_size)):
poly_val=input('Polynomial term '+letter_list[x]+': ')
u_coeffs.append(int(poly_val))
u=np.poly1d(u_coeffs)
# All good up to here
|
Add framework for Rayleigh-Ritz polynomial approximations for a single truss
|
Add framework for Rayleigh-Ritz polynomial approximations for a single truss
|
Python
|
mit
|
ndebuhr/openfea,ndebuhr/openfea
|
Add framework for Rayleigh-Ritz polynomial approximations for a single truss
|
import string
import numpy as np
from scipy.optimize import fsolve
from scipy.optimize import minimize
letter_list=list(string.ascii_uppercase)
poly_size=input('Polynomial Size: ')
poly_helper=[]
helper_string=''
for x in range(0,int(poly_size)):
helper_string+=str(letter_list[x])+'*x^'+str(int(poly_size)-x-1)+' + '
helper_string=helper_string[0:len(poly_helper)-3]
print(helper_string)
u_coeffs=[]
for x in range(0,int(poly_size)):
poly_val=input('Polynomial term '+letter_list[x]+': ')
u_coeffs.append(int(poly_val))
u=np.poly1d(u_coeffs)
# All good up to here
|
<commit_before><commit_msg>Add framework for Rayleigh-Ritz polynomial approximations for a single truss<commit_after>
|
import string
import numpy as np
from scipy.optimize import fsolve
from scipy.optimize import minimize
letter_list=list(string.ascii_uppercase)
poly_size=input('Polynomial Size: ')
poly_helper=[]
helper_string=''
for x in range(0,int(poly_size)):
helper_string+=str(letter_list[x])+'*x^'+str(int(poly_size)-x-1)+' + '
helper_string=helper_string[0:len(poly_helper)-3]
print(helper_string)
u_coeffs=[]
for x in range(0,int(poly_size)):
poly_val=input('Polynomial term '+letter_list[x]+': ')
u_coeffs.append(int(poly_val))
u=np.poly1d(u_coeffs)
# All good up to here
|
Add framework for Rayleigh-Ritz polynomial approximations for a single trussimport string
import numpy as np
from scipy.optimize import fsolve
from scipy.optimize import minimize
letter_list=list(string.ascii_uppercase)
poly_size=input('Polynomial Size: ')
poly_helper=[]
helper_string=''
for x in range(0,int(poly_size)):
helper_string+=str(letter_list[x])+'*x^'+str(int(poly_size)-x-1)+' + '
helper_string=helper_string[0:len(poly_helper)-3]
print(helper_string)
u_coeffs=[]
for x in range(0,int(poly_size)):
poly_val=input('Polynomial term '+letter_list[x]+': ')
u_coeffs.append(int(poly_val))
u=np.poly1d(u_coeffs)
# All good up to here
|
<commit_before><commit_msg>Add framework for Rayleigh-Ritz polynomial approximations for a single truss<commit_after>import string
import numpy as np
from scipy.optimize import fsolve
from scipy.optimize import minimize
letter_list=list(string.ascii_uppercase)
poly_size=input('Polynomial Size: ')
poly_helper=[]
helper_string=''
for x in range(0,int(poly_size)):
helper_string+=str(letter_list[x])+'*x^'+str(int(poly_size)-x-1)+' + '
helper_string=helper_string[0:len(poly_helper)-3]
print(helper_string)
u_coeffs=[]
for x in range(0,int(poly_size)):
poly_val=input('Polynomial term '+letter_list[x]+': ')
u_coeffs.append(int(poly_val))
u=np.poly1d(u_coeffs)
# All good up to here
|
|
1b49bc51c60a2c257ff842fefda17c9a6e2ef2a8
|
fabfile/testbeds/testbed_nsheth_a27_a28.py
|
fabfile/testbeds/testbed_nsheth_a27_a28.py
|
from fabric.api import env
os_username = 'admin'
os_password = 'contrail123'
os_tenant_name = 'demo'
host1 = 'root@10.84.5.27'
host2 = 'root@10.84.5.28'
ext_routers = []
router_asn = 64512
public_vn_rtgt = 10000
host_build = 'nsheth@10.84.5.31'
env.roledefs = {
'all': [host1, host2],
'cfgm': [host1],
'openstack': [host1],
'webui': [host1],
'control': [host1, host2],
'collector': [host1],
'database': [host1],
'compute': [host1, host2],
'build': [host_build],
}
env.ostypes = {
host1:'ubuntu',
host2:'ubuntu',
}
env.hostnames = {
'all': ['a1s27', 'a1s28']
}
env.passwords = {
host1: 'c0ntrail123',
host2: 'c0ntrail123',
host_build: 'c0ntrail123',
}
|
Save testbed for a27 + a28.
|
Save testbed for a27 + a28.
|
Python
|
apache-2.0
|
Juniper/contrail-fabric-utils,Juniper/contrail-fabric-utils
|
Save testbed for a27 + a28.
|
from fabric.api import env
os_username = 'admin'
os_password = 'contrail123'
os_tenant_name = 'demo'
host1 = 'root@10.84.5.27'
host2 = 'root@10.84.5.28'
ext_routers = []
router_asn = 64512
public_vn_rtgt = 10000
host_build = 'nsheth@10.84.5.31'
env.roledefs = {
'all': [host1, host2],
'cfgm': [host1],
'openstack': [host1],
'webui': [host1],
'control': [host1, host2],
'collector': [host1],
'database': [host1],
'compute': [host1, host2],
'build': [host_build],
}
env.ostypes = {
host1:'ubuntu',
host2:'ubuntu',
}
env.hostnames = {
'all': ['a1s27', 'a1s28']
}
env.passwords = {
host1: 'c0ntrail123',
host2: 'c0ntrail123',
host_build: 'c0ntrail123',
}
|
<commit_before><commit_msg>Save testbed for a27 + a28.<commit_after>
|
from fabric.api import env
os_username = 'admin'
os_password = 'contrail123'
os_tenant_name = 'demo'
host1 = 'root@10.84.5.27'
host2 = 'root@10.84.5.28'
ext_routers = []
router_asn = 64512
public_vn_rtgt = 10000
host_build = 'nsheth@10.84.5.31'
env.roledefs = {
'all': [host1, host2],
'cfgm': [host1],
'openstack': [host1],
'webui': [host1],
'control': [host1, host2],
'collector': [host1],
'database': [host1],
'compute': [host1, host2],
'build': [host_build],
}
env.ostypes = {
host1:'ubuntu',
host2:'ubuntu',
}
env.hostnames = {
'all': ['a1s27', 'a1s28']
}
env.passwords = {
host1: 'c0ntrail123',
host2: 'c0ntrail123',
host_build: 'c0ntrail123',
}
|
Save testbed for a27 + a28.from fabric.api import env
os_username = 'admin'
os_password = 'contrail123'
os_tenant_name = 'demo'
host1 = 'root@10.84.5.27'
host2 = 'root@10.84.5.28'
ext_routers = []
router_asn = 64512
public_vn_rtgt = 10000
host_build = 'nsheth@10.84.5.31'
env.roledefs = {
'all': [host1, host2],
'cfgm': [host1],
'openstack': [host1],
'webui': [host1],
'control': [host1, host2],
'collector': [host1],
'database': [host1],
'compute': [host1, host2],
'build': [host_build],
}
env.ostypes = {
host1:'ubuntu',
host2:'ubuntu',
}
env.hostnames = {
'all': ['a1s27', 'a1s28']
}
env.passwords = {
host1: 'c0ntrail123',
host2: 'c0ntrail123',
host_build: 'c0ntrail123',
}
|
<commit_before><commit_msg>Save testbed for a27 + a28.<commit_after>from fabric.api import env
os_username = 'admin'
os_password = 'contrail123'
os_tenant_name = 'demo'
host1 = 'root@10.84.5.27'
host2 = 'root@10.84.5.28'
ext_routers = []
router_asn = 64512
public_vn_rtgt = 10000
host_build = 'nsheth@10.84.5.31'
env.roledefs = {
'all': [host1, host2],
'cfgm': [host1],
'openstack': [host1],
'webui': [host1],
'control': [host1, host2],
'collector': [host1],
'database': [host1],
'compute': [host1, host2],
'build': [host_build],
}
env.ostypes = {
host1:'ubuntu',
host2:'ubuntu',
}
env.hostnames = {
'all': ['a1s27', 'a1s28']
}
env.passwords = {
host1: 'c0ntrail123',
host2: 'c0ntrail123',
host_build: 'c0ntrail123',
}
|
|
1b036dc1de34e11122723caa464ea9a3748288fa
|
tests/test_bulk.py
|
tests/test_bulk.py
|
import json
from django.db import models
from django.conf import settings
from django.test import TestCase
from localized_fields.fields import LocalizedField
from .data import get_init_values
from .fake_model import get_fake_model
class LocalizedBulkTestCase(TestCase):
"""Tests bulk operations with data structures provided
by the django-localized-fields library."""
@staticmethod
def test_localized_bulk_insert():
model = get_fake_model(
'BulkInsertModel',
{
'name': LocalizedField(),
'score': models.IntegerField()
}
)
objects = model.objects.bulk_create([
model(name={'en': 'english name 1', 'ro': 'romanian name 1'}, score=1),
model(name={'en': 'english name 2', 'ro': 'romanian name 2'}, score=2),
model(name={'en': 'english name 3', 'ro': 'romanian name 3'}, score=3)
])
assert model.objects.all().count() == 3
|
Add simple test to verify LocalizedField can be used in bulk_create
|
Add simple test to verify LocalizedField can be used in bulk_create
|
Python
|
mit
|
SectorLabs/django-localized-fields,SectorLabs/django-localized-fields,SectorLabs/django-localized-fields
|
Add simple test to verify LocalizedField can be used in bulk_create
|
import json
from django.db import models
from django.conf import settings
from django.test import TestCase
from localized_fields.fields import LocalizedField
from .data import get_init_values
from .fake_model import get_fake_model
class LocalizedBulkTestCase(TestCase):
"""Tests bulk operations with data structures provided
by the django-localized-fields library."""
@staticmethod
def test_localized_bulk_insert():
model = get_fake_model(
'BulkInsertModel',
{
'name': LocalizedField(),
'score': models.IntegerField()
}
)
objects = model.objects.bulk_create([
model(name={'en': 'english name 1', 'ro': 'romanian name 1'}, score=1),
model(name={'en': 'english name 2', 'ro': 'romanian name 2'}, score=2),
model(name={'en': 'english name 3', 'ro': 'romanian name 3'}, score=3)
])
assert model.objects.all().count() == 3
|
<commit_before><commit_msg>Add simple test to verify LocalizedField can be used in bulk_create<commit_after>
|
import json
from django.db import models
from django.conf import settings
from django.test import TestCase
from localized_fields.fields import LocalizedField
from .data import get_init_values
from .fake_model import get_fake_model
class LocalizedBulkTestCase(TestCase):
"""Tests bulk operations with data structures provided
by the django-localized-fields library."""
@staticmethod
def test_localized_bulk_insert():
model = get_fake_model(
'BulkInsertModel',
{
'name': LocalizedField(),
'score': models.IntegerField()
}
)
objects = model.objects.bulk_create([
model(name={'en': 'english name 1', 'ro': 'romanian name 1'}, score=1),
model(name={'en': 'english name 2', 'ro': 'romanian name 2'}, score=2),
model(name={'en': 'english name 3', 'ro': 'romanian name 3'}, score=3)
])
assert model.objects.all().count() == 3
|
Add simple test to verify LocalizedField can be used in bulk_createimport json
from django.db import models
from django.conf import settings
from django.test import TestCase
from localized_fields.fields import LocalizedField
from .data import get_init_values
from .fake_model import get_fake_model
class LocalizedBulkTestCase(TestCase):
"""Tests bulk operations with data structures provided
by the django-localized-fields library."""
@staticmethod
def test_localized_bulk_insert():
model = get_fake_model(
'BulkInsertModel',
{
'name': LocalizedField(),
'score': models.IntegerField()
}
)
objects = model.objects.bulk_create([
model(name={'en': 'english name 1', 'ro': 'romanian name 1'}, score=1),
model(name={'en': 'english name 2', 'ro': 'romanian name 2'}, score=2),
model(name={'en': 'english name 3', 'ro': 'romanian name 3'}, score=3)
])
assert model.objects.all().count() == 3
|
<commit_before><commit_msg>Add simple test to verify LocalizedField can be used in bulk_create<commit_after>import json
from django.db import models
from django.conf import settings
from django.test import TestCase
from localized_fields.fields import LocalizedField
from .data import get_init_values
from .fake_model import get_fake_model
class LocalizedBulkTestCase(TestCase):
"""Tests bulk operations with data structures provided
by the django-localized-fields library."""
@staticmethod
def test_localized_bulk_insert():
model = get_fake_model(
'BulkInsertModel',
{
'name': LocalizedField(),
'score': models.IntegerField()
}
)
objects = model.objects.bulk_create([
model(name={'en': 'english name 1', 'ro': 'romanian name 1'}, score=1),
model(name={'en': 'english name 2', 'ro': 'romanian name 2'}, score=2),
model(name={'en': 'english name 3', 'ro': 'romanian name 3'}, score=3)
])
assert model.objects.all().count() == 3
|
|
5cd5a8453cc866cba9441700144b4fd36f017b1d
|
turbustat/simulator/tests/test_extended_fields.py
|
turbustat/simulator/tests/test_extended_fields.py
|
from ..gen_field import make_3dfield, make_extended
import pytest
import numpy as np
import numpy.testing as npt
@pytest.mark.parametrize(('shape', 'slope'), [(shape, slope) for shape in
[32, 33] for slope in
np.arange(0.0, 5.5, 1.)])
def test_3D_gen_field(shape, slope):
'''
Power needs to be conserved between the fft and real versions.
'''
cube_fft = make_3dfield(shape, powerlaw=slope, return_fft=True)
cube = make_3dfield(shape, powerlaw=slope, return_fft=False)
refft = np.fft.rfftn(cube)
npt.assert_allclose(refft, cube_fft, rtol=1e-8, atol=5e-9)
power = np.sum(np.abs(cube_fft)**2) / float(cube_fft.size)**2
power_cube = np.sum(np.abs(refft)**2) / float(refft.size)**2
npt.assert_allclose(power, power_cube, rtol=1e-8)
@pytest.mark.parametrize(('shape', 'slope'), [(shape, slope) for shape in
[32, 33] for slope in
np.arange(0.0, 5.5, 0.5)])
def test_2D_gen_field(shape, slope):
'''
Power needs to be conserved between the fft and real versions.
'''
seed = np.random.randint(0, 2**31 - 1)
img_fft = make_extended(shape, powerlaw=slope, return_fft=True,
randomseed=seed)
img = make_extended(shape, powerlaw=slope, return_fft=False,
randomseed=seed)
refft = np.fft.rfft2(img)
npt.assert_allclose(refft, img_fft, rtol=1e-8, atol=5e-9)
power = np.sum(np.abs(img_fft)**2) / float(img_fft.size)**2
power_img = np.sum(np.abs(refft)**2) / float(refft.size)**2
npt.assert_allclose(power, power_img, rtol=1e-8)
|
Add tests for power-law functions
|
Add tests for power-law functions
|
Python
|
mit
|
e-koch/TurbuStat,Astroua/TurbuStat
|
Add tests for power-law functions
|
from ..gen_field import make_3dfield, make_extended
import pytest
import numpy as np
import numpy.testing as npt
@pytest.mark.parametrize(('shape', 'slope'), [(shape, slope) for shape in
[32, 33] for slope in
np.arange(0.0, 5.5, 1.)])
def test_3D_gen_field(shape, slope):
'''
Power needs to be conserved between the fft and real versions.
'''
cube_fft = make_3dfield(shape, powerlaw=slope, return_fft=True)
cube = make_3dfield(shape, powerlaw=slope, return_fft=False)
refft = np.fft.rfftn(cube)
npt.assert_allclose(refft, cube_fft, rtol=1e-8, atol=5e-9)
power = np.sum(np.abs(cube_fft)**2) / float(cube_fft.size)**2
power_cube = np.sum(np.abs(refft)**2) / float(refft.size)**2
npt.assert_allclose(power, power_cube, rtol=1e-8)
@pytest.mark.parametrize(('shape', 'slope'), [(shape, slope) for shape in
[32, 33] for slope in
np.arange(0.0, 5.5, 0.5)])
def test_2D_gen_field(shape, slope):
'''
Power needs to be conserved between the fft and real versions.
'''
seed = np.random.randint(0, 2**31 - 1)
img_fft = make_extended(shape, powerlaw=slope, return_fft=True,
randomseed=seed)
img = make_extended(shape, powerlaw=slope, return_fft=False,
randomseed=seed)
refft = np.fft.rfft2(img)
npt.assert_allclose(refft, img_fft, rtol=1e-8, atol=5e-9)
power = np.sum(np.abs(img_fft)**2) / float(img_fft.size)**2
power_img = np.sum(np.abs(refft)**2) / float(refft.size)**2
npt.assert_allclose(power, power_img, rtol=1e-8)
|
<commit_before><commit_msg>Add tests for power-law functions<commit_after>
|
from ..gen_field import make_3dfield, make_extended
import pytest
import numpy as np
import numpy.testing as npt
@pytest.mark.parametrize(('shape', 'slope'), [(shape, slope) for shape in
[32, 33] for slope in
np.arange(0.0, 5.5, 1.)])
def test_3D_gen_field(shape, slope):
'''
Power needs to be conserved between the fft and real versions.
'''
cube_fft = make_3dfield(shape, powerlaw=slope, return_fft=True)
cube = make_3dfield(shape, powerlaw=slope, return_fft=False)
refft = np.fft.rfftn(cube)
npt.assert_allclose(refft, cube_fft, rtol=1e-8, atol=5e-9)
power = np.sum(np.abs(cube_fft)**2) / float(cube_fft.size)**2
power_cube = np.sum(np.abs(refft)**2) / float(refft.size)**2
npt.assert_allclose(power, power_cube, rtol=1e-8)
@pytest.mark.parametrize(('shape', 'slope'), [(shape, slope) for shape in
[32, 33] for slope in
np.arange(0.0, 5.5, 0.5)])
def test_2D_gen_field(shape, slope):
'''
Power needs to be conserved between the fft and real versions.
'''
seed = np.random.randint(0, 2**31 - 1)
img_fft = make_extended(shape, powerlaw=slope, return_fft=True,
randomseed=seed)
img = make_extended(shape, powerlaw=slope, return_fft=False,
randomseed=seed)
refft = np.fft.rfft2(img)
npt.assert_allclose(refft, img_fft, rtol=1e-8, atol=5e-9)
power = np.sum(np.abs(img_fft)**2) / float(img_fft.size)**2
power_img = np.sum(np.abs(refft)**2) / float(refft.size)**2
npt.assert_allclose(power, power_img, rtol=1e-8)
|
Add tests for power-law functions
from ..gen_field import make_3dfield, make_extended
import pytest
import numpy as np
import numpy.testing as npt
@pytest.mark.parametrize(('shape', 'slope'), [(shape, slope) for shape in
[32, 33] for slope in
np.arange(0.0, 5.5, 1.)])
def test_3D_gen_field(shape, slope):
'''
Power needs to be conserved between the fft and real versions.
'''
cube_fft = make_3dfield(shape, powerlaw=slope, return_fft=True)
cube = make_3dfield(shape, powerlaw=slope, return_fft=False)
refft = np.fft.rfftn(cube)
npt.assert_allclose(refft, cube_fft, rtol=1e-8, atol=5e-9)
power = np.sum(np.abs(cube_fft)**2) / float(cube_fft.size)**2
power_cube = np.sum(np.abs(refft)**2) / float(refft.size)**2
npt.assert_allclose(power, power_cube, rtol=1e-8)
@pytest.mark.parametrize(('shape', 'slope'), [(shape, slope) for shape in
[32, 33] for slope in
np.arange(0.0, 5.5, 0.5)])
def test_2D_gen_field(shape, slope):
'''
Power needs to be conserved between the fft and real versions.
'''
seed = np.random.randint(0, 2**31 - 1)
img_fft = make_extended(shape, powerlaw=slope, return_fft=True,
randomseed=seed)
img = make_extended(shape, powerlaw=slope, return_fft=False,
randomseed=seed)
refft = np.fft.rfft2(img)
npt.assert_allclose(refft, img_fft, rtol=1e-8, atol=5e-9)
power = np.sum(np.abs(img_fft)**2) / float(img_fft.size)**2
power_img = np.sum(np.abs(refft)**2) / float(refft.size)**2
npt.assert_allclose(power, power_img, rtol=1e-8)
|
<commit_before><commit_msg>Add tests for power-law functions<commit_after>
from ..gen_field import make_3dfield, make_extended
import pytest
import numpy as np
import numpy.testing as npt
@pytest.mark.parametrize(('shape', 'slope'), [(shape, slope) for shape in
[32, 33] for slope in
np.arange(0.0, 5.5, 1.)])
def test_3D_gen_field(shape, slope):
'''
Power needs to be conserved between the fft and real versions.
'''
cube_fft = make_3dfield(shape, powerlaw=slope, return_fft=True)
cube = make_3dfield(shape, powerlaw=slope, return_fft=False)
refft = np.fft.rfftn(cube)
npt.assert_allclose(refft, cube_fft, rtol=1e-8, atol=5e-9)
power = np.sum(np.abs(cube_fft)**2) / float(cube_fft.size)**2
power_cube = np.sum(np.abs(refft)**2) / float(refft.size)**2
npt.assert_allclose(power, power_cube, rtol=1e-8)
@pytest.mark.parametrize(('shape', 'slope'), [(shape, slope) for shape in
[32, 33] for slope in
np.arange(0.0, 5.5, 0.5)])
def test_2D_gen_field(shape, slope):
'''
Power needs to be conserved between the fft and real versions.
'''
seed = np.random.randint(0, 2**31 - 1)
img_fft = make_extended(shape, powerlaw=slope, return_fft=True,
randomseed=seed)
img = make_extended(shape, powerlaw=slope, return_fft=False,
randomseed=seed)
refft = np.fft.rfft2(img)
npt.assert_allclose(refft, img_fft, rtol=1e-8, atol=5e-9)
power = np.sum(np.abs(img_fft)**2) / float(img_fft.size)**2
power_img = np.sum(np.abs(refft)**2) / float(refft.size)**2
npt.assert_allclose(power, power_img, rtol=1e-8)
|
|
e20ba0715def82abddfdc964b9adc13fad308a95
|
tools/mknbindex.py
|
tools/mknbindex.py
|
#!/usr/bin/env python
"""Simple script to auto-generate the index of notebooks in a given directory.
"""
import glob
import urllib
notebooks = sorted(glob.glob('*.ipynb'))
tpl = ( '* [{0}](http://nbviewer.ipython.org/url/github.com/ipython/ipython/'
'raw/master/examples/notebooks/{1})' )
idx = [
"""# A collection of Notebooks for using IPython effectively
The following notebooks showcase multiple aspects of IPython, from its basic
use to more advanced scenarios. They introduce you to the use of the Notebook
and also cover aspects of IPython that are available in other clients, such as
the cell magics for multi-language integration or our extended display
protocol.
For beginners, we recommend that you start with the 5-part series that
introduces the system, and later read others as the topics interest you.
Once you are familiar with the notebook system, we encourage you to visit our
[gallery](https://github.com/ipython/ipython/wiki/A-gallery-of-interesting-IPython-Notebooks)
where you will find many more examples that cover areas from basic Python
programming to advanced topics in scientific computing.
"""]
idx.extend(tpl.format(nb.replace('.ipynb',''), urllib.quote(nb))
for nb in notebooks)
with open('README.md', 'w') as f:
f.write('\n'.join(idx))
f.write('\n')
|
Add script to auto-generate our index of example notebooks.
|
Add script to auto-generate our index of example notebooks.
It's highly hard-coded for now, but will do in the meantime. We can
generalize it later, but we're really hurting by not having this index
anywhere.
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
Add script to auto-generate our index of example notebooks.
It's highly hard-coded for now, but will do in the meantime. We can
generalize it later, but we're really hurting by not having this index
anywhere.
|
#!/usr/bin/env python
"""Simple script to auto-generate the index of notebooks in a given directory.
"""
import glob
import urllib
notebooks = sorted(glob.glob('*.ipynb'))
tpl = ( '* [{0}](http://nbviewer.ipython.org/url/github.com/ipython/ipython/'
'raw/master/examples/notebooks/{1})' )
idx = [
"""# A collection of Notebooks for using IPython effectively
The following notebooks showcase multiple aspects of IPython, from its basic
use to more advanced scenarios. They introduce you to the use of the Notebook
and also cover aspects of IPython that are available in other clients, such as
the cell magics for multi-language integration or our extended display
protocol.
For beginners, we recommend that you start with the 5-part series that
introduces the system, and later read others as the topics interest you.
Once you are familiar with the notebook system, we encourage you to visit our
[gallery](https://github.com/ipython/ipython/wiki/A-gallery-of-interesting-IPython-Notebooks)
where you will find many more examples that cover areas from basic Python
programming to advanced topics in scientific computing.
"""]
idx.extend(tpl.format(nb.replace('.ipynb',''), urllib.quote(nb))
for nb in notebooks)
with open('README.md', 'w') as f:
f.write('\n'.join(idx))
f.write('\n')
|
<commit_before><commit_msg>Add script to auto-generate our index of example notebooks.
It's highly hard-coded for now, but will do in the meantime. We can
generalize it later, but we're really hurting by not having this index
anywhere.<commit_after>
|
#!/usr/bin/env python
"""Simple script to auto-generate the index of notebooks in a given directory.
"""
import glob
import urllib
notebooks = sorted(glob.glob('*.ipynb'))
tpl = ( '* [{0}](http://nbviewer.ipython.org/url/github.com/ipython/ipython/'
'raw/master/examples/notebooks/{1})' )
idx = [
"""# A collection of Notebooks for using IPython effectively
The following notebooks showcase multiple aspects of IPython, from its basic
use to more advanced scenarios. They introduce you to the use of the Notebook
and also cover aspects of IPython that are available in other clients, such as
the cell magics for multi-language integration or our extended display
protocol.
For beginners, we recommend that you start with the 5-part series that
introduces the system, and later read others as the topics interest you.
Once you are familiar with the notebook system, we encourage you to visit our
[gallery](https://github.com/ipython/ipython/wiki/A-gallery-of-interesting-IPython-Notebooks)
where you will find many more examples that cover areas from basic Python
programming to advanced topics in scientific computing.
"""]
idx.extend(tpl.format(nb.replace('.ipynb',''), urllib.quote(nb))
for nb in notebooks)
with open('README.md', 'w') as f:
f.write('\n'.join(idx))
f.write('\n')
|
Add script to auto-generate our index of example notebooks.
It's highly hard-coded for now, but will do in the meantime. We can
generalize it later, but we're really hurting by not having this index
anywhere.#!/usr/bin/env python
"""Simple script to auto-generate the index of notebooks in a given directory.
"""
import glob
import urllib
notebooks = sorted(glob.glob('*.ipynb'))
tpl = ( '* [{0}](http://nbviewer.ipython.org/url/github.com/ipython/ipython/'
'raw/master/examples/notebooks/{1})' )
idx = [
"""# A collection of Notebooks for using IPython effectively
The following notebooks showcase multiple aspects of IPython, from its basic
use to more advanced scenarios. They introduce you to the use of the Notebook
and also cover aspects of IPython that are available in other clients, such as
the cell magics for multi-language integration or our extended display
protocol.
For beginners, we recommend that you start with the 5-part series that
introduces the system, and later read others as the topics interest you.
Once you are familiar with the notebook system, we encourage you to visit our
[gallery](https://github.com/ipython/ipython/wiki/A-gallery-of-interesting-IPython-Notebooks)
where you will find many more examples that cover areas from basic Python
programming to advanced topics in scientific computing.
"""]
idx.extend(tpl.format(nb.replace('.ipynb',''), urllib.quote(nb))
for nb in notebooks)
with open('README.md', 'w') as f:
f.write('\n'.join(idx))
f.write('\n')
|
<commit_before><commit_msg>Add script to auto-generate our index of example notebooks.
It's highly hard-coded for now, but will do in the meantime. We can
generalize it later, but we're really hurting by not having this index
anywhere.<commit_after>#!/usr/bin/env python
"""Simple script to auto-generate the index of notebooks in a given directory.
"""
import glob
import urllib
notebooks = sorted(glob.glob('*.ipynb'))
tpl = ( '* [{0}](http://nbviewer.ipython.org/url/github.com/ipython/ipython/'
'raw/master/examples/notebooks/{1})' )
idx = [
"""# A collection of Notebooks for using IPython effectively
The following notebooks showcase multiple aspects of IPython, from its basic
use to more advanced scenarios. They introduce you to the use of the Notebook
and also cover aspects of IPython that are available in other clients, such as
the cell magics for multi-language integration or our extended display
protocol.
For beginners, we recommend that you start with the 5-part series that
introduces the system, and later read others as the topics interest you.
Once you are familiar with the notebook system, we encourage you to visit our
[gallery](https://github.com/ipython/ipython/wiki/A-gallery-of-interesting-IPython-Notebooks)
where you will find many more examples that cover areas from basic Python
programming to advanced topics in scientific computing.
"""]
idx.extend(tpl.format(nb.replace('.ipynb',''), urllib.quote(nb))
for nb in notebooks)
with open('README.md', 'w') as f:
f.write('\n'.join(idx))
f.write('\n')
|
|
a16aa19208ba9dc23708ade128383f06a3df3f77
|
tests/test_managed.py
|
tests/test_managed.py
|
import glob
from jgo.jgo import InvalidEndpoint
import jgo
import os
import pathlib
import unittest
import shutil
import tempfile
import logging
_logger = logging.getLogger(__name__)
_logger.level = logging.INFO
SJC_VERSION = "2.87.0"
SJC_OPTIONAL_VERSION = "1.0.0"
MANAGED_ENDPOINT = (
"org.scijava:scijava-common:{}+org.scijava:scijava-optional:MANAGED".format(
SJC_VERSION
)
)
MANAGED_PRIMARY_ENDPOINT = "org.scijava:scijava-common:MANAGED"
REPOSITORIES = {"scijava.public": "https://maven.scijava.org/content/groups/public"}
def resolve_managed(endpoint, cache_dir, m2_repo):
return jgo.resolve_dependencies(
endpoint,
m2_repo=m2_repo,
cache_dir=cache_dir,
manage_dependencies=True,
repositories=REPOSITORIES,
)
class ManagedDependencyTest(unittest.TestCase):
def test_resolve_managed(self):
tmp_dir = tempfile.mkdtemp(prefix="jgo-test-cache-dir")
m2_repo = os.path.join(str(pathlib.Path.home()), ".m2", "repository")
try:
_, workspace = resolve_managed(
MANAGED_ENDPOINT, cache_dir=tmp_dir, m2_repo=m2_repo
)
jars = glob.glob(os.path.join(workspace, "*jar"))
self.assertEqual(len(jars), 4, "Expected two jars in workspace")
self.assertEqual(
jars[2],
os.path.join(workspace, "scijava-common-%s.jar" % SJC_VERSION),
"Expected scijava-common jar",
)
self.assertEqual(
jars[3],
os.path.join(
workspace, "scijava-optional-%s.jar" % SJC_OPTIONAL_VERSION
),
"Expected scijava-optional jar",
)
pom = (
tmp_dir
+ "\\org.scijava\\scijava-common\\cdcf7e6e4f89d0815be7f9c57eae1fa3361f9b75f0eaa89d4099a731690d0c5e\\pom.xml"
)
with open(pom) as f:
if "RELEASE" in f.read():
self.fail(
"Expected no RELEASE version string in managed dependency"
)
finally:
shutil.rmtree(tmp_dir)
def test_managed_primary(self):
tmp_dir = tempfile.mkdtemp(prefix="jgo-test-cache-dir")
m2_repo = os.path.join(str(pathlib.Path.home()), ".m2", "repository")
try:
with self.assertRaises(InvalidEndpoint) as context:
resolve_managed(
MANAGED_PRIMARY_ENDPOINT, cache_dir=tmp_dir, m2_repo=m2_repo
)
finally:
shutil.rmtree(tmp_dir)
if __name__ == "__main__":
unittest.main()
|
Add managed Endpoint unit tests
|
Add managed Endpoint unit tests
|
Python
|
unlicense
|
ctrueden/jrun,ctrueden/jrun
|
Add managed Endpoint unit tests
|
import glob
from jgo.jgo import InvalidEndpoint
import jgo
import os
import pathlib
import unittest
import shutil
import tempfile
import logging
_logger = logging.getLogger(__name__)
_logger.level = logging.INFO
SJC_VERSION = "2.87.0"
SJC_OPTIONAL_VERSION = "1.0.0"
MANAGED_ENDPOINT = (
"org.scijava:scijava-common:{}+org.scijava:scijava-optional:MANAGED".format(
SJC_VERSION
)
)
MANAGED_PRIMARY_ENDPOINT = "org.scijava:scijava-common:MANAGED"
REPOSITORIES = {"scijava.public": "https://maven.scijava.org/content/groups/public"}
def resolve_managed(endpoint, cache_dir, m2_repo):
return jgo.resolve_dependencies(
endpoint,
m2_repo=m2_repo,
cache_dir=cache_dir,
manage_dependencies=True,
repositories=REPOSITORIES,
)
class ManagedDependencyTest(unittest.TestCase):
def test_resolve_managed(self):
tmp_dir = tempfile.mkdtemp(prefix="jgo-test-cache-dir")
m2_repo = os.path.join(str(pathlib.Path.home()), ".m2", "repository")
try:
_, workspace = resolve_managed(
MANAGED_ENDPOINT, cache_dir=tmp_dir, m2_repo=m2_repo
)
jars = glob.glob(os.path.join(workspace, "*jar"))
self.assertEqual(len(jars), 4, "Expected two jars in workspace")
self.assertEqual(
jars[2],
os.path.join(workspace, "scijava-common-%s.jar" % SJC_VERSION),
"Expected scijava-common jar",
)
self.assertEqual(
jars[3],
os.path.join(
workspace, "scijava-optional-%s.jar" % SJC_OPTIONAL_VERSION
),
"Expected scijava-optional jar",
)
pom = (
tmp_dir
+ "\\org.scijava\\scijava-common\\cdcf7e6e4f89d0815be7f9c57eae1fa3361f9b75f0eaa89d4099a731690d0c5e\\pom.xml"
)
with open(pom) as f:
if "RELEASE" in f.read():
self.fail(
"Expected no RELEASE version string in managed dependency"
)
finally:
shutil.rmtree(tmp_dir)
def test_managed_primary(self):
tmp_dir = tempfile.mkdtemp(prefix="jgo-test-cache-dir")
m2_repo = os.path.join(str(pathlib.Path.home()), ".m2", "repository")
try:
with self.assertRaises(InvalidEndpoint) as context:
resolve_managed(
MANAGED_PRIMARY_ENDPOINT, cache_dir=tmp_dir, m2_repo=m2_repo
)
finally:
shutil.rmtree(tmp_dir)
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add managed Endpoint unit tests<commit_after>
|
import glob
from jgo.jgo import InvalidEndpoint
import jgo
import os
import pathlib
import unittest
import shutil
import tempfile
import logging
_logger = logging.getLogger(__name__)
_logger.level = logging.INFO
SJC_VERSION = "2.87.0"
SJC_OPTIONAL_VERSION = "1.0.0"
MANAGED_ENDPOINT = (
"org.scijava:scijava-common:{}+org.scijava:scijava-optional:MANAGED".format(
SJC_VERSION
)
)
MANAGED_PRIMARY_ENDPOINT = "org.scijava:scijava-common:MANAGED"
REPOSITORIES = {"scijava.public": "https://maven.scijava.org/content/groups/public"}
def resolve_managed(endpoint, cache_dir, m2_repo):
return jgo.resolve_dependencies(
endpoint,
m2_repo=m2_repo,
cache_dir=cache_dir,
manage_dependencies=True,
repositories=REPOSITORIES,
)
class ManagedDependencyTest(unittest.TestCase):
def test_resolve_managed(self):
tmp_dir = tempfile.mkdtemp(prefix="jgo-test-cache-dir")
m2_repo = os.path.join(str(pathlib.Path.home()), ".m2", "repository")
try:
_, workspace = resolve_managed(
MANAGED_ENDPOINT, cache_dir=tmp_dir, m2_repo=m2_repo
)
jars = glob.glob(os.path.join(workspace, "*jar"))
self.assertEqual(len(jars), 4, "Expected two jars in workspace")
self.assertEqual(
jars[2],
os.path.join(workspace, "scijava-common-%s.jar" % SJC_VERSION),
"Expected scijava-common jar",
)
self.assertEqual(
jars[3],
os.path.join(
workspace, "scijava-optional-%s.jar" % SJC_OPTIONAL_VERSION
),
"Expected scijava-optional jar",
)
pom = (
tmp_dir
+ "\\org.scijava\\scijava-common\\cdcf7e6e4f89d0815be7f9c57eae1fa3361f9b75f0eaa89d4099a731690d0c5e\\pom.xml"
)
with open(pom) as f:
if "RELEASE" in f.read():
self.fail(
"Expected no RELEASE version string in managed dependency"
)
finally:
shutil.rmtree(tmp_dir)
def test_managed_primary(self):
tmp_dir = tempfile.mkdtemp(prefix="jgo-test-cache-dir")
m2_repo = os.path.join(str(pathlib.Path.home()), ".m2", "repository")
try:
with self.assertRaises(InvalidEndpoint) as context:
resolve_managed(
MANAGED_PRIMARY_ENDPOINT, cache_dir=tmp_dir, m2_repo=m2_repo
)
finally:
shutil.rmtree(tmp_dir)
if __name__ == "__main__":
unittest.main()
|
Add managed Endpoint unit testsimport glob
from jgo.jgo import InvalidEndpoint
import jgo
import os
import pathlib
import unittest
import shutil
import tempfile
import logging
_logger = logging.getLogger(__name__)
_logger.level = logging.INFO
SJC_VERSION = "2.87.0"
SJC_OPTIONAL_VERSION = "1.0.0"
MANAGED_ENDPOINT = (
"org.scijava:scijava-common:{}+org.scijava:scijava-optional:MANAGED".format(
SJC_VERSION
)
)
MANAGED_PRIMARY_ENDPOINT = "org.scijava:scijava-common:MANAGED"
REPOSITORIES = {"scijava.public": "https://maven.scijava.org/content/groups/public"}
def resolve_managed(endpoint, cache_dir, m2_repo):
return jgo.resolve_dependencies(
endpoint,
m2_repo=m2_repo,
cache_dir=cache_dir,
manage_dependencies=True,
repositories=REPOSITORIES,
)
class ManagedDependencyTest(unittest.TestCase):
def test_resolve_managed(self):
tmp_dir = tempfile.mkdtemp(prefix="jgo-test-cache-dir")
m2_repo = os.path.join(str(pathlib.Path.home()), ".m2", "repository")
try:
_, workspace = resolve_managed(
MANAGED_ENDPOINT, cache_dir=tmp_dir, m2_repo=m2_repo
)
jars = glob.glob(os.path.join(workspace, "*jar"))
self.assertEqual(len(jars), 4, "Expected two jars in workspace")
self.assertEqual(
jars[2],
os.path.join(workspace, "scijava-common-%s.jar" % SJC_VERSION),
"Expected scijava-common jar",
)
self.assertEqual(
jars[3],
os.path.join(
workspace, "scijava-optional-%s.jar" % SJC_OPTIONAL_VERSION
),
"Expected scijava-optional jar",
)
pom = (
tmp_dir
+ "\\org.scijava\\scijava-common\\cdcf7e6e4f89d0815be7f9c57eae1fa3361f9b75f0eaa89d4099a731690d0c5e\\pom.xml"
)
with open(pom) as f:
if "RELEASE" in f.read():
self.fail(
"Expected no RELEASE version string in managed dependency"
)
finally:
shutil.rmtree(tmp_dir)
def test_managed_primary(self):
tmp_dir = tempfile.mkdtemp(prefix="jgo-test-cache-dir")
m2_repo = os.path.join(str(pathlib.Path.home()), ".m2", "repository")
try:
with self.assertRaises(InvalidEndpoint) as context:
resolve_managed(
MANAGED_PRIMARY_ENDPOINT, cache_dir=tmp_dir, m2_repo=m2_repo
)
finally:
shutil.rmtree(tmp_dir)
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add managed Endpoint unit tests<commit_after>import glob
from jgo.jgo import InvalidEndpoint
import jgo
import os
import pathlib
import unittest
import shutil
import tempfile
import logging
_logger = logging.getLogger(__name__)
_logger.level = logging.INFO
SJC_VERSION = "2.87.0"
SJC_OPTIONAL_VERSION = "1.0.0"
MANAGED_ENDPOINT = (
"org.scijava:scijava-common:{}+org.scijava:scijava-optional:MANAGED".format(
SJC_VERSION
)
)
MANAGED_PRIMARY_ENDPOINT = "org.scijava:scijava-common:MANAGED"
REPOSITORIES = {"scijava.public": "https://maven.scijava.org/content/groups/public"}
def resolve_managed(endpoint, cache_dir, m2_repo):
return jgo.resolve_dependencies(
endpoint,
m2_repo=m2_repo,
cache_dir=cache_dir,
manage_dependencies=True,
repositories=REPOSITORIES,
)
class ManagedDependencyTest(unittest.TestCase):
def test_resolve_managed(self):
tmp_dir = tempfile.mkdtemp(prefix="jgo-test-cache-dir")
m2_repo = os.path.join(str(pathlib.Path.home()), ".m2", "repository")
try:
_, workspace = resolve_managed(
MANAGED_ENDPOINT, cache_dir=tmp_dir, m2_repo=m2_repo
)
jars = glob.glob(os.path.join(workspace, "*jar"))
self.assertEqual(len(jars), 4, "Expected two jars in workspace")
self.assertEqual(
jars[2],
os.path.join(workspace, "scijava-common-%s.jar" % SJC_VERSION),
"Expected scijava-common jar",
)
self.assertEqual(
jars[3],
os.path.join(
workspace, "scijava-optional-%s.jar" % SJC_OPTIONAL_VERSION
),
"Expected scijava-optional jar",
)
pom = (
tmp_dir
+ "\\org.scijava\\scijava-common\\cdcf7e6e4f89d0815be7f9c57eae1fa3361f9b75f0eaa89d4099a731690d0c5e\\pom.xml"
)
with open(pom) as f:
if "RELEASE" in f.read():
self.fail(
"Expected no RELEASE version string in managed dependency"
)
finally:
shutil.rmtree(tmp_dir)
def test_managed_primary(self):
tmp_dir = tempfile.mkdtemp(prefix="jgo-test-cache-dir")
m2_repo = os.path.join(str(pathlib.Path.home()), ".m2", "repository")
try:
with self.assertRaises(InvalidEndpoint) as context:
resolve_managed(
MANAGED_PRIMARY_ENDPOINT, cache_dir=tmp_dir, m2_repo=m2_repo
)
finally:
shutil.rmtree(tmp_dir)
if __name__ == "__main__":
unittest.main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.