commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
26585bcc2a4da3a53b1b13d4d1728e2533b12140 | move_tab.py | move_tab.py | # -*- coding: utf-8 -*-
"""
Move Tab
Plugin for Sublime Text to move tabs around
Copyright (c) 2012 Frédéric Massart - FMCorz.net
Licensed under The MIT License
Redistributions of files must retain the above copyright notice.
http://github.com/FMCorz/MoveTab
"""
import sublime_plugin
class MoveTabCommand(sublime_plugin.WindowCommand):
def run(self, position):
view = self.window.active_view()
group, index = self.window.get_view_index(view)
if index < 0:
return
count = len(self.window.views_in_group(group))
if isinstance(position, str) and position[0] in '-+':
position = (index + int(position)) % count
else:
position = min(count - 1, max(0, int(position)))
# Avoid flashing tab when moving to same index
if position != index:
self.window.set_view_index(view, group, position)
self.window.focus_view(view)
def is_enabled(self):
(group, index) = self.window.get_view_index(self.window.active_view())
return -1 not in (group, index) and len(self.window.views_in_group(group)) > 1
| """
Move Tab
Plugin for Sublime Text to move tabs around
Copyright (c) 2012 Frédéric Massart - FMCorz.net
Licensed under The MIT License
Redistributions of files must retain the above copyright notice.
http://github.com/FMCorz/MoveTab
"""
import sublime_plugin
class MoveTabCommand(sublime_plugin.WindowCommand):
def run(self, position):
view = self.window.active_view()
group, index = self.window.get_view_index(view)
if index < 0:
return
count = len(self.window.views_in_group(group))
if isinstance(position, str) and position[0] in '-+':
position = (index + int(position)) % count
else:
position = min(count - 1, max(0, int(position)))
# Avoid flashing tab when moving to same index
if position != index:
self.window.set_view_index(view, group, position)
self.window.focus_view(view)
def is_enabled(self):
(group, index) = self.window.get_view_index(self.window.active_view())
return -1 not in (group, index) and len(self.window.views_in_group(group)) > 1
| Remove outdated and redundant encoding specifier | Remove outdated and redundant encoding specifier
| Python | mit | SublimeText/MoveTab | # -*- coding: utf-8 -*-
"""
Move Tab
Plugin for Sublime Text to move tabs around
Copyright (c) 2012 Frédéric Massart - FMCorz.net
Licensed under The MIT License
Redistributions of files must retain the above copyright notice.
http://github.com/FMCorz/MoveTab
"""
import sublime_plugin
class MoveTabCommand(sublime_plugin.WindowCommand):
def run(self, position):
view = self.window.active_view()
group, index = self.window.get_view_index(view)
if index < 0:
return
count = len(self.window.views_in_group(group))
if isinstance(position, str) and position[0] in '-+':
position = (index + int(position)) % count
else:
position = min(count - 1, max(0, int(position)))
# Avoid flashing tab when moving to same index
if position != index:
self.window.set_view_index(view, group, position)
self.window.focus_view(view)
def is_enabled(self):
(group, index) = self.window.get_view_index(self.window.active_view())
return -1 not in (group, index) and len(self.window.views_in_group(group)) > 1
Remove outdated and redundant encoding specifier | """
Move Tab
Plugin for Sublime Text to move tabs around
Copyright (c) 2012 Frédéric Massart - FMCorz.net
Licensed under The MIT License
Redistributions of files must retain the above copyright notice.
http://github.com/FMCorz/MoveTab
"""
import sublime_plugin
class MoveTabCommand(sublime_plugin.WindowCommand):
def run(self, position):
view = self.window.active_view()
group, index = self.window.get_view_index(view)
if index < 0:
return
count = len(self.window.views_in_group(group))
if isinstance(position, str) and position[0] in '-+':
position = (index + int(position)) % count
else:
position = min(count - 1, max(0, int(position)))
# Avoid flashing tab when moving to same index
if position != index:
self.window.set_view_index(view, group, position)
self.window.focus_view(view)
def is_enabled(self):
(group, index) = self.window.get_view_index(self.window.active_view())
return -1 not in (group, index) and len(self.window.views_in_group(group)) > 1
| <commit_before># -*- coding: utf-8 -*-
"""
Move Tab
Plugin for Sublime Text to move tabs around
Copyright (c) 2012 Frédéric Massart - FMCorz.net
Licensed under The MIT License
Redistributions of files must retain the above copyright notice.
http://github.com/FMCorz/MoveTab
"""
import sublime_plugin
class MoveTabCommand(sublime_plugin.WindowCommand):
def run(self, position):
view = self.window.active_view()
group, index = self.window.get_view_index(view)
if index < 0:
return
count = len(self.window.views_in_group(group))
if isinstance(position, str) and position[0] in '-+':
position = (index + int(position)) % count
else:
position = min(count - 1, max(0, int(position)))
# Avoid flashing tab when moving to same index
if position != index:
self.window.set_view_index(view, group, position)
self.window.focus_view(view)
def is_enabled(self):
(group, index) = self.window.get_view_index(self.window.active_view())
return -1 not in (group, index) and len(self.window.views_in_group(group)) > 1
<commit_msg>Remove outdated and redundant encoding specifier<commit_after> | """
Move Tab
Plugin for Sublime Text to move tabs around
Copyright (c) 2012 Frédéric Massart - FMCorz.net
Licensed under The MIT License
Redistributions of files must retain the above copyright notice.
http://github.com/FMCorz/MoveTab
"""
import sublime_plugin
class MoveTabCommand(sublime_plugin.WindowCommand):
def run(self, position):
view = self.window.active_view()
group, index = self.window.get_view_index(view)
if index < 0:
return
count = len(self.window.views_in_group(group))
if isinstance(position, str) and position[0] in '-+':
position = (index + int(position)) % count
else:
position = min(count - 1, max(0, int(position)))
# Avoid flashing tab when moving to same index
if position != index:
self.window.set_view_index(view, group, position)
self.window.focus_view(view)
def is_enabled(self):
(group, index) = self.window.get_view_index(self.window.active_view())
return -1 not in (group, index) and len(self.window.views_in_group(group)) > 1
| # -*- coding: utf-8 -*-
"""
Move Tab
Plugin for Sublime Text to move tabs around
Copyright (c) 2012 Frédéric Massart - FMCorz.net
Licensed under The MIT License
Redistributions of files must retain the above copyright notice.
http://github.com/FMCorz/MoveTab
"""
import sublime_plugin
class MoveTabCommand(sublime_plugin.WindowCommand):
def run(self, position):
view = self.window.active_view()
group, index = self.window.get_view_index(view)
if index < 0:
return
count = len(self.window.views_in_group(group))
if isinstance(position, str) and position[0] in '-+':
position = (index + int(position)) % count
else:
position = min(count - 1, max(0, int(position)))
# Avoid flashing tab when moving to same index
if position != index:
self.window.set_view_index(view, group, position)
self.window.focus_view(view)
def is_enabled(self):
(group, index) = self.window.get_view_index(self.window.active_view())
return -1 not in (group, index) and len(self.window.views_in_group(group)) > 1
Remove outdated and redundant encoding specifier"""
Move Tab
Plugin for Sublime Text to move tabs around
Copyright (c) 2012 Frédéric Massart - FMCorz.net
Licensed under The MIT License
Redistributions of files must retain the above copyright notice.
http://github.com/FMCorz/MoveTab
"""
import sublime_plugin
class MoveTabCommand(sublime_plugin.WindowCommand):
def run(self, position):
view = self.window.active_view()
group, index = self.window.get_view_index(view)
if index < 0:
return
count = len(self.window.views_in_group(group))
if isinstance(position, str) and position[0] in '-+':
position = (index + int(position)) % count
else:
position = min(count - 1, max(0, int(position)))
# Avoid flashing tab when moving to same index
if position != index:
self.window.set_view_index(view, group, position)
self.window.focus_view(view)
def is_enabled(self):
(group, index) = self.window.get_view_index(self.window.active_view())
return -1 not in (group, index) and len(self.window.views_in_group(group)) > 1
| <commit_before># -*- coding: utf-8 -*-
"""
Move Tab
Plugin for Sublime Text to move tabs around
Copyright (c) 2012 Frédéric Massart - FMCorz.net
Licensed under The MIT License
Redistributions of files must retain the above copyright notice.
http://github.com/FMCorz/MoveTab
"""
import sublime_plugin
class MoveTabCommand(sublime_plugin.WindowCommand):
def run(self, position):
view = self.window.active_view()
group, index = self.window.get_view_index(view)
if index < 0:
return
count = len(self.window.views_in_group(group))
if isinstance(position, str) and position[0] in '-+':
position = (index + int(position)) % count
else:
position = min(count - 1, max(0, int(position)))
# Avoid flashing tab when moving to same index
if position != index:
self.window.set_view_index(view, group, position)
self.window.focus_view(view)
def is_enabled(self):
(group, index) = self.window.get_view_index(self.window.active_view())
return -1 not in (group, index) and len(self.window.views_in_group(group)) > 1
<commit_msg>Remove outdated and redundant encoding specifier<commit_after>"""
Move Tab
Plugin for Sublime Text to move tabs around
Copyright (c) 2012 Frédéric Massart - FMCorz.net
Licensed under The MIT License
Redistributions of files must retain the above copyright notice.
http://github.com/FMCorz/MoveTab
"""
import sublime_plugin
class MoveTabCommand(sublime_plugin.WindowCommand):
def run(self, position):
view = self.window.active_view()
group, index = self.window.get_view_index(view)
if index < 0:
return
count = len(self.window.views_in_group(group))
if isinstance(position, str) and position[0] in '-+':
position = (index + int(position)) % count
else:
position = min(count - 1, max(0, int(position)))
# Avoid flashing tab when moving to same index
if position != index:
self.window.set_view_index(view, group, position)
self.window.focus_view(view)
def is_enabled(self):
(group, index) = self.window.get_view_index(self.window.active_view())
return -1 not in (group, index) and len(self.window.views_in_group(group)) > 1
|
caaa59ca23d7405ff16726d509e3c0d4e659baec | djstripe/migrations/0023_auto_20170307_0937.py | djstripe/migrations/0023_auto_20170307_0937.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-07 09:37
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('djstripe', '0022_fix_subscriber_delete'),
]
operations = [
migrations.AlterField(
model_name='customer',
name='subscriber',
field=models.ForeignKey(null=True, on_delete=models.SET_NULL, related_name='djstripe_customers', to=settings.AUTH_USER_MODEL),
),
migrations.AlterUniqueTogether(
name='customer',
unique_together=set([('subscriber', 'livemode')]),
),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-07 09:37
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
DJSTRIPE_SUBSCRIBER_MODEL = getattr(settings, "DJSTRIPE_SUBSCRIBER_MODEL", settings.AUTH_USER_MODEL)
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('djstripe', '0022_fix_subscriber_delete'),
]
operations = [
migrations.AlterField(
model_name='customer',
name='subscriber',
field=models.ForeignKey(null=True, on_delete=models.SET_NULL, related_name='djstripe_customers', to=DJSTRIPE_SUBSCRIBER_MODEL),
),
migrations.AlterUniqueTogether(
name='customer',
unique_together=set([('subscriber', 'livemode')]),
),
]
| Fix migration 0023 subscriber model reference | Fix migration 0023 subscriber model reference
| Python | mit | pydanny/dj-stripe,jameshiew/dj-stripe,tkwon/dj-stripe,pydanny/dj-stripe,jleclanche/dj-stripe,jleclanche/dj-stripe,jameshiew/dj-stripe,tkwon/dj-stripe,dj-stripe/dj-stripe,kavdev/dj-stripe,kavdev/dj-stripe,dj-stripe/dj-stripe | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-07 09:37
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('djstripe', '0022_fix_subscriber_delete'),
]
operations = [
migrations.AlterField(
model_name='customer',
name='subscriber',
field=models.ForeignKey(null=True, on_delete=models.SET_NULL, related_name='djstripe_customers', to=settings.AUTH_USER_MODEL),
),
migrations.AlterUniqueTogether(
name='customer',
unique_together=set([('subscriber', 'livemode')]),
),
]
Fix migration 0023 subscriber model reference | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-07 09:37
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
DJSTRIPE_SUBSCRIBER_MODEL = getattr(settings, "DJSTRIPE_SUBSCRIBER_MODEL", settings.AUTH_USER_MODEL)
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('djstripe', '0022_fix_subscriber_delete'),
]
operations = [
migrations.AlterField(
model_name='customer',
name='subscriber',
field=models.ForeignKey(null=True, on_delete=models.SET_NULL, related_name='djstripe_customers', to=DJSTRIPE_SUBSCRIBER_MODEL),
),
migrations.AlterUniqueTogether(
name='customer',
unique_together=set([('subscriber', 'livemode')]),
),
]
| <commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-07 09:37
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('djstripe', '0022_fix_subscriber_delete'),
]
operations = [
migrations.AlterField(
model_name='customer',
name='subscriber',
field=models.ForeignKey(null=True, on_delete=models.SET_NULL, related_name='djstripe_customers', to=settings.AUTH_USER_MODEL),
),
migrations.AlterUniqueTogether(
name='customer',
unique_together=set([('subscriber', 'livemode')]),
),
]
<commit_msg>Fix migration 0023 subscriber model reference<commit_after> | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-07 09:37
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
DJSTRIPE_SUBSCRIBER_MODEL = getattr(settings, "DJSTRIPE_SUBSCRIBER_MODEL", settings.AUTH_USER_MODEL)
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('djstripe', '0022_fix_subscriber_delete'),
]
operations = [
migrations.AlterField(
model_name='customer',
name='subscriber',
field=models.ForeignKey(null=True, on_delete=models.SET_NULL, related_name='djstripe_customers', to=DJSTRIPE_SUBSCRIBER_MODEL),
),
migrations.AlterUniqueTogether(
name='customer',
unique_together=set([('subscriber', 'livemode')]),
),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-07 09:37
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('djstripe', '0022_fix_subscriber_delete'),
]
operations = [
migrations.AlterField(
model_name='customer',
name='subscriber',
field=models.ForeignKey(null=True, on_delete=models.SET_NULL, related_name='djstripe_customers', to=settings.AUTH_USER_MODEL),
),
migrations.AlterUniqueTogether(
name='customer',
unique_together=set([('subscriber', 'livemode')]),
),
]
Fix migration 0023 subscriber model reference# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-07 09:37
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
DJSTRIPE_SUBSCRIBER_MODEL = getattr(settings, "DJSTRIPE_SUBSCRIBER_MODEL", settings.AUTH_USER_MODEL)
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('djstripe', '0022_fix_subscriber_delete'),
]
operations = [
migrations.AlterField(
model_name='customer',
name='subscriber',
field=models.ForeignKey(null=True, on_delete=models.SET_NULL, related_name='djstripe_customers', to=DJSTRIPE_SUBSCRIBER_MODEL),
),
migrations.AlterUniqueTogether(
name='customer',
unique_together=set([('subscriber', 'livemode')]),
),
]
| <commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-07 09:37
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('djstripe', '0022_fix_subscriber_delete'),
]
operations = [
migrations.AlterField(
model_name='customer',
name='subscriber',
field=models.ForeignKey(null=True, on_delete=models.SET_NULL, related_name='djstripe_customers', to=settings.AUTH_USER_MODEL),
),
migrations.AlterUniqueTogether(
name='customer',
unique_together=set([('subscriber', 'livemode')]),
),
]
<commit_msg>Fix migration 0023 subscriber model reference<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-07 09:37
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
DJSTRIPE_SUBSCRIBER_MODEL = getattr(settings, "DJSTRIPE_SUBSCRIBER_MODEL", settings.AUTH_USER_MODEL)
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('djstripe', '0022_fix_subscriber_delete'),
]
operations = [
migrations.AlterField(
model_name='customer',
name='subscriber',
field=models.ForeignKey(null=True, on_delete=models.SET_NULL, related_name='djstripe_customers', to=DJSTRIPE_SUBSCRIBER_MODEL),
),
migrations.AlterUniqueTogether(
name='customer',
unique_together=set([('subscriber', 'livemode')]),
),
]
|
a55765af4af8646a3ec95de2e8274b1c5584ee10 | nova/policies/fixed_ips.py | nova/policies/fixed_ips.py | # Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-fixed-ips'
fixed_ips_policies = [
policy.RuleDefault(
name=BASE_POLICY_NAME,
check_str=base.RULE_ADMIN_API),
]
def list_rules():
return fixed_ips_policies
| # Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-fixed-ips'
fixed_ips_policies = [
base.create_rule_default(
BASE_POLICY_NAME,
base.RULE_ADMIN_API,
"""Shows details for, reserve and unreserve a fixed IP address.
These APIs are only available with nova-network which is deprecated.""",
[
{
'method': 'GET',
'path': '/os-fixed-ips/{fixed_ip}'
},
{
'method': 'POST',
'path': '/os-fixed-ips/{fixed_ip}/action (reserve)'
},
{
'method': 'POST',
'path': '/os-fixed-ips/{fixed_ip}/action (unreserve)'
}
]),
]
def list_rules():
return fixed_ips_policies
| Add policy description for os-fixed-ips | Add policy description for os-fixed-ips
This commit adds policy doc for os-fixed-ips policies.
Partial implement blueprint policy-docs
Change-Id: Ief255af699cee217ebf963a2c36f9e819ef4ef90
| Python | apache-2.0 | openstack/nova,jianghuaw/nova,vmturbo/nova,gooddata/openstack-nova,Juniper/nova,rahulunair/nova,Juniper/nova,jianghuaw/nova,jianghuaw/nova,gooddata/openstack-nova,vmturbo/nova,vmturbo/nova,rahulunair/nova,klmitch/nova,mahak/nova,rahulunair/nova,vmturbo/nova,klmitch/nova,mikalstill/nova,Juniper/nova,phenoxim/nova,mahak/nova,mahak/nova,openstack/nova,gooddata/openstack-nova,klmitch/nova,Juniper/nova,klmitch/nova,jianghuaw/nova,openstack/nova,mikalstill/nova,gooddata/openstack-nova,phenoxim/nova,mikalstill/nova | # Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-fixed-ips'
fixed_ips_policies = [
policy.RuleDefault(
name=BASE_POLICY_NAME,
check_str=base.RULE_ADMIN_API),
]
def list_rules():
return fixed_ips_policies
Add policy description for os-fixed-ips
This commit adds policy doc for os-fixed-ips policies.
Partial implement blueprint policy-docs
Change-Id: Ief255af699cee217ebf963a2c36f9e819ef4ef90 | # Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-fixed-ips'
fixed_ips_policies = [
base.create_rule_default(
BASE_POLICY_NAME,
base.RULE_ADMIN_API,
"""Shows details for, reserve and unreserve a fixed IP address.
These APIs are only available with nova-network which is deprecated.""",
[
{
'method': 'GET',
'path': '/os-fixed-ips/{fixed_ip}'
},
{
'method': 'POST',
'path': '/os-fixed-ips/{fixed_ip}/action (reserve)'
},
{
'method': 'POST',
'path': '/os-fixed-ips/{fixed_ip}/action (unreserve)'
}
]),
]
def list_rules():
return fixed_ips_policies
| <commit_before># Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-fixed-ips'
fixed_ips_policies = [
policy.RuleDefault(
name=BASE_POLICY_NAME,
check_str=base.RULE_ADMIN_API),
]
def list_rules():
return fixed_ips_policies
<commit_msg>Add policy description for os-fixed-ips
This commit adds policy doc for os-fixed-ips policies.
Partial implement blueprint policy-docs
Change-Id: Ief255af699cee217ebf963a2c36f9e819ef4ef90<commit_after> | # Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-fixed-ips'
fixed_ips_policies = [
base.create_rule_default(
BASE_POLICY_NAME,
base.RULE_ADMIN_API,
"""Shows details for, reserve and unreserve a fixed IP address.
These APIs are only available with nova-network which is deprecated.""",
[
{
'method': 'GET',
'path': '/os-fixed-ips/{fixed_ip}'
},
{
'method': 'POST',
'path': '/os-fixed-ips/{fixed_ip}/action (reserve)'
},
{
'method': 'POST',
'path': '/os-fixed-ips/{fixed_ip}/action (unreserve)'
}
]),
]
def list_rules():
return fixed_ips_policies
| # Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-fixed-ips'
fixed_ips_policies = [
policy.RuleDefault(
name=BASE_POLICY_NAME,
check_str=base.RULE_ADMIN_API),
]
def list_rules():
return fixed_ips_policies
Add policy description for os-fixed-ips
This commit adds policy doc for os-fixed-ips policies.
Partial implement blueprint policy-docs
Change-Id: Ief255af699cee217ebf963a2c36f9e819ef4ef90# Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-fixed-ips'
fixed_ips_policies = [
base.create_rule_default(
BASE_POLICY_NAME,
base.RULE_ADMIN_API,
"""Shows details for, reserve and unreserve a fixed IP address.
These APIs are only available with nova-network which is deprecated.""",
[
{
'method': 'GET',
'path': '/os-fixed-ips/{fixed_ip}'
},
{
'method': 'POST',
'path': '/os-fixed-ips/{fixed_ip}/action (reserve)'
},
{
'method': 'POST',
'path': '/os-fixed-ips/{fixed_ip}/action (unreserve)'
}
]),
]
def list_rules():
return fixed_ips_policies
| <commit_before># Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-fixed-ips'
fixed_ips_policies = [
policy.RuleDefault(
name=BASE_POLICY_NAME,
check_str=base.RULE_ADMIN_API),
]
def list_rules():
return fixed_ips_policies
<commit_msg>Add policy description for os-fixed-ips
This commit adds policy doc for os-fixed-ips policies.
Partial implement blueprint policy-docs
Change-Id: Ief255af699cee217ebf963a2c36f9e819ef4ef90<commit_after># Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-fixed-ips'
fixed_ips_policies = [
base.create_rule_default(
BASE_POLICY_NAME,
base.RULE_ADMIN_API,
"""Shows details for, reserve and unreserve a fixed IP address.
These APIs are only available with nova-network which is deprecated.""",
[
{
'method': 'GET',
'path': '/os-fixed-ips/{fixed_ip}'
},
{
'method': 'POST',
'path': '/os-fixed-ips/{fixed_ip}/action (reserve)'
},
{
'method': 'POST',
'path': '/os-fixed-ips/{fixed_ip}/action (unreserve)'
}
]),
]
def list_rules():
return fixed_ips_policies
|
6d37cb94c13c26ae82bc3b67a7ff03c2a032d7fc | test/test_message.py | test/test_message.py | import quopri
from daemail.message import DraftMessage
TEXT = 'àéîøü'
def test_quopri_text():
msg = DraftMessage()
msg.addtext(TEXT)
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT.encode('utf-8') not in blob
assert quopri.encodestring(TEXT.encode('utf-8')) in blob
def test_quopri_multipart():
msg = DraftMessage()
msg.addtext(TEXT)
msg.addmimeblob(b'\0\0\0\0', 'application/octet-stream', 'null.dat')
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT.encode('utf-8') not in blob
assert quopri.encodestring(TEXT.encode('utf-8')) in blob
| from base64 import b64encode
import quopri
from daemail.message import DraftMessage
TEXT = 'àéîøü\n'
# Something in the email module implicitly adds a newline to the body text if
# one isn't present, so we need to include one here lest the base64 encodings
# not match up.
TEXT_ENC = TEXT.encode('utf-8')
def test_7bit_text():
msg = DraftMessage()
msg.addtext(TEXT)
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT_ENC not in blob
assert quopri.encodestring(TEXT_ENC) in blob or b64encode(TEXT_ENC) in blob
def test_7bit_multipart():
msg = DraftMessage()
msg.addtext(TEXT)
msg.addmimeblob(b'\0\0\0\0', 'application/octet-stream', 'null.dat')
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT_ENC not in blob
assert quopri.encodestring(TEXT_ENC) in blob or b64encode(TEXT_ENC) in blob
| Make tests forwards-compatible with new email API | Make tests forwards-compatible with new email API
| Python | mit | jwodder/daemail | import quopri
from daemail.message import DraftMessage
TEXT = 'àéîøü'
def test_quopri_text():
msg = DraftMessage()
msg.addtext(TEXT)
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT.encode('utf-8') not in blob
assert quopri.encodestring(TEXT.encode('utf-8')) in blob
def test_quopri_multipart():
msg = DraftMessage()
msg.addtext(TEXT)
msg.addmimeblob(b'\0\0\0\0', 'application/octet-stream', 'null.dat')
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT.encode('utf-8') not in blob
assert quopri.encodestring(TEXT.encode('utf-8')) in blob
Make tests forwards-compatible with new email API | from base64 import b64encode
import quopri
from daemail.message import DraftMessage
TEXT = 'àéîøü\n'
# Something in the email module implicitly adds a newline to the body text if
# one isn't present, so we need to include one here lest the base64 encodings
# not match up.
TEXT_ENC = TEXT.encode('utf-8')
def test_7bit_text():
msg = DraftMessage()
msg.addtext(TEXT)
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT_ENC not in blob
assert quopri.encodestring(TEXT_ENC) in blob or b64encode(TEXT_ENC) in blob
def test_7bit_multipart():
msg = DraftMessage()
msg.addtext(TEXT)
msg.addmimeblob(b'\0\0\0\0', 'application/octet-stream', 'null.dat')
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT_ENC not in blob
assert quopri.encodestring(TEXT_ENC) in blob or b64encode(TEXT_ENC) in blob
| <commit_before>import quopri
from daemail.message import DraftMessage
TEXT = 'àéîøü'
def test_quopri_text():
msg = DraftMessage()
msg.addtext(TEXT)
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT.encode('utf-8') not in blob
assert quopri.encodestring(TEXT.encode('utf-8')) in blob
def test_quopri_multipart():
msg = DraftMessage()
msg.addtext(TEXT)
msg.addmimeblob(b'\0\0\0\0', 'application/octet-stream', 'null.dat')
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT.encode('utf-8') not in blob
assert quopri.encodestring(TEXT.encode('utf-8')) in blob
<commit_msg>Make tests forwards-compatible with new email API<commit_after> | from base64 import b64encode
import quopri
from daemail.message import DraftMessage
TEXT = 'àéîøü\n'
# Something in the email module implicitly adds a newline to the body text if
# one isn't present, so we need to include one here lest the base64 encodings
# not match up.
TEXT_ENC = TEXT.encode('utf-8')
def test_7bit_text():
msg = DraftMessage()
msg.addtext(TEXT)
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT_ENC not in blob
assert quopri.encodestring(TEXT_ENC) in blob or b64encode(TEXT_ENC) in blob
def test_7bit_multipart():
msg = DraftMessage()
msg.addtext(TEXT)
msg.addmimeblob(b'\0\0\0\0', 'application/octet-stream', 'null.dat')
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT_ENC not in blob
assert quopri.encodestring(TEXT_ENC) in blob or b64encode(TEXT_ENC) in blob
| import quopri
from daemail.message import DraftMessage
TEXT = 'àéîøü'
def test_quopri_text():
msg = DraftMessage()
msg.addtext(TEXT)
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT.encode('utf-8') not in blob
assert quopri.encodestring(TEXT.encode('utf-8')) in blob
def test_quopri_multipart():
msg = DraftMessage()
msg.addtext(TEXT)
msg.addmimeblob(b'\0\0\0\0', 'application/octet-stream', 'null.dat')
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT.encode('utf-8') not in blob
assert quopri.encodestring(TEXT.encode('utf-8')) in blob
Make tests forwards-compatible with new email APIfrom base64 import b64encode
import quopri
from daemail.message import DraftMessage
TEXT = 'àéîøü\n'
# Something in the email module implicitly adds a newline to the body text if
# one isn't present, so we need to include one here lest the base64 encodings
# not match up.
TEXT_ENC = TEXT.encode('utf-8')
def test_7bit_text():
msg = DraftMessage()
msg.addtext(TEXT)
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT_ENC not in blob
assert quopri.encodestring(TEXT_ENC) in blob or b64encode(TEXT_ENC) in blob
def test_7bit_multipart():
msg = DraftMessage()
msg.addtext(TEXT)
msg.addmimeblob(b'\0\0\0\0', 'application/octet-stream', 'null.dat')
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT_ENC not in blob
assert quopri.encodestring(TEXT_ENC) in blob or b64encode(TEXT_ENC) in blob
| <commit_before>import quopri
from daemail.message import DraftMessage
TEXT = 'àéîøü'
def test_quopri_text():
msg = DraftMessage()
msg.addtext(TEXT)
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT.encode('utf-8') not in blob
assert quopri.encodestring(TEXT.encode('utf-8')) in blob
def test_quopri_multipart():
msg = DraftMessage()
msg.addtext(TEXT)
msg.addmimeblob(b'\0\0\0\0', 'application/octet-stream', 'null.dat')
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT.encode('utf-8') not in blob
assert quopri.encodestring(TEXT.encode('utf-8')) in blob
<commit_msg>Make tests forwards-compatible with new email API<commit_after>from base64 import b64encode
import quopri
from daemail.message import DraftMessage
TEXT = 'àéîøü\n'
# Something in the email module implicitly adds a newline to the body text if
# one isn't present, so we need to include one here lest the base64 encodings
# not match up.
TEXT_ENC = TEXT.encode('utf-8')
def test_7bit_text():
msg = DraftMessage()
msg.addtext(TEXT)
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT_ENC not in blob
assert quopri.encodestring(TEXT_ENC) in blob or b64encode(TEXT_ENC) in blob
def test_7bit_multipart():
msg = DraftMessage()
msg.addtext(TEXT)
msg.addmimeblob(b'\0\0\0\0', 'application/octet-stream', 'null.dat')
blob = msg.compile()
assert isinstance(blob, bytes)
assert TEXT_ENC not in blob
assert quopri.encodestring(TEXT_ENC) in blob or b64encode(TEXT_ENC) in blob
|
e9e3c474194393a0586709a9cc59b02cf8573b32 | gearstore/cmd.py | gearstore/cmd.py | # Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import socket
from gearstore import stocker
def main():
parser = argparse.ArgumentParser()
parser.add_argument('servers', nargs='+', help='Servers to connect to, '
' format of host/port')
parser.add_argument('--sqlalchemy-dsn', help='SQLAlchemy DSN to store in')
args = parser.parse_args()
stkr = stocker.Stocker(
client_id=socket.gethostname(), dsn=args.sqlalchemy_dsn)
for s in args.servers:
if '/' in s:
(host, port) = s.split('/', 2)
else:
host = s
port = None
stkr.addServer(host, port)
stkr.waitForServer()
while True:
stkr.run()
stkr.ship()
| # Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import socket
from gearstore import stocker
def main():
parser = argparse.ArgumentParser()
parser.add_argument('servers', nargs='+', help='Servers to connect to, '
' format of host/port')
parser.add_argument('--sqlalchemy-dsn', help='SQLAlchemy DSN to store in')
args = parser.parse_args()
stkr = stocker.Stocker(
client_id=socket.gethostname(), dsn=args.sqlalchemy_dsn)
for s in args.servers:
if '/' in s:
(host, port) = s.split('/', 2)
else:
host = s
port = None
stkr.addServer(host, port)
stkr.waitForServer()
while True:
stkr.stock()
stkr.ship()
| Fix call to stock instead of run | Fix call to stock instead of run
| Python | apache-2.0 | SpamapS/gearstore | # Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import socket
from gearstore import stocker
def main():
parser = argparse.ArgumentParser()
parser.add_argument('servers', nargs='+', help='Servers to connect to, '
' format of host/port')
parser.add_argument('--sqlalchemy-dsn', help='SQLAlchemy DSN to store in')
args = parser.parse_args()
stkr = stocker.Stocker(
client_id=socket.gethostname(), dsn=args.sqlalchemy_dsn)
for s in args.servers:
if '/' in s:
(host, port) = s.split('/', 2)
else:
host = s
port = None
stkr.addServer(host, port)
stkr.waitForServer()
while True:
stkr.run()
stkr.ship()
Fix call to stock instead of run | # Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import socket
from gearstore import stocker
def main():
parser = argparse.ArgumentParser()
parser.add_argument('servers', nargs='+', help='Servers to connect to, '
' format of host/port')
parser.add_argument('--sqlalchemy-dsn', help='SQLAlchemy DSN to store in')
args = parser.parse_args()
stkr = stocker.Stocker(
client_id=socket.gethostname(), dsn=args.sqlalchemy_dsn)
for s in args.servers:
if '/' in s:
(host, port) = s.split('/', 2)
else:
host = s
port = None
stkr.addServer(host, port)
stkr.waitForServer()
while True:
stkr.stock()
stkr.ship()
| <commit_before># Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import socket
from gearstore import stocker
def main():
parser = argparse.ArgumentParser()
parser.add_argument('servers', nargs='+', help='Servers to connect to, '
' format of host/port')
parser.add_argument('--sqlalchemy-dsn', help='SQLAlchemy DSN to store in')
args = parser.parse_args()
stkr = stocker.Stocker(
client_id=socket.gethostname(), dsn=args.sqlalchemy_dsn)
for s in args.servers:
if '/' in s:
(host, port) = s.split('/', 2)
else:
host = s
port = None
stkr.addServer(host, port)
stkr.waitForServer()
while True:
stkr.run()
stkr.ship()
<commit_msg>Fix call to stock instead of run<commit_after> | # Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import socket
from gearstore import stocker
def main():
parser = argparse.ArgumentParser()
parser.add_argument('servers', nargs='+', help='Servers to connect to, '
' format of host/port')
parser.add_argument('--sqlalchemy-dsn', help='SQLAlchemy DSN to store in')
args = parser.parse_args()
stkr = stocker.Stocker(
client_id=socket.gethostname(), dsn=args.sqlalchemy_dsn)
for s in args.servers:
if '/' in s:
(host, port) = s.split('/', 2)
else:
host = s
port = None
stkr.addServer(host, port)
stkr.waitForServer()
while True:
stkr.stock()
stkr.ship()
| # Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import socket
from gearstore import stocker
def main():
parser = argparse.ArgumentParser()
parser.add_argument('servers', nargs='+', help='Servers to connect to, '
' format of host/port')
parser.add_argument('--sqlalchemy-dsn', help='SQLAlchemy DSN to store in')
args = parser.parse_args()
stkr = stocker.Stocker(
client_id=socket.gethostname(), dsn=args.sqlalchemy_dsn)
for s in args.servers:
if '/' in s:
(host, port) = s.split('/', 2)
else:
host = s
port = None
stkr.addServer(host, port)
stkr.waitForServer()
while True:
stkr.run()
stkr.ship()
Fix call to stock instead of run# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import socket
from gearstore import stocker
def main():
parser = argparse.ArgumentParser()
parser.add_argument('servers', nargs='+', help='Servers to connect to, '
' format of host/port')
parser.add_argument('--sqlalchemy-dsn', help='SQLAlchemy DSN to store in')
args = parser.parse_args()
stkr = stocker.Stocker(
client_id=socket.gethostname(), dsn=args.sqlalchemy_dsn)
for s in args.servers:
if '/' in s:
(host, port) = s.split('/', 2)
else:
host = s
port = None
stkr.addServer(host, port)
stkr.waitForServer()
while True:
stkr.stock()
stkr.ship()
| <commit_before># Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import socket
from gearstore import stocker
def main():
parser = argparse.ArgumentParser()
parser.add_argument('servers', nargs='+', help='Servers to connect to, '
' format of host/port')
parser.add_argument('--sqlalchemy-dsn', help='SQLAlchemy DSN to store in')
args = parser.parse_args()
stkr = stocker.Stocker(
client_id=socket.gethostname(), dsn=args.sqlalchemy_dsn)
for s in args.servers:
if '/' in s:
(host, port) = s.split('/', 2)
else:
host = s
port = None
stkr.addServer(host, port)
stkr.waitForServer()
while True:
stkr.run()
stkr.ship()
<commit_msg>Fix call to stock instead of run<commit_after># Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import socket
from gearstore import stocker
def main():
parser = argparse.ArgumentParser()
parser.add_argument('servers', nargs='+', help='Servers to connect to, '
' format of host/port')
parser.add_argument('--sqlalchemy-dsn', help='SQLAlchemy DSN to store in')
args = parser.parse_args()
stkr = stocker.Stocker(
client_id=socket.gethostname(), dsn=args.sqlalchemy_dsn)
for s in args.servers:
if '/' in s:
(host, port) = s.split('/', 2)
else:
host = s
port = None
stkr.addServer(host, port)
stkr.waitForServer()
while True:
stkr.stock()
stkr.ship()
|
14d4bd44fda88ff512f0a2581539e41d51063744 | pyconde/checkin/templatetags/checkin_tags.py | pyconde/checkin/templatetags/checkin_tags.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import template
from django.utils.html import format_html
from django.utils.encoding import force_text
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.simple_tag(takes_context=True)
def highlight(context, value):
if any(term in force_text(value) for term in context['search_terms']):
value = format_html('<strong>{0}</strong>', value)
return value
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import template
from django.utils.html import format_html
from django.utils.encoding import force_text
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.simple_tag(takes_context=True)
def highlight(context, value):
if any(term.lower() in force_text(value).lower() for term in context['search_terms']):
value = format_html('<strong>{0}</strong>', value)
return value
| Make highlight template tag case insensitive | Make highlight template tag case insensitive
| Python | bsd-3-clause | pysv/djep,pysv/djep,EuroPython/djep,EuroPython/djep,pysv/djep,pysv/djep,pysv/djep,EuroPython/djep,EuroPython/djep | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import template
from django.utils.html import format_html
from django.utils.encoding import force_text
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.simple_tag(takes_context=True)
def highlight(context, value):
if any(term in force_text(value) for term in context['search_terms']):
value = format_html('<strong>{0}</strong>', value)
return value
Make highlight template tag case insensitive | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import template
from django.utils.html import format_html
from django.utils.encoding import force_text
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.simple_tag(takes_context=True)
def highlight(context, value):
if any(term.lower() in force_text(value).lower() for term in context['search_terms']):
value = format_html('<strong>{0}</strong>', value)
return value
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import template
from django.utils.html import format_html
from django.utils.encoding import force_text
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.simple_tag(takes_context=True)
def highlight(context, value):
if any(term in force_text(value) for term in context['search_terms']):
value = format_html('<strong>{0}</strong>', value)
return value
<commit_msg>Make highlight template tag case insensitive<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import template
from django.utils.html import format_html
from django.utils.encoding import force_text
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.simple_tag(takes_context=True)
def highlight(context, value):
if any(term.lower() in force_text(value).lower() for term in context['search_terms']):
value = format_html('<strong>{0}</strong>', value)
return value
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import template
from django.utils.html import format_html
from django.utils.encoding import force_text
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.simple_tag(takes_context=True)
def highlight(context, value):
if any(term in force_text(value) for term in context['search_terms']):
value = format_html('<strong>{0}</strong>', value)
return value
Make highlight template tag case insensitive# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import template
from django.utils.html import format_html
from django.utils.encoding import force_text
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.simple_tag(takes_context=True)
def highlight(context, value):
if any(term.lower() in force_text(value).lower() for term in context['search_terms']):
value = format_html('<strong>{0}</strong>', value)
return value
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import template
from django.utils.html import format_html
from django.utils.encoding import force_text
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.simple_tag(takes_context=True)
def highlight(context, value):
if any(term in force_text(value) for term in context['search_terms']):
value = format_html('<strong>{0}</strong>', value)
return value
<commit_msg>Make highlight template tag case insensitive<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import template
from django.utils.html import format_html
from django.utils.encoding import force_text
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.simple_tag(takes_context=True)
def highlight(context, value):
if any(term.lower() in force_text(value).lower() for term in context['search_terms']):
value = format_html('<strong>{0}</strong>', value)
return value
|
4af6e0b4c514b65c5bd9251398947e830ce9f26e | quickphotos/templatetags/quickphotos_tags.py | quickphotos/templatetags/quickphotos_tags.py | from django import template
from quickphotos.models import Photo
register = template.Library()
@register.assignment_tag
def get_latest_photos(*args, **kwargs):
limit = kwargs.pop('limit', None)
photos = Photo.objects.all()
if args:
photos = photos.filter(user__in=args)
if limit is not None:
photos = photos[:limit]
return photos
| from django import template
from quickphotos.models import Photo
register = template.Library()
@register.assignment_tag
def get_latest_photos(*args, **kwargs):
limit = kwargs.pop('limit', None)
liked_by = kwargs.pop('liked_by', None)
photos = Photo.objects.all()
if liked_by:
photos = photos.filter(like__user=liked_by)
if args:
photos = photos.filter(user__in=args)
if limit is not None:
photos = photos[:limit]
return photos
| Tag filtering by liked photos | Tag filtering by liked photos
| Python | bsd-3-clause | blancltd/django-quick-photos,kmlebedev/mezzanine-instagram-quickphotos | from django import template
from quickphotos.models import Photo
register = template.Library()
@register.assignment_tag
def get_latest_photos(*args, **kwargs):
limit = kwargs.pop('limit', None)
photos = Photo.objects.all()
if args:
photos = photos.filter(user__in=args)
if limit is not None:
photos = photos[:limit]
return photos
Tag filtering by liked photos | from django import template
from quickphotos.models import Photo
register = template.Library()
@register.assignment_tag
def get_latest_photos(*args, **kwargs):
limit = kwargs.pop('limit', None)
liked_by = kwargs.pop('liked_by', None)
photos = Photo.objects.all()
if liked_by:
photos = photos.filter(like__user=liked_by)
if args:
photos = photos.filter(user__in=args)
if limit is not None:
photos = photos[:limit]
return photos
| <commit_before>from django import template
from quickphotos.models import Photo
register = template.Library()
@register.assignment_tag
def get_latest_photos(*args, **kwargs):
limit = kwargs.pop('limit', None)
photos = Photo.objects.all()
if args:
photos = photos.filter(user__in=args)
if limit is not None:
photos = photos[:limit]
return photos
<commit_msg>Tag filtering by liked photos<commit_after> | from django import template
from quickphotos.models import Photo
register = template.Library()
@register.assignment_tag
def get_latest_photos(*args, **kwargs):
limit = kwargs.pop('limit', None)
liked_by = kwargs.pop('liked_by', None)
photos = Photo.objects.all()
if liked_by:
photos = photos.filter(like__user=liked_by)
if args:
photos = photos.filter(user__in=args)
if limit is not None:
photos = photos[:limit]
return photos
| from django import template
from quickphotos.models import Photo
register = template.Library()
@register.assignment_tag
def get_latest_photos(*args, **kwargs):
limit = kwargs.pop('limit', None)
photos = Photo.objects.all()
if args:
photos = photos.filter(user__in=args)
if limit is not None:
photos = photos[:limit]
return photos
Tag filtering by liked photosfrom django import template
from quickphotos.models import Photo
register = template.Library()
@register.assignment_tag
def get_latest_photos(*args, **kwargs):
limit = kwargs.pop('limit', None)
liked_by = kwargs.pop('liked_by', None)
photos = Photo.objects.all()
if liked_by:
photos = photos.filter(like__user=liked_by)
if args:
photos = photos.filter(user__in=args)
if limit is not None:
photos = photos[:limit]
return photos
| <commit_before>from django import template
from quickphotos.models import Photo
register = template.Library()
@register.assignment_tag
def get_latest_photos(*args, **kwargs):
limit = kwargs.pop('limit', None)
photos = Photo.objects.all()
if args:
photos = photos.filter(user__in=args)
if limit is not None:
photos = photos[:limit]
return photos
<commit_msg>Tag filtering by liked photos<commit_after>from django import template
from quickphotos.models import Photo
register = template.Library()
@register.assignment_tag
def get_latest_photos(*args, **kwargs):
limit = kwargs.pop('limit', None)
liked_by = kwargs.pop('liked_by', None)
photos = Photo.objects.all()
if liked_by:
photos = photos.filter(like__user=liked_by)
if args:
photos = photos.filter(user__in=args)
if limit is not None:
photos = photos[:limit]
return photos
|
9d846cf6d22eb7a577f09918c2e48f6484a75962 | tests/test__utils.py | tests/test__utils.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import pytest
import numpy as np
import dask_distance._utils
@pytest.mark.parametrize("et, u, v", [
(ValueError, np.zeros((2,), dtype=bool), np.zeros((3,), dtype=bool)),
])
def test__bool_cmp_mtx_cnt_err(et, u, v):
with pytest.raises(et):
dask_distance._utils._bool_cmp_mtx_cnt(u, v)
def test__bool_cmp_mtx_cnt():
u = np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1], dtype=bool)
v = np.array([0, 1, 1, 0, 0, 0, 1, 1, 1, 1], dtype=bool)
uv_cmp_mtx = dask_distance._utils._bool_cmp_mtx_cnt(u, v)
uv_cmp_mtx_exp = np.array([[1, 2], [3, 4]], dtype=float)
assert (np.array(uv_cmp_mtx) == uv_cmp_mtx_exp).all()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import pytest
import numpy as np
import dask_distance._utils
@pytest.mark.parametrize("et, u, v", [
(ValueError, np.zeros((2,), dtype=bool), np.zeros((3,), dtype=bool)),
(ValueError, np.zeros((1, 2,), dtype=bool), np.zeros((2,), dtype=bool)),
(ValueError, np.zeros((2,), dtype=bool), np.zeros((1, 2,), dtype=bool)),
])
def test__bool_cmp_mtx_cnt_err(et, u, v):
with pytest.raises(et):
dask_distance._utils._bool_cmp_mtx_cnt(u, v)
def test__bool_cmp_mtx_cnt():
u = np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1], dtype=bool)
v = np.array([0, 1, 1, 0, 0, 0, 1, 1, 1, 1], dtype=bool)
uv_cmp_mtx = dask_distance._utils._bool_cmp_mtx_cnt(u, v)
uv_cmp_mtx_exp = np.array([[1, 2], [3, 4]], dtype=float)
assert (np.array(uv_cmp_mtx) == uv_cmp_mtx_exp).all()
| Test non-1D array error in utils | Test non-1D array error in utils
Adds a test for `_bool_cmp_mtx_cnt` to make sure that both arrays are 1D
only. Will raise a `ValueError` if this is not the case.
| Python | bsd-3-clause | jakirkham/dask-distance | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import pytest
import numpy as np
import dask_distance._utils
@pytest.mark.parametrize("et, u, v", [
(ValueError, np.zeros((2,), dtype=bool), np.zeros((3,), dtype=bool)),
])
def test__bool_cmp_mtx_cnt_err(et, u, v):
with pytest.raises(et):
dask_distance._utils._bool_cmp_mtx_cnt(u, v)
def test__bool_cmp_mtx_cnt():
u = np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1], dtype=bool)
v = np.array([0, 1, 1, 0, 0, 0, 1, 1, 1, 1], dtype=bool)
uv_cmp_mtx = dask_distance._utils._bool_cmp_mtx_cnt(u, v)
uv_cmp_mtx_exp = np.array([[1, 2], [3, 4]], dtype=float)
assert (np.array(uv_cmp_mtx) == uv_cmp_mtx_exp).all()
Test non-1D array error in utils
Adds a test for `_bool_cmp_mtx_cnt` to make sure that both arrays are 1D
only. Will raise a `ValueError` if this is not the case. | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import pytest
import numpy as np
import dask_distance._utils
@pytest.mark.parametrize("et, u, v", [
(ValueError, np.zeros((2,), dtype=bool), np.zeros((3,), dtype=bool)),
(ValueError, np.zeros((1, 2,), dtype=bool), np.zeros((2,), dtype=bool)),
(ValueError, np.zeros((2,), dtype=bool), np.zeros((1, 2,), dtype=bool)),
])
def test__bool_cmp_mtx_cnt_err(et, u, v):
with pytest.raises(et):
dask_distance._utils._bool_cmp_mtx_cnt(u, v)
def test__bool_cmp_mtx_cnt():
u = np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1], dtype=bool)
v = np.array([0, 1, 1, 0, 0, 0, 1, 1, 1, 1], dtype=bool)
uv_cmp_mtx = dask_distance._utils._bool_cmp_mtx_cnt(u, v)
uv_cmp_mtx_exp = np.array([[1, 2], [3, 4]], dtype=float)
assert (np.array(uv_cmp_mtx) == uv_cmp_mtx_exp).all()
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import pytest
import numpy as np
import dask_distance._utils
@pytest.mark.parametrize("et, u, v", [
(ValueError, np.zeros((2,), dtype=bool), np.zeros((3,), dtype=bool)),
])
def test__bool_cmp_mtx_cnt_err(et, u, v):
with pytest.raises(et):
dask_distance._utils._bool_cmp_mtx_cnt(u, v)
def test__bool_cmp_mtx_cnt():
u = np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1], dtype=bool)
v = np.array([0, 1, 1, 0, 0, 0, 1, 1, 1, 1], dtype=bool)
uv_cmp_mtx = dask_distance._utils._bool_cmp_mtx_cnt(u, v)
uv_cmp_mtx_exp = np.array([[1, 2], [3, 4]], dtype=float)
assert (np.array(uv_cmp_mtx) == uv_cmp_mtx_exp).all()
<commit_msg>Test non-1D array error in utils
Adds a test for `_bool_cmp_mtx_cnt` to make sure that both arrays are 1D
only. Will raise a `ValueError` if this is not the case.<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import pytest
import numpy as np
import dask_distance._utils
@pytest.mark.parametrize("et, u, v", [
(ValueError, np.zeros((2,), dtype=bool), np.zeros((3,), dtype=bool)),
(ValueError, np.zeros((1, 2,), dtype=bool), np.zeros((2,), dtype=bool)),
(ValueError, np.zeros((2,), dtype=bool), np.zeros((1, 2,), dtype=bool)),
])
def test__bool_cmp_mtx_cnt_err(et, u, v):
with pytest.raises(et):
dask_distance._utils._bool_cmp_mtx_cnt(u, v)
def test__bool_cmp_mtx_cnt():
u = np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1], dtype=bool)
v = np.array([0, 1, 1, 0, 0, 0, 1, 1, 1, 1], dtype=bool)
uv_cmp_mtx = dask_distance._utils._bool_cmp_mtx_cnt(u, v)
uv_cmp_mtx_exp = np.array([[1, 2], [3, 4]], dtype=float)
assert (np.array(uv_cmp_mtx) == uv_cmp_mtx_exp).all()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import pytest
import numpy as np
import dask_distance._utils
@pytest.mark.parametrize("et, u, v", [
(ValueError, np.zeros((2,), dtype=bool), np.zeros((3,), dtype=bool)),
])
def test__bool_cmp_mtx_cnt_err(et, u, v):
with pytest.raises(et):
dask_distance._utils._bool_cmp_mtx_cnt(u, v)
def test__bool_cmp_mtx_cnt():
u = np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1], dtype=bool)
v = np.array([0, 1, 1, 0, 0, 0, 1, 1, 1, 1], dtype=bool)
uv_cmp_mtx = dask_distance._utils._bool_cmp_mtx_cnt(u, v)
uv_cmp_mtx_exp = np.array([[1, 2], [3, 4]], dtype=float)
assert (np.array(uv_cmp_mtx) == uv_cmp_mtx_exp).all()
Test non-1D array error in utils
Adds a test for `_bool_cmp_mtx_cnt` to make sure that both arrays are 1D
only. Will raise a `ValueError` if this is not the case.#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import pytest
import numpy as np
import dask_distance._utils
@pytest.mark.parametrize("et, u, v", [
(ValueError, np.zeros((2,), dtype=bool), np.zeros((3,), dtype=bool)),
(ValueError, np.zeros((1, 2,), dtype=bool), np.zeros((2,), dtype=bool)),
(ValueError, np.zeros((2,), dtype=bool), np.zeros((1, 2,), dtype=bool)),
])
def test__bool_cmp_mtx_cnt_err(et, u, v):
with pytest.raises(et):
dask_distance._utils._bool_cmp_mtx_cnt(u, v)
def test__bool_cmp_mtx_cnt():
u = np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1], dtype=bool)
v = np.array([0, 1, 1, 0, 0, 0, 1, 1, 1, 1], dtype=bool)
uv_cmp_mtx = dask_distance._utils._bool_cmp_mtx_cnt(u, v)
uv_cmp_mtx_exp = np.array([[1, 2], [3, 4]], dtype=float)
assert (np.array(uv_cmp_mtx) == uv_cmp_mtx_exp).all()
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import pytest
import numpy as np
import dask_distance._utils
@pytest.mark.parametrize("et, u, v", [
(ValueError, np.zeros((2,), dtype=bool), np.zeros((3,), dtype=bool)),
])
def test__bool_cmp_mtx_cnt_err(et, u, v):
with pytest.raises(et):
dask_distance._utils._bool_cmp_mtx_cnt(u, v)
def test__bool_cmp_mtx_cnt():
u = np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1], dtype=bool)
v = np.array([0, 1, 1, 0, 0, 0, 1, 1, 1, 1], dtype=bool)
uv_cmp_mtx = dask_distance._utils._bool_cmp_mtx_cnt(u, v)
uv_cmp_mtx_exp = np.array([[1, 2], [3, 4]], dtype=float)
assert (np.array(uv_cmp_mtx) == uv_cmp_mtx_exp).all()
<commit_msg>Test non-1D array error in utils
Adds a test for `_bool_cmp_mtx_cnt` to make sure that both arrays are 1D
only. Will raise a `ValueError` if this is not the case.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import pytest
import numpy as np
import dask_distance._utils
@pytest.mark.parametrize("et, u, v", [
(ValueError, np.zeros((2,), dtype=bool), np.zeros((3,), dtype=bool)),
(ValueError, np.zeros((1, 2,), dtype=bool), np.zeros((2,), dtype=bool)),
(ValueError, np.zeros((2,), dtype=bool), np.zeros((1, 2,), dtype=bool)),
])
def test__bool_cmp_mtx_cnt_err(et, u, v):
with pytest.raises(et):
dask_distance._utils._bool_cmp_mtx_cnt(u, v)
def test__bool_cmp_mtx_cnt():
u = np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 1], dtype=bool)
v = np.array([0, 1, 1, 0, 0, 0, 1, 1, 1, 1], dtype=bool)
uv_cmp_mtx = dask_distance._utils._bool_cmp_mtx_cnt(u, v)
uv_cmp_mtx_exp = np.array([[1, 2], [3, 4]], dtype=float)
assert (np.array(uv_cmp_mtx) == uv_cmp_mtx_exp).all()
|
c4153cc69238054ddbdb8b385325f5a8701e98f8 | taxiexpress/serializers.py | taxiexpress/serializers.py | from django.forms import widgets
from rest_framework import serializers
from taxiexpress.models import Customer, Country, State, City, Driver, Travel, Car
class CarSerializer(serializers.ModelSerializer):
class Meta:
model = Car
fields = ('plate', 'model', 'company', 'capacity', 'accessible', 'animals', 'appPayment')
class DriverSerializer(serializers.ModelSerializer):
valuation = serializers.SerializerMethodField('get_valuation')
car = CarSerializer()
def get_valuation(self, obj):
return int(5*obj.positiveVotes/(obj.positiveVotes+obj.negativeVotes))
class Meta:
model = Driver
fields = ('email', 'phone', 'first_name', 'last_name', 'image', 'valuation', 'car')
class TravelSerializer(serializers.ModelSerializer):
driver= DriverSerializer()
class Meta:
model = Travel
fields = ('id', 'driver', 'starttime', 'endtime', 'cost', 'startpoint', 'origin', 'endpoint', 'destination')
class CustomerSerializer(serializers.ModelSerializer):
favlist = DriverSerializer(many=True)
travel_set = TravelSerializer(many=True)
class Meta:
model = Customer
fields = ('email', 'phone', 'first_name', 'last_name', 'image', 'lastUpdate', 'favlist', 'travel_set')
| from django.forms import widgets
from rest_framework import serializers
from taxiexpress.models import Customer, Country, State, City, Driver, Travel, Car
class CarSerializer(serializers.ModelSerializer):
class Meta:
model = Car
fields = ('plate', 'model', 'company', 'capacity', 'accessible', 'animals', 'appPayment')
class DriverSerializer(serializers.ModelSerializer):
valuation = serializers.SerializerMethodField('get_valuation')
car = CarSerializer()
def get_valuation(self, obj):
return int(5*obj.positiveVotes/(obj.positiveVotes+obj.negativeVotes))
class Meta:
model = Driver
fields = ('email', 'phone', 'first_name', 'last_name', 'image', 'valuation', 'car')
class TravelSerializer(serializers.ModelSerializer):
driver= DriverSerializer()
class Meta:
model = Travel
fields = ('id', 'driver', 'starttime', 'endtime', 'cost', 'startpoint', 'origin', 'endpoint', 'destination')
class CustomerSerializer(serializers.ModelSerializer):
favlist = DriverSerializer(many=True)
travel_set = TravelSerializer(many=True)
class Meta:
model = Customer
fields = ('email', 'phone', 'first_name', 'last_name', 'image', 'lastUpdate', 'favlist', 'fAccessible', 'fAnimals', 'fAppPayment', 'fCapacity', 'travel_set')
| Add filters to Customer serializer | Add filters to Customer serializer
| Python | mit | TaxiExpress/server,TaxiExpress/server | from django.forms import widgets
from rest_framework import serializers
from taxiexpress.models import Customer, Country, State, City, Driver, Travel, Car
class CarSerializer(serializers.ModelSerializer):
class Meta:
model = Car
fields = ('plate', 'model', 'company', 'capacity', 'accessible', 'animals', 'appPayment')
class DriverSerializer(serializers.ModelSerializer):
valuation = serializers.SerializerMethodField('get_valuation')
car = CarSerializer()
def get_valuation(self, obj):
return int(5*obj.positiveVotes/(obj.positiveVotes+obj.negativeVotes))
class Meta:
model = Driver
fields = ('email', 'phone', 'first_name', 'last_name', 'image', 'valuation', 'car')
class TravelSerializer(serializers.ModelSerializer):
driver= DriverSerializer()
class Meta:
model = Travel
fields = ('id', 'driver', 'starttime', 'endtime', 'cost', 'startpoint', 'origin', 'endpoint', 'destination')
class CustomerSerializer(serializers.ModelSerializer):
favlist = DriverSerializer(many=True)
travel_set = TravelSerializer(many=True)
class Meta:
model = Customer
fields = ('email', 'phone', 'first_name', 'last_name', 'image', 'lastUpdate', 'favlist', 'travel_set')
Add filters to Customer serializer | from django.forms import widgets
from rest_framework import serializers
from taxiexpress.models import Customer, Country, State, City, Driver, Travel, Car
class CarSerializer(serializers.ModelSerializer):
class Meta:
model = Car
fields = ('plate', 'model', 'company', 'capacity', 'accessible', 'animals', 'appPayment')
class DriverSerializer(serializers.ModelSerializer):
valuation = serializers.SerializerMethodField('get_valuation')
car = CarSerializer()
def get_valuation(self, obj):
return int(5*obj.positiveVotes/(obj.positiveVotes+obj.negativeVotes))
class Meta:
model = Driver
fields = ('email', 'phone', 'first_name', 'last_name', 'image', 'valuation', 'car')
class TravelSerializer(serializers.ModelSerializer):
driver= DriverSerializer()
class Meta:
model = Travel
fields = ('id', 'driver', 'starttime', 'endtime', 'cost', 'startpoint', 'origin', 'endpoint', 'destination')
class CustomerSerializer(serializers.ModelSerializer):
favlist = DriverSerializer(many=True)
travel_set = TravelSerializer(many=True)
class Meta:
model = Customer
fields = ('email', 'phone', 'first_name', 'last_name', 'image', 'lastUpdate', 'favlist', 'fAccessible', 'fAnimals', 'fAppPayment', 'fCapacity', 'travel_set')
| <commit_before>from django.forms import widgets
from rest_framework import serializers
from taxiexpress.models import Customer, Country, State, City, Driver, Travel, Car
class CarSerializer(serializers.ModelSerializer):
class Meta:
model = Car
fields = ('plate', 'model', 'company', 'capacity', 'accessible', 'animals', 'appPayment')
class DriverSerializer(serializers.ModelSerializer):
valuation = serializers.SerializerMethodField('get_valuation')
car = CarSerializer()
def get_valuation(self, obj):
return int(5*obj.positiveVotes/(obj.positiveVotes+obj.negativeVotes))
class Meta:
model = Driver
fields = ('email', 'phone', 'first_name', 'last_name', 'image', 'valuation', 'car')
class TravelSerializer(serializers.ModelSerializer):
driver= DriverSerializer()
class Meta:
model = Travel
fields = ('id', 'driver', 'starttime', 'endtime', 'cost', 'startpoint', 'origin', 'endpoint', 'destination')
class CustomerSerializer(serializers.ModelSerializer):
favlist = DriverSerializer(many=True)
travel_set = TravelSerializer(many=True)
class Meta:
model = Customer
fields = ('email', 'phone', 'first_name', 'last_name', 'image', 'lastUpdate', 'favlist', 'travel_set')
<commit_msg>Add filters to Customer serializer<commit_after> | from django.forms import widgets
from rest_framework import serializers
from taxiexpress.models import Customer, Country, State, City, Driver, Travel, Car
class CarSerializer(serializers.ModelSerializer):
class Meta:
model = Car
fields = ('plate', 'model', 'company', 'capacity', 'accessible', 'animals', 'appPayment')
class DriverSerializer(serializers.ModelSerializer):
valuation = serializers.SerializerMethodField('get_valuation')
car = CarSerializer()
def get_valuation(self, obj):
return int(5*obj.positiveVotes/(obj.positiveVotes+obj.negativeVotes))
class Meta:
model = Driver
fields = ('email', 'phone', 'first_name', 'last_name', 'image', 'valuation', 'car')
class TravelSerializer(serializers.ModelSerializer):
driver= DriverSerializer()
class Meta:
model = Travel
fields = ('id', 'driver', 'starttime', 'endtime', 'cost', 'startpoint', 'origin', 'endpoint', 'destination')
class CustomerSerializer(serializers.ModelSerializer):
favlist = DriverSerializer(many=True)
travel_set = TravelSerializer(many=True)
class Meta:
model = Customer
fields = ('email', 'phone', 'first_name', 'last_name', 'image', 'lastUpdate', 'favlist', 'fAccessible', 'fAnimals', 'fAppPayment', 'fCapacity', 'travel_set')
| from django.forms import widgets
from rest_framework import serializers
from taxiexpress.models import Customer, Country, State, City, Driver, Travel, Car
class CarSerializer(serializers.ModelSerializer):
class Meta:
model = Car
fields = ('plate', 'model', 'company', 'capacity', 'accessible', 'animals', 'appPayment')
class DriverSerializer(serializers.ModelSerializer):
valuation = serializers.SerializerMethodField('get_valuation')
car = CarSerializer()
def get_valuation(self, obj):
return int(5*obj.positiveVotes/(obj.positiveVotes+obj.negativeVotes))
class Meta:
model = Driver
fields = ('email', 'phone', 'first_name', 'last_name', 'image', 'valuation', 'car')
class TravelSerializer(serializers.ModelSerializer):
driver= DriverSerializer()
class Meta:
model = Travel
fields = ('id', 'driver', 'starttime', 'endtime', 'cost', 'startpoint', 'origin', 'endpoint', 'destination')
class CustomerSerializer(serializers.ModelSerializer):
favlist = DriverSerializer(many=True)
travel_set = TravelSerializer(many=True)
class Meta:
model = Customer
fields = ('email', 'phone', 'first_name', 'last_name', 'image', 'lastUpdate', 'favlist', 'travel_set')
Add filters to Customer serializerfrom django.forms import widgets
from rest_framework import serializers
from taxiexpress.models import Customer, Country, State, City, Driver, Travel, Car
class CarSerializer(serializers.ModelSerializer):
class Meta:
model = Car
fields = ('plate', 'model', 'company', 'capacity', 'accessible', 'animals', 'appPayment')
class DriverSerializer(serializers.ModelSerializer):
valuation = serializers.SerializerMethodField('get_valuation')
car = CarSerializer()
def get_valuation(self, obj):
return int(5*obj.positiveVotes/(obj.positiveVotes+obj.negativeVotes))
class Meta:
model = Driver
fields = ('email', 'phone', 'first_name', 'last_name', 'image', 'valuation', 'car')
class TravelSerializer(serializers.ModelSerializer):
driver= DriverSerializer()
class Meta:
model = Travel
fields = ('id', 'driver', 'starttime', 'endtime', 'cost', 'startpoint', 'origin', 'endpoint', 'destination')
class CustomerSerializer(serializers.ModelSerializer):
favlist = DriverSerializer(many=True)
travel_set = TravelSerializer(many=True)
class Meta:
model = Customer
fields = ('email', 'phone', 'first_name', 'last_name', 'image', 'lastUpdate', 'favlist', 'fAccessible', 'fAnimals', 'fAppPayment', 'fCapacity', 'travel_set')
| <commit_before>from django.forms import widgets
from rest_framework import serializers
from taxiexpress.models import Customer, Country, State, City, Driver, Travel, Car
class CarSerializer(serializers.ModelSerializer):
class Meta:
model = Car
fields = ('plate', 'model', 'company', 'capacity', 'accessible', 'animals', 'appPayment')
class DriverSerializer(serializers.ModelSerializer):
valuation = serializers.SerializerMethodField('get_valuation')
car = CarSerializer()
def get_valuation(self, obj):
return int(5*obj.positiveVotes/(obj.positiveVotes+obj.negativeVotes))
class Meta:
model = Driver
fields = ('email', 'phone', 'first_name', 'last_name', 'image', 'valuation', 'car')
class TravelSerializer(serializers.ModelSerializer):
driver= DriverSerializer()
class Meta:
model = Travel
fields = ('id', 'driver', 'starttime', 'endtime', 'cost', 'startpoint', 'origin', 'endpoint', 'destination')
class CustomerSerializer(serializers.ModelSerializer):
favlist = DriverSerializer(many=True)
travel_set = TravelSerializer(many=True)
class Meta:
model = Customer
fields = ('email', 'phone', 'first_name', 'last_name', 'image', 'lastUpdate', 'favlist', 'travel_set')
<commit_msg>Add filters to Customer serializer<commit_after>from django.forms import widgets
from rest_framework import serializers
from taxiexpress.models import Customer, Country, State, City, Driver, Travel, Car
class CarSerializer(serializers.ModelSerializer):
class Meta:
model = Car
fields = ('plate', 'model', 'company', 'capacity', 'accessible', 'animals', 'appPayment')
class DriverSerializer(serializers.ModelSerializer):
valuation = serializers.SerializerMethodField('get_valuation')
car = CarSerializer()
def get_valuation(self, obj):
return int(5*obj.positiveVotes/(obj.positiveVotes+obj.negativeVotes))
class Meta:
model = Driver
fields = ('email', 'phone', 'first_name', 'last_name', 'image', 'valuation', 'car')
class TravelSerializer(serializers.ModelSerializer):
driver= DriverSerializer()
class Meta:
model = Travel
fields = ('id', 'driver', 'starttime', 'endtime', 'cost', 'startpoint', 'origin', 'endpoint', 'destination')
class CustomerSerializer(serializers.ModelSerializer):
favlist = DriverSerializer(many=True)
travel_set = TravelSerializer(many=True)
class Meta:
model = Customer
fields = ('email', 'phone', 'first_name', 'last_name', 'image', 'lastUpdate', 'favlist', 'fAccessible', 'fAnimals', 'fAppPayment', 'fCapacity', 'travel_set')
|
55dc2428796059cba22df71bde307d5621394979 | runtests.py | runtests.py | # python setup.py test
# or
# python runtests.py
import sys
from django.conf import settings
APP = 'djrill'
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ROOT_URLCONF=APP+'.urls',
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
APP,
)
)
try:
# Django 1.7+ initialize app registry
from django import setup
setup()
except ImportError:
pass
try:
from django.test.runner import DiscoverRunner as TestRunner # Django 1.6+
except ImportError:
from django.test.simple import DjangoTestSuiteRunner as TestRunner # Django -1.5
def runtests():
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests([APP])
sys.exit(failures)
if __name__ == '__main__':
runtests()
| # python setup.py test
# or
# python runtests.py
import sys
from django.conf import settings
APP = 'djrill'
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ROOT_URLCONF=APP+'.urls',
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
APP,
),
MIDDLEWARE_CLASSES=(
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
)
try:
# Django 1.7+ initialize app registry
from django import setup
setup()
except ImportError:
pass
try:
from django.test.runner import DiscoverRunner as TestRunner # Django 1.6+
except ImportError:
from django.test.simple import DjangoTestSuiteRunner as TestRunner # Django -1.5
def runtests():
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests([APP])
sys.exit(failures)
if __name__ == '__main__':
runtests()
| Fix tests in Django 1.7 | Fix tests in Django 1.7
Django 1.7 doesn't include session or auth middleware
in minimal default config. Djrill admin views require auth.
| Python | bsd-3-clause | idlweb/Djrill,barseghyanartur/Djrill,janusnic/Djrill,barseghyanartur/Djrill,idlweb/Djrill,brack3t/Djrill,janusnic/Djrill | # python setup.py test
# or
# python runtests.py
import sys
from django.conf import settings
APP = 'djrill'
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ROOT_URLCONF=APP+'.urls',
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
APP,
)
)
try:
# Django 1.7+ initialize app registry
from django import setup
setup()
except ImportError:
pass
try:
from django.test.runner import DiscoverRunner as TestRunner # Django 1.6+
except ImportError:
from django.test.simple import DjangoTestSuiteRunner as TestRunner # Django -1.5
def runtests():
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests([APP])
sys.exit(failures)
if __name__ == '__main__':
runtests()
Fix tests in Django 1.7
Django 1.7 doesn't include session or auth middleware
in minimal default config. Djrill admin views require auth. | # python setup.py test
# or
# python runtests.py
import sys
from django.conf import settings
APP = 'djrill'
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ROOT_URLCONF=APP+'.urls',
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
APP,
),
MIDDLEWARE_CLASSES=(
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
)
try:
# Django 1.7+ initialize app registry
from django import setup
setup()
except ImportError:
pass
try:
from django.test.runner import DiscoverRunner as TestRunner # Django 1.6+
except ImportError:
from django.test.simple import DjangoTestSuiteRunner as TestRunner # Django -1.5
def runtests():
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests([APP])
sys.exit(failures)
if __name__ == '__main__':
runtests()
| <commit_before># python setup.py test
# or
# python runtests.py
import sys
from django.conf import settings
APP = 'djrill'
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ROOT_URLCONF=APP+'.urls',
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
APP,
)
)
try:
# Django 1.7+ initialize app registry
from django import setup
setup()
except ImportError:
pass
try:
from django.test.runner import DiscoverRunner as TestRunner # Django 1.6+
except ImportError:
from django.test.simple import DjangoTestSuiteRunner as TestRunner # Django -1.5
def runtests():
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests([APP])
sys.exit(failures)
if __name__ == '__main__':
runtests()
<commit_msg>Fix tests in Django 1.7
Django 1.7 doesn't include session or auth middleware
in minimal default config. Djrill admin views require auth.<commit_after> | # python setup.py test
# or
# python runtests.py
import sys
from django.conf import settings
APP = 'djrill'
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ROOT_URLCONF=APP+'.urls',
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
APP,
),
MIDDLEWARE_CLASSES=(
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
)
try:
# Django 1.7+ initialize app registry
from django import setup
setup()
except ImportError:
pass
try:
from django.test.runner import DiscoverRunner as TestRunner # Django 1.6+
except ImportError:
from django.test.simple import DjangoTestSuiteRunner as TestRunner # Django -1.5
def runtests():
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests([APP])
sys.exit(failures)
if __name__ == '__main__':
runtests()
| # python setup.py test
# or
# python runtests.py
import sys
from django.conf import settings
APP = 'djrill'
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ROOT_URLCONF=APP+'.urls',
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
APP,
)
)
try:
# Django 1.7+ initialize app registry
from django import setup
setup()
except ImportError:
pass
try:
from django.test.runner import DiscoverRunner as TestRunner # Django 1.6+
except ImportError:
from django.test.simple import DjangoTestSuiteRunner as TestRunner # Django -1.5
def runtests():
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests([APP])
sys.exit(failures)
if __name__ == '__main__':
runtests()
Fix tests in Django 1.7
Django 1.7 doesn't include session or auth middleware
in minimal default config. Djrill admin views require auth.# python setup.py test
# or
# python runtests.py
import sys
from django.conf import settings
APP = 'djrill'
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ROOT_URLCONF=APP+'.urls',
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
APP,
),
MIDDLEWARE_CLASSES=(
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
)
try:
# Django 1.7+ initialize app registry
from django import setup
setup()
except ImportError:
pass
try:
from django.test.runner import DiscoverRunner as TestRunner # Django 1.6+
except ImportError:
from django.test.simple import DjangoTestSuiteRunner as TestRunner # Django -1.5
def runtests():
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests([APP])
sys.exit(failures)
if __name__ == '__main__':
runtests()
| <commit_before># python setup.py test
# or
# python runtests.py
import sys
from django.conf import settings
APP = 'djrill'
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ROOT_URLCONF=APP+'.urls',
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
APP,
)
)
try:
# Django 1.7+ initialize app registry
from django import setup
setup()
except ImportError:
pass
try:
from django.test.runner import DiscoverRunner as TestRunner # Django 1.6+
except ImportError:
from django.test.simple import DjangoTestSuiteRunner as TestRunner # Django -1.5
def runtests():
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests([APP])
sys.exit(failures)
if __name__ == '__main__':
runtests()
<commit_msg>Fix tests in Django 1.7
Django 1.7 doesn't include session or auth middleware
in minimal default config. Djrill admin views require auth.<commit_after># python setup.py test
# or
# python runtests.py
import sys
from django.conf import settings
APP = 'djrill'
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ROOT_URLCONF=APP+'.urls',
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
APP,
),
MIDDLEWARE_CLASSES=(
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
)
try:
# Django 1.7+ initialize app registry
from django import setup
setup()
except ImportError:
pass
try:
from django.test.runner import DiscoverRunner as TestRunner # Django 1.6+
except ImportError:
from django.test.simple import DjangoTestSuiteRunner as TestRunner # Django -1.5
def runtests():
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests([APP])
sys.exit(failures)
if __name__ == '__main__':
runtests()
|
458fd49fdf73f5cc338c58b1e741fde42f2f7251 | exampleapp/models.py | exampleapp/models.py | from galleries.models import Gallery, ImageModel
from django.db import models
from imagekit.models import ImageSpec
from imagekit.processors.resize import Fit
class Photo(ImageModel):
thumbnail = ImageSpec([Fit(50, 50)])
full = ImageSpec([Fit(400, 200)])
caption = models.CharField(max_length=100)
class PortfolioImage(ImageModel):
thumbnail = ImageSpec([Fit(70, 40)])
class Video(models.Model):
title = models.CharField(max_length=50)
video = models.FileField(upload_to='galleries/video/video')
thumbnail = models.ImageField(upload_to='galleries/video/thumbnail', blank=True)
def __unicode__(self):
return self.title
class Meta:
ordering = ['title']
class PhotoAlbum(Gallery):
class GalleryMeta:
member_models = [Photo]
class Meta:
verbose_name = 'Photo Album'
class Portfolio(Gallery):
class GalleryMeta:
member_models = [Video]
membership_class = 'PortfolioMembership'
class PortfolioMembership(Portfolio.BaseMembership):
extra_field = models.CharField(max_length=10)
| from galleries.models import Gallery, ImageModel
from django.db import models
from imagekit.models import ImageSpec
from imagekit.processors import ResizeToFit
class Photo(ImageModel):
thumbnail = ImageSpec([ResizeToFit(50, 50)])
full = ImageSpec([ResizeToFit(400, 200)])
caption = models.CharField(max_length=100)
class PortfolioImage(ImageModel):
thumbnail = ImageSpec([ResizeToFit(70, 40)])
class Video(models.Model):
title = models.CharField(max_length=50)
video = models.FileField(upload_to='galleries/video/video')
thumbnail = models.ImageField(upload_to='galleries/video/thumbnail', blank=True)
def __unicode__(self):
return self.title
class Meta:
ordering = ['title']
class PhotoAlbum(Gallery):
class GalleryMeta:
member_models = [Photo]
class Meta:
verbose_name = 'Photo Album'
class Portfolio(Gallery):
class GalleryMeta:
member_models = [Video]
membership_class = 'PortfolioMembership'
class PortfolioMembership(Portfolio.BaseMembership):
extra_field = models.CharField(max_length=10)
| Use (not so) new processor class names | Use (not so) new processor class names
| Python | mit | hzdg/django-galleries,hzdg/django-galleries,hzdg/django-galleries | from galleries.models import Gallery, ImageModel
from django.db import models
from imagekit.models import ImageSpec
from imagekit.processors.resize import Fit
class Photo(ImageModel):
thumbnail = ImageSpec([Fit(50, 50)])
full = ImageSpec([Fit(400, 200)])
caption = models.CharField(max_length=100)
class PortfolioImage(ImageModel):
thumbnail = ImageSpec([Fit(70, 40)])
class Video(models.Model):
title = models.CharField(max_length=50)
video = models.FileField(upload_to='galleries/video/video')
thumbnail = models.ImageField(upload_to='galleries/video/thumbnail', blank=True)
def __unicode__(self):
return self.title
class Meta:
ordering = ['title']
class PhotoAlbum(Gallery):
class GalleryMeta:
member_models = [Photo]
class Meta:
verbose_name = 'Photo Album'
class Portfolio(Gallery):
class GalleryMeta:
member_models = [Video]
membership_class = 'PortfolioMembership'
class PortfolioMembership(Portfolio.BaseMembership):
extra_field = models.CharField(max_length=10)
Use (not so) new processor class names | from galleries.models import Gallery, ImageModel
from django.db import models
from imagekit.models import ImageSpec
from imagekit.processors import ResizeToFit
class Photo(ImageModel):
thumbnail = ImageSpec([ResizeToFit(50, 50)])
full = ImageSpec([ResizeToFit(400, 200)])
caption = models.CharField(max_length=100)
class PortfolioImage(ImageModel):
thumbnail = ImageSpec([ResizeToFit(70, 40)])
class Video(models.Model):
title = models.CharField(max_length=50)
video = models.FileField(upload_to='galleries/video/video')
thumbnail = models.ImageField(upload_to='galleries/video/thumbnail', blank=True)
def __unicode__(self):
return self.title
class Meta:
ordering = ['title']
class PhotoAlbum(Gallery):
class GalleryMeta:
member_models = [Photo]
class Meta:
verbose_name = 'Photo Album'
class Portfolio(Gallery):
class GalleryMeta:
member_models = [Video]
membership_class = 'PortfolioMembership'
class PortfolioMembership(Portfolio.BaseMembership):
extra_field = models.CharField(max_length=10)
| <commit_before>from galleries.models import Gallery, ImageModel
from django.db import models
from imagekit.models import ImageSpec
from imagekit.processors.resize import Fit
class Photo(ImageModel):
thumbnail = ImageSpec([Fit(50, 50)])
full = ImageSpec([Fit(400, 200)])
caption = models.CharField(max_length=100)
class PortfolioImage(ImageModel):
thumbnail = ImageSpec([Fit(70, 40)])
class Video(models.Model):
title = models.CharField(max_length=50)
video = models.FileField(upload_to='galleries/video/video')
thumbnail = models.ImageField(upload_to='galleries/video/thumbnail', blank=True)
def __unicode__(self):
return self.title
class Meta:
ordering = ['title']
class PhotoAlbum(Gallery):
class GalleryMeta:
member_models = [Photo]
class Meta:
verbose_name = 'Photo Album'
class Portfolio(Gallery):
class GalleryMeta:
member_models = [Video]
membership_class = 'PortfolioMembership'
class PortfolioMembership(Portfolio.BaseMembership):
extra_field = models.CharField(max_length=10)
<commit_msg>Use (not so) new processor class names<commit_after> | from galleries.models import Gallery, ImageModel
from django.db import models
from imagekit.models import ImageSpec
from imagekit.processors import ResizeToFit
class Photo(ImageModel):
thumbnail = ImageSpec([ResizeToFit(50, 50)])
full = ImageSpec([ResizeToFit(400, 200)])
caption = models.CharField(max_length=100)
class PortfolioImage(ImageModel):
thumbnail = ImageSpec([ResizeToFit(70, 40)])
class Video(models.Model):
title = models.CharField(max_length=50)
video = models.FileField(upload_to='galleries/video/video')
thumbnail = models.ImageField(upload_to='galleries/video/thumbnail', blank=True)
def __unicode__(self):
return self.title
class Meta:
ordering = ['title']
class PhotoAlbum(Gallery):
class GalleryMeta:
member_models = [Photo]
class Meta:
verbose_name = 'Photo Album'
class Portfolio(Gallery):
class GalleryMeta:
member_models = [Video]
membership_class = 'PortfolioMembership'
class PortfolioMembership(Portfolio.BaseMembership):
extra_field = models.CharField(max_length=10)
| from galleries.models import Gallery, ImageModel
from django.db import models
from imagekit.models import ImageSpec
from imagekit.processors.resize import Fit
class Photo(ImageModel):
thumbnail = ImageSpec([Fit(50, 50)])
full = ImageSpec([Fit(400, 200)])
caption = models.CharField(max_length=100)
class PortfolioImage(ImageModel):
thumbnail = ImageSpec([Fit(70, 40)])
class Video(models.Model):
title = models.CharField(max_length=50)
video = models.FileField(upload_to='galleries/video/video')
thumbnail = models.ImageField(upload_to='galleries/video/thumbnail', blank=True)
def __unicode__(self):
return self.title
class Meta:
ordering = ['title']
class PhotoAlbum(Gallery):
class GalleryMeta:
member_models = [Photo]
class Meta:
verbose_name = 'Photo Album'
class Portfolio(Gallery):
class GalleryMeta:
member_models = [Video]
membership_class = 'PortfolioMembership'
class PortfolioMembership(Portfolio.BaseMembership):
extra_field = models.CharField(max_length=10)
Use (not so) new processor class namesfrom galleries.models import Gallery, ImageModel
from django.db import models
from imagekit.models import ImageSpec
from imagekit.processors import ResizeToFit
class Photo(ImageModel):
thumbnail = ImageSpec([ResizeToFit(50, 50)])
full = ImageSpec([ResizeToFit(400, 200)])
caption = models.CharField(max_length=100)
class PortfolioImage(ImageModel):
thumbnail = ImageSpec([ResizeToFit(70, 40)])
class Video(models.Model):
title = models.CharField(max_length=50)
video = models.FileField(upload_to='galleries/video/video')
thumbnail = models.ImageField(upload_to='galleries/video/thumbnail', blank=True)
def __unicode__(self):
return self.title
class Meta:
ordering = ['title']
class PhotoAlbum(Gallery):
class GalleryMeta:
member_models = [Photo]
class Meta:
verbose_name = 'Photo Album'
class Portfolio(Gallery):
class GalleryMeta:
member_models = [Video]
membership_class = 'PortfolioMembership'
class PortfolioMembership(Portfolio.BaseMembership):
extra_field = models.CharField(max_length=10)
| <commit_before>from galleries.models import Gallery, ImageModel
from django.db import models
from imagekit.models import ImageSpec
from imagekit.processors.resize import Fit
class Photo(ImageModel):
thumbnail = ImageSpec([Fit(50, 50)])
full = ImageSpec([Fit(400, 200)])
caption = models.CharField(max_length=100)
class PortfolioImage(ImageModel):
thumbnail = ImageSpec([Fit(70, 40)])
class Video(models.Model):
title = models.CharField(max_length=50)
video = models.FileField(upload_to='galleries/video/video')
thumbnail = models.ImageField(upload_to='galleries/video/thumbnail', blank=True)
def __unicode__(self):
return self.title
class Meta:
ordering = ['title']
class PhotoAlbum(Gallery):
class GalleryMeta:
member_models = [Photo]
class Meta:
verbose_name = 'Photo Album'
class Portfolio(Gallery):
class GalleryMeta:
member_models = [Video]
membership_class = 'PortfolioMembership'
class PortfolioMembership(Portfolio.BaseMembership):
extra_field = models.CharField(max_length=10)
<commit_msg>Use (not so) new processor class names<commit_after>from galleries.models import Gallery, ImageModel
from django.db import models
from imagekit.models import ImageSpec
from imagekit.processors import ResizeToFit
class Photo(ImageModel):
thumbnail = ImageSpec([ResizeToFit(50, 50)])
full = ImageSpec([ResizeToFit(400, 200)])
caption = models.CharField(max_length=100)
class PortfolioImage(ImageModel):
thumbnail = ImageSpec([ResizeToFit(70, 40)])
class Video(models.Model):
title = models.CharField(max_length=50)
video = models.FileField(upload_to='galleries/video/video')
thumbnail = models.ImageField(upload_to='galleries/video/thumbnail', blank=True)
def __unicode__(self):
return self.title
class Meta:
ordering = ['title']
class PhotoAlbum(Gallery):
class GalleryMeta:
member_models = [Photo]
class Meta:
verbose_name = 'Photo Album'
class Portfolio(Gallery):
class GalleryMeta:
member_models = [Video]
membership_class = 'PortfolioMembership'
class PortfolioMembership(Portfolio.BaseMembership):
extra_field = models.CharField(max_length=10)
|
c8ef27115ebe38d19efb857a05fe5a4e7910ee55 | website/addons/forward/model.py | website/addons/forward/model.py | # -*- coding: utf-8 -*-
from modularodm import fields
from modularodm.validators import (
URLValidator, MinValueValidator, MaxValueValidator
)
from modularodm.exceptions import ValidationValueError
from framework.mongo.utils import sanitized
from website.addons.base import AddonNodeSettingsBase
class ForwardNodeSettings(AddonNodeSettingsBase):
complete = True
url = fields.StringField(validate=URLValidator())
label = fields.StringField(validate=sanitized)
redirect_bool = fields.BooleanField(default=True, validate=True)
redirect_secs = fields.IntegerField(
default=15,
validate=[MinValueValidator(5), MaxValueValidator(60)]
)
@property
def link_text(self):
return self.label if self.label else self.url
@ForwardNodeSettings.subscribe('before_save')
def validate_circular_reference(schema, instance):
"""Prevent node from forwarding to itself."""
if instance.url and instance.owner._id in instance.url:
raise ValidationValueError('Circular URL')
| # -*- coding: utf-8 -*-
from modularodm import fields
from modularodm.validators import (
URLValidator, MinValueValidator, MaxValueValidator
)
from modularodm.exceptions import ValidationValueError
from framework.mongo.utils import sanitized
from website.addons.base import AddonNodeSettingsBase
class ForwardNodeSettings(AddonNodeSettingsBase):
complete = True
url = fields.StringField(validate=URLValidator())
label = fields.StringField(validate=sanitized)
redirect_bool = fields.BooleanField(default=True, validate=True)
redirect_secs = fields.IntegerField(
default=15,
validate=[MinValueValidator(5), MaxValueValidator(60)]
)
@property
def link_text(self):
return self.label if self.label else self.url
def on_delete(self):
self.reset()
def reset(self):
self.url = None
self.label = None
self.redirect_bool = True
self.redirect_secs = 15
@ForwardNodeSettings.subscribe('before_save')
def validate_circular_reference(schema, instance):
"""Prevent node from forwarding to itself."""
if instance.url and instance.owner._id in instance.url:
raise ValidationValueError('Circular URL')
| Reset values of external link add-on to default when deleted. | Reset values of external link add-on to default when deleted.
| Python | apache-2.0 | cosenal/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,lyndsysimon/osf.io,dplorimer/osf,cldershem/osf.io,leb2dg/osf.io,kch8qx/osf.io,ZobairAlijan/osf.io,caseyrygt/osf.io,jmcarp/osf.io,SSJohns/osf.io,arpitar/osf.io,zamattiac/osf.io,RomanZWang/osf.io,alexschiller/osf.io,samchrisinger/osf.io,emetsger/osf.io,SSJohns/osf.io,asanfilippo7/osf.io,chrisseto/osf.io,sbt9uc/osf.io,caseyrygt/osf.io,TomHeatwole/osf.io,mattclark/osf.io,ticklemepierce/osf.io,mattclark/osf.io,felliott/osf.io,alexschiller/osf.io,DanielSBrown/osf.io,aaxelb/osf.io,binoculars/osf.io,HarryRybacki/osf.io,amyshi188/osf.io,leb2dg/osf.io,reinaH/osf.io,amyshi188/osf.io,Ghalko/osf.io,erinspace/osf.io,dplorimer/osf,brianjgeiger/osf.io,samanehsan/osf.io,mfraezz/osf.io,zachjanicki/osf.io,caneruguz/osf.io,ZobairAlijan/osf.io,njantrania/osf.io,danielneis/osf.io,mluo613/osf.io,abought/osf.io,GageGaskins/osf.io,mluke93/osf.io,zachjanicki/osf.io,cldershem/osf.io,samanehsan/osf.io,brandonPurvis/osf.io,haoyuchen1992/osf.io,SSJohns/osf.io,mluo613/osf.io,cosenal/osf.io,Ghalko/osf.io,CenterForOpenScience/osf.io,GageGaskins/osf.io,bdyetton/prettychart,DanielSBrown/osf.io,sloria/osf.io,mluo613/osf.io,baylee-d/osf.io,kwierman/osf.io,bdyetton/prettychart,DanielSBrown/osf.io,kwierman/osf.io,reinaH/osf.io,rdhyee/osf.io,KAsante95/osf.io,jmcarp/osf.io,RomanZWang/osf.io,monikagrabowska/osf.io,felliott/osf.io,mluo613/osf.io,asanfilippo7/osf.io,acshi/osf.io,laurenrevere/osf.io,MerlinZhang/osf.io,GageGaskins/osf.io,jnayak1/osf.io,chennan47/osf.io,asanfilippo7/osf.io,mfraezz/osf.io,doublebits/osf.io,adlius/osf.io,binoculars/osf.io,sbt9uc/osf.io,cwisecarver/osf.io,cosenal/osf.io,saradbowman/osf.io,MerlinZhang/osf.io,lyndsysimon/osf.io,cldershem/osf.io,jnayak1/osf.io,dplorimer/osf,danielneis/osf.io,TomBaxter/osf.io,erinspace/osf.io,hmoco/osf.io,mluke93/osf.io,rdhyee/osf.io,aaxelb/osf.io,Nesiehr/osf.io,adlius/osf.io,caneruguz/osf.io,cslzchen/osf.io,Ghalko/osf.io,kwierman/osf.io,petermalcolm/osf.io,cslzchen/osf.
io,sloria/osf.io,ckc6cz/osf.io,doublebits/osf.io,ticklemepierce/osf.io,KAsante95/osf.io,rdhyee/osf.io,jolene-esposito/osf.io,kch8qx/osf.io,crcresearch/osf.io,brandonPurvis/osf.io,dplorimer/osf,mattclark/osf.io,chennan47/osf.io,monikagrabowska/osf.io,abought/osf.io,RomanZWang/osf.io,emetsger/osf.io,KAsante95/osf.io,adlius/osf.io,jolene-esposito/osf.io,Nesiehr/osf.io,baylee-d/osf.io,wearpants/osf.io,baylee-d/osf.io,RomanZWang/osf.io,mfraezz/osf.io,monikagrabowska/osf.io,sbt9uc/osf.io,Nesiehr/osf.io,hmoco/osf.io,monikagrabowska/osf.io,caseyrollins/osf.io,icereval/osf.io,abought/osf.io,arpitar/osf.io,brandonPurvis/osf.io,caneruguz/osf.io,crcresearch/osf.io,TomHeatwole/osf.io,Ghalko/osf.io,ckc6cz/osf.io,TomHeatwole/osf.io,erinspace/osf.io,doublebits/osf.io,danielneis/osf.io,doublebits/osf.io,cwisecarver/osf.io,Johnetordoff/osf.io,felliott/osf.io,billyhunt/osf.io,MerlinZhang/osf.io,alexschiller/osf.io,njantrania/osf.io,ticklemepierce/osf.io,billyhunt/osf.io,Johnetordoff/osf.io,cldershem/osf.io,billyhunt/osf.io,petermalcolm/osf.io,arpitar/osf.io,reinaH/osf.io,lyndsysimon/osf.io,pattisdr/osf.io,zamattiac/osf.io,mluke93/osf.io,crcresearch/osf.io,chrisseto/osf.io,felliott/osf.io,chennan47/osf.io,haoyuchen1992/osf.io,jolene-esposito/osf.io,doublebits/osf.io,acshi/osf.io,Johnetordoff/osf.io,ckc6cz/osf.io,wearpants/osf.io,lyndsysimon/osf.io,caseyrollins/osf.io,acshi/osf.io,GageGaskins/osf.io,leb2dg/osf.io,GageGaskins/osf.io,laurenrevere/osf.io,Johnetordoff/osf.io,abought/osf.io,Nesiehr/osf.io,alexschiller/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,adlius/osf.io,emetsger/osf.io,amyshi188/osf.io,aaxelb/osf.io,pattisdr/osf.io,kch8qx/osf.io,jmcarp/osf.io,rdhyee/osf.io,asanfilippo7/osf.io,brandonPurvis/osf.io,caneruguz/osf.io,aaxelb/osf.io,jnayak1/osf.io,brianjgeiger/osf.io,alexschiller/osf.io,sloria/osf.io,jnayak1/osf.io,zamattiac/osf.io,mluo613/osf.io,reinaH/osf.io,samchrisinger/osf.io,laurenrevere/osf.io,zachjanicki/osf.io,samchrisinger/osf.io,HarryRybacki/osf.io,sbt9uc/osf.
io,mluke93/osf.io,chrisseto/osf.io,leb2dg/osf.io,njantrania/osf.io,ZobairAlijan/osf.io,samanehsan/osf.io,chrisseto/osf.io,cwisecarver/osf.io,jolene-esposito/osf.io,njantrania/osf.io,brandonPurvis/osf.io,RomanZWang/osf.io,bdyetton/prettychart,monikagrabowska/osf.io,ZobairAlijan/osf.io,petermalcolm/osf.io,ticklemepierce/osf.io,zachjanicki/osf.io,haoyuchen1992/osf.io,samanehsan/osf.io,hmoco/osf.io,wearpants/osf.io,brianjgeiger/osf.io,kwierman/osf.io,binoculars/osf.io,caseyrygt/osf.io,acshi/osf.io,ckc6cz/osf.io,arpitar/osf.io,mfraezz/osf.io,billyhunt/osf.io,caseyrollins/osf.io,cwisecarver/osf.io,zamattiac/osf.io,TomHeatwole/osf.io,icereval/osf.io,cslzchen/osf.io,haoyuchen1992/osf.io,saradbowman/osf.io,DanielSBrown/osf.io,HarryRybacki/osf.io,billyhunt/osf.io,CenterForOpenScience/osf.io,TomBaxter/osf.io,SSJohns/osf.io,wearpants/osf.io,hmoco/osf.io,CenterForOpenScience/osf.io,caseyrygt/osf.io,danielneis/osf.io,KAsante95/osf.io,samchrisinger/osf.io,kch8qx/osf.io,emetsger/osf.io,TomBaxter/osf.io,KAsante95/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,kch8qx/osf.io,MerlinZhang/osf.io,bdyetton/prettychart,HalcyonChimera/osf.io,petermalcolm/osf.io,cosenal/osf.io,cslzchen/osf.io,jmcarp/osf.io,acshi/osf.io,amyshi188/osf.io,icereval/osf.io,HarryRybacki/osf.io | # -*- coding: utf-8 -*-
from modularodm import fields
from modularodm.validators import (
URLValidator, MinValueValidator, MaxValueValidator
)
from modularodm.exceptions import ValidationValueError
from framework.mongo.utils import sanitized
from website.addons.base import AddonNodeSettingsBase
class ForwardNodeSettings(AddonNodeSettingsBase):
complete = True
url = fields.StringField(validate=URLValidator())
label = fields.StringField(validate=sanitized)
redirect_bool = fields.BooleanField(default=True, validate=True)
redirect_secs = fields.IntegerField(
default=15,
validate=[MinValueValidator(5), MaxValueValidator(60)]
)
@property
def link_text(self):
return self.label if self.label else self.url
@ForwardNodeSettings.subscribe('before_save')
def validate_circular_reference(schema, instance):
"""Prevent node from forwarding to itself."""
if instance.url and instance.owner._id in instance.url:
raise ValidationValueError('Circular URL')
Reset values of external link add-on to default when deleted. | # -*- coding: utf-8 -*-
from modularodm import fields
from modularodm.validators import (
URLValidator, MinValueValidator, MaxValueValidator
)
from modularodm.exceptions import ValidationValueError
from framework.mongo.utils import sanitized
from website.addons.base import AddonNodeSettingsBase
class ForwardNodeSettings(AddonNodeSettingsBase):
complete = True
url = fields.StringField(validate=URLValidator())
label = fields.StringField(validate=sanitized)
redirect_bool = fields.BooleanField(default=True, validate=True)
redirect_secs = fields.IntegerField(
default=15,
validate=[MinValueValidator(5), MaxValueValidator(60)]
)
@property
def link_text(self):
return self.label if self.label else self.url
def on_delete(self):
self.reset()
def reset(self):
self.url = None
self.label = None
self.redirect_bool = True
self.redirect_secs = 15
@ForwardNodeSettings.subscribe('before_save')
def validate_circular_reference(schema, instance):
"""Prevent node from forwarding to itself."""
if instance.url and instance.owner._id in instance.url:
raise ValidationValueError('Circular URL')
| <commit_before># -*- coding: utf-8 -*-
from modularodm import fields
from modularodm.validators import (
URLValidator, MinValueValidator, MaxValueValidator
)
from modularodm.exceptions import ValidationValueError
from framework.mongo.utils import sanitized
from website.addons.base import AddonNodeSettingsBase
class ForwardNodeSettings(AddonNodeSettingsBase):
complete = True
url = fields.StringField(validate=URLValidator())
label = fields.StringField(validate=sanitized)
redirect_bool = fields.BooleanField(default=True, validate=True)
redirect_secs = fields.IntegerField(
default=15,
validate=[MinValueValidator(5), MaxValueValidator(60)]
)
@property
def link_text(self):
return self.label if self.label else self.url
@ForwardNodeSettings.subscribe('before_save')
def validate_circular_reference(schema, instance):
"""Prevent node from forwarding to itself."""
if instance.url and instance.owner._id in instance.url:
raise ValidationValueError('Circular URL')
<commit_msg>Reset values of external link add-on to default when deleted.<commit_after> | # -*- coding: utf-8 -*-
from modularodm import fields
from modularodm.validators import (
URLValidator, MinValueValidator, MaxValueValidator
)
from modularodm.exceptions import ValidationValueError
from framework.mongo.utils import sanitized
from website.addons.base import AddonNodeSettingsBase
class ForwardNodeSettings(AddonNodeSettingsBase):
complete = True
url = fields.StringField(validate=URLValidator())
label = fields.StringField(validate=sanitized)
redirect_bool = fields.BooleanField(default=True, validate=True)
redirect_secs = fields.IntegerField(
default=15,
validate=[MinValueValidator(5), MaxValueValidator(60)]
)
@property
def link_text(self):
return self.label if self.label else self.url
def on_delete(self):
self.reset()
def reset(self):
self.url = None
self.label = None
self.redirect_bool = True
self.redirect_secs = 15
@ForwardNodeSettings.subscribe('before_save')
def validate_circular_reference(schema, instance):
"""Prevent node from forwarding to itself."""
if instance.url and instance.owner._id in instance.url:
raise ValidationValueError('Circular URL')
| # -*- coding: utf-8 -*-
from modularodm import fields
from modularodm.validators import (
URLValidator, MinValueValidator, MaxValueValidator
)
from modularodm.exceptions import ValidationValueError
from framework.mongo.utils import sanitized
from website.addons.base import AddonNodeSettingsBase
class ForwardNodeSettings(AddonNodeSettingsBase):
complete = True
url = fields.StringField(validate=URLValidator())
label = fields.StringField(validate=sanitized)
redirect_bool = fields.BooleanField(default=True, validate=True)
redirect_secs = fields.IntegerField(
default=15,
validate=[MinValueValidator(5), MaxValueValidator(60)]
)
@property
def link_text(self):
return self.label if self.label else self.url
@ForwardNodeSettings.subscribe('before_save')
def validate_circular_reference(schema, instance):
"""Prevent node from forwarding to itself."""
if instance.url and instance.owner._id in instance.url:
raise ValidationValueError('Circular URL')
Reset values of external link add-on to default when deleted.# -*- coding: utf-8 -*-
from modularodm import fields
from modularodm.validators import (
URLValidator, MinValueValidator, MaxValueValidator
)
from modularodm.exceptions import ValidationValueError
from framework.mongo.utils import sanitized
from website.addons.base import AddonNodeSettingsBase
class ForwardNodeSettings(AddonNodeSettingsBase):
complete = True
url = fields.StringField(validate=URLValidator())
label = fields.StringField(validate=sanitized)
redirect_bool = fields.BooleanField(default=True, validate=True)
redirect_secs = fields.IntegerField(
default=15,
validate=[MinValueValidator(5), MaxValueValidator(60)]
)
@property
def link_text(self):
return self.label if self.label else self.url
def on_delete(self):
self.reset()
def reset(self):
self.url = None
self.label = None
self.redirect_bool = True
self.redirect_secs = 15
@ForwardNodeSettings.subscribe('before_save')
def validate_circular_reference(schema, instance):
"""Prevent node from forwarding to itself."""
if instance.url and instance.owner._id in instance.url:
raise ValidationValueError('Circular URL')
| <commit_before># -*- coding: utf-8 -*-
from modularodm import fields
from modularodm.validators import (
URLValidator, MinValueValidator, MaxValueValidator
)
from modularodm.exceptions import ValidationValueError
from framework.mongo.utils import sanitized
from website.addons.base import AddonNodeSettingsBase
class ForwardNodeSettings(AddonNodeSettingsBase):
complete = True
url = fields.StringField(validate=URLValidator())
label = fields.StringField(validate=sanitized)
redirect_bool = fields.BooleanField(default=True, validate=True)
redirect_secs = fields.IntegerField(
default=15,
validate=[MinValueValidator(5), MaxValueValidator(60)]
)
@property
def link_text(self):
return self.label if self.label else self.url
@ForwardNodeSettings.subscribe('before_save')
def validate_circular_reference(schema, instance):
"""Prevent node from forwarding to itself."""
if instance.url and instance.owner._id in instance.url:
raise ValidationValueError('Circular URL')
<commit_msg>Reset values of external link add-on to default when deleted.<commit_after># -*- coding: utf-8 -*-
from modularodm import fields
from modularodm.validators import (
URLValidator, MinValueValidator, MaxValueValidator
)
from modularodm.exceptions import ValidationValueError
from framework.mongo.utils import sanitized
from website.addons.base import AddonNodeSettingsBase
class ForwardNodeSettings(AddonNodeSettingsBase):
complete = True
url = fields.StringField(validate=URLValidator())
label = fields.StringField(validate=sanitized)
redirect_bool = fields.BooleanField(default=True, validate=True)
redirect_secs = fields.IntegerField(
default=15,
validate=[MinValueValidator(5), MaxValueValidator(60)]
)
@property
def link_text(self):
return self.label if self.label else self.url
def on_delete(self):
self.reset()
def reset(self):
self.url = None
self.label = None
self.redirect_bool = True
self.redirect_secs = 15
@ForwardNodeSettings.subscribe('before_save')
def validate_circular_reference(schema, instance):
"""Prevent node from forwarding to itself."""
if instance.url and instance.owner._id in instance.url:
raise ValidationValueError('Circular URL')
|
ac9c8fe7519ff76b4f4002ae8c50e0185fa4bb88 | tools/test_filter.py | tools/test_filter.py | {
'bslmf_addreference': [ {'OS': 'Windows'} ],
'bslstl_iteratorutil': [ {'OS': 'SunOS'} ],
'bslstl_unorderedmultiset': [ {'OS': 'SunOS'} ],
'bslalg_constructorproxy': [ {'OS': 'AIX', 'library': 'shared_library'} ],
'bsls_atomic' : [
{'case': 7, 'HOST': 'VM', 'policy': 'skip' },
{'case': 8, 'HOST': 'VM', 'policy': 'skip' },
],
'bsls_stopwatch' : [ {'case': 6, 'HOST': 'VM', 'policy': 'skip' } ],
}
| {
'bslmf_addreference': [ {'OS': 'Windows'} ],
'bslstl_iteratorutil': [ {'OS': 'SunOS'} ],
'bslstl_unorderedmultiset': [ {'OS': 'SunOS'} ],
'bsls_atomic' : [
{'case': 7, 'HOST': 'VM', 'policy': 'skip' },
{'case': 8, 'HOST': 'VM', 'policy': 'skip' },
],
'bsls_stopwatch' : [ {'case': 6, 'HOST': 'VM', 'policy': 'skip' } ],
}
| Remove exception for bslalg_constructorproxy test driver on AIX shared library builds. | Remove exception for bslalg_constructorproxy test driver on AIX shared library builds.
| Python | apache-2.0 | abeels/bde,che2/bde,minhlongdo/bde,bloomberg/bde-allocator-benchmarks,bowlofstew/bde,bloomberg/bde-allocator-benchmarks,jmptrader/bde,abeels/bde,dharesign/bde,frutiger/bde,che2/bde,apaprocki/bde,RMGiroux/bde-allocator-benchmarks,idispatch/bde,gbleaney/Allocator-Benchmarks,bloomberg/bde-allocator-benchmarks,idispatch/bde,che2/bde,gbleaney/Allocator-Benchmarks,osubboo/bde,RMGiroux/bde-allocator-benchmarks,bowlofstew/bde,apaprocki/bde,frutiger/bde,mversche/bde,dbremner/bde,jmptrader/bde,minhlongdo/bde,mversche/bde,idispatch/bde,saxena84/bde,frutiger/bde,bloomberg/bde,abeels/bde,frutiger/bde,che2/bde,apaprocki/bde,gbleaney/Allocator-Benchmarks,bloomberg/bde,minhlongdo/bde,jmptrader/bde,dbremner/bde,bloomberg/bde,minhlongdo/bde,RMGiroux/bde-allocator-benchmarks,saxena84/bde,apaprocki/bde,bloomberg/bde-allocator-benchmarks,osubboo/bde,RMGiroux/bde-allocator-benchmarks,bowlofstew/bde,bloomberg/bde,dbremner/bde,bloomberg/bde,abeels/bde,saxena84/bde,mversche/bde,mversche/bde,dharesign/bde,osubboo/bde,dbremner/bde,gbleaney/Allocator-Benchmarks,bloomberg/bde-allocator-benchmarks,dharesign/bde,osubboo/bde,abeels/bde,jmptrader/bde,idispatch/bde,dharesign/bde,RMGiroux/bde-allocator-benchmarks,apaprocki/bde,saxena84/bde,abeels/bde,bowlofstew/bde | {
'bslmf_addreference': [ {'OS': 'Windows'} ],
'bslstl_iteratorutil': [ {'OS': 'SunOS'} ],
'bslstl_unorderedmultiset': [ {'OS': 'SunOS'} ],
'bslalg_constructorproxy': [ {'OS': 'AIX', 'library': 'shared_library'} ],
'bsls_atomic' : [
{'case': 7, 'HOST': 'VM', 'policy': 'skip' },
{'case': 8, 'HOST': 'VM', 'policy': 'skip' },
],
'bsls_stopwatch' : [ {'case': 6, 'HOST': 'VM', 'policy': 'skip' } ],
}
Remove exception for bslalg_constructorproxy test driver on AIX shared library builds. | {
'bslmf_addreference': [ {'OS': 'Windows'} ],
'bslstl_iteratorutil': [ {'OS': 'SunOS'} ],
'bslstl_unorderedmultiset': [ {'OS': 'SunOS'} ],
'bsls_atomic' : [
{'case': 7, 'HOST': 'VM', 'policy': 'skip' },
{'case': 8, 'HOST': 'VM', 'policy': 'skip' },
],
'bsls_stopwatch' : [ {'case': 6, 'HOST': 'VM', 'policy': 'skip' } ],
}
| <commit_before>{
'bslmf_addreference': [ {'OS': 'Windows'} ],
'bslstl_iteratorutil': [ {'OS': 'SunOS'} ],
'bslstl_unorderedmultiset': [ {'OS': 'SunOS'} ],
'bslalg_constructorproxy': [ {'OS': 'AIX', 'library': 'shared_library'} ],
'bsls_atomic' : [
{'case': 7, 'HOST': 'VM', 'policy': 'skip' },
{'case': 8, 'HOST': 'VM', 'policy': 'skip' },
],
'bsls_stopwatch' : [ {'case': 6, 'HOST': 'VM', 'policy': 'skip' } ],
}
<commit_msg>Remove exception for bslalg_constructorproxy test driver on AIX shared library builds.<commit_after> | {
'bslmf_addreference': [ {'OS': 'Windows'} ],
'bslstl_iteratorutil': [ {'OS': 'SunOS'} ],
'bslstl_unorderedmultiset': [ {'OS': 'SunOS'} ],
'bsls_atomic' : [
{'case': 7, 'HOST': 'VM', 'policy': 'skip' },
{'case': 8, 'HOST': 'VM', 'policy': 'skip' },
],
'bsls_stopwatch' : [ {'case': 6, 'HOST': 'VM', 'policy': 'skip' } ],
}
| {
'bslmf_addreference': [ {'OS': 'Windows'} ],
'bslstl_iteratorutil': [ {'OS': 'SunOS'} ],
'bslstl_unorderedmultiset': [ {'OS': 'SunOS'} ],
'bslalg_constructorproxy': [ {'OS': 'AIX', 'library': 'shared_library'} ],
'bsls_atomic' : [
{'case': 7, 'HOST': 'VM', 'policy': 'skip' },
{'case': 8, 'HOST': 'VM', 'policy': 'skip' },
],
'bsls_stopwatch' : [ {'case': 6, 'HOST': 'VM', 'policy': 'skip' } ],
}
Remove exception for bslalg_constructorproxy test driver on AIX shared library builds.{
'bslmf_addreference': [ {'OS': 'Windows'} ],
'bslstl_iteratorutil': [ {'OS': 'SunOS'} ],
'bslstl_unorderedmultiset': [ {'OS': 'SunOS'} ],
'bsls_atomic' : [
{'case': 7, 'HOST': 'VM', 'policy': 'skip' },
{'case': 8, 'HOST': 'VM', 'policy': 'skip' },
],
'bsls_stopwatch' : [ {'case': 6, 'HOST': 'VM', 'policy': 'skip' } ],
}
| <commit_before>{
'bslmf_addreference': [ {'OS': 'Windows'} ],
'bslstl_iteratorutil': [ {'OS': 'SunOS'} ],
'bslstl_unorderedmultiset': [ {'OS': 'SunOS'} ],
'bslalg_constructorproxy': [ {'OS': 'AIX', 'library': 'shared_library'} ],
'bsls_atomic' : [
{'case': 7, 'HOST': 'VM', 'policy': 'skip' },
{'case': 8, 'HOST': 'VM', 'policy': 'skip' },
],
'bsls_stopwatch' : [ {'case': 6, 'HOST': 'VM', 'policy': 'skip' } ],
}
<commit_msg>Remove exception for bslalg_constructorproxy test driver on AIX shared library builds.<commit_after>{
'bslmf_addreference': [ {'OS': 'Windows'} ],
'bslstl_iteratorutil': [ {'OS': 'SunOS'} ],
'bslstl_unorderedmultiset': [ {'OS': 'SunOS'} ],
'bsls_atomic' : [
{'case': 7, 'HOST': 'VM', 'policy': 'skip' },
{'case': 8, 'HOST': 'VM', 'policy': 'skip' },
],
'bsls_stopwatch' : [ {'case': 6, 'HOST': 'VM', 'policy': 'skip' } ],
}
|
121bcbfc873ce45667ec67bc6f22387b43f3aa52 | openfisca_web_ui/uuidhelpers.py | openfisca_web_ui/uuidhelpers.py | # -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Helpers to handle uuid"""
import uuid
def generate_uuid():
return unicode(uuid.uuid4()).replace('-', '')
| # -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Helpers to handle uuid"""
import uuid
def generate_uuid():
return unicode(uuid.uuid4().hex)
| Use uuid.hex instead of reinventing it. | Use uuid.hex instead of reinventing it.
| Python | agpl-3.0 | openfisca/openfisca-web-ui,openfisca/openfisca-web-ui,openfisca/openfisca-web-ui | # -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Helpers to handle uuid"""
import uuid
def generate_uuid():
return unicode(uuid.uuid4()).replace('-', '')
Use uuid.hex instead of reinventing it. | # -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Helpers to handle uuid"""
import uuid
def generate_uuid():
return unicode(uuid.uuid4().hex)
| <commit_before># -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Helpers to handle uuid"""
import uuid
def generate_uuid():
    """Return a random UUID4 as a 32-character hex string.

    uuid.UUID.hex is already the dashless representation, so there is no
    need to stringify the UUID and strip the '-' characters by hand.
    """
    return unicode(uuid.uuid4().hex)
<commit_msg>Use uuid.hex instead of reinventing it.<commit_after> | # -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Helpers to handle uuid"""
import uuid
def generate_uuid():
    """Produce a random UUID4 rendered as dashless hexadecimal text."""
    fresh = uuid.uuid4()
    return unicode(fresh.hex)
| # -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Helpers to handle uuid"""
import uuid
def generate_uuid():
    """Return a random UUID4 as a 32-character hex string.

    uuid.UUID.hex is already the dashless representation, so there is no
    need to stringify the UUID and strip the '-' characters by hand.
    """
    return unicode(uuid.uuid4().hex)
Use uuid.hex instead of reinventing it.# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Helpers to handle uuid"""
import uuid
def generate_uuid():
    """Produce a random UUID4 rendered as dashless hexadecimal text."""
    fresh = uuid.uuid4()
    return unicode(fresh.hex)
| <commit_before># -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Helpers to handle uuid"""
import uuid
def generate_uuid():
    """Return a random UUID4 as a 32-character hex string.

    uuid.UUID.hex is already the dashless representation, so there is no
    need to stringify the UUID and strip the '-' characters by hand.
    """
    return unicode(uuid.uuid4().hex)
<commit_msg>Use uuid.hex instead of reinventing it.<commit_after># -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Helpers to handle uuid"""
import uuid
def generate_uuid():
    """Produce a random UUID4 rendered as dashless hexadecimal text."""
    fresh = uuid.uuid4()
    return unicode(fresh.hex)
|
2f1423eb57c21938ce85a07e3a3760901f2a852a | games/objects/basescript.py | games/objects/basescript.py |
import datetime
from mongoengine_models import Message
class Script(object):
    '''This is a placeholder class used for doing object script things.
    It's mostly just used for detecting if an object is really a script or not.
    '''
    def __init__(self, mongo_engine_object=None):
        # MongoEngine document this script is attached to; say() below
        # reads its .name and .loc attributes.
        self.me_obj = mongo_engine_object
        print "Initted with %s" % self.me_obj
    def say(self, message):
        '''Write the given text to the zone message database.'''
        # Persist the line as a non-player-generated zone Message, then
        # echo it to stdout for the server log.
        Message(sender=self.me_obj.name, body=message, loc=self.me_obj.loc, player_generated=False).save()
        print "[%s] %s: %s" % (datetime.datetime.now(), self.me_obj.name, message)
    def tick(self):
        # Per-game-tick hook; the base script does nothing.
        pass
|
import datetime
from mongoengine_models import Message
import os
import re
import random
from settings import MAX_DICE_AMOUNT
def parse(s):
    """Parse a dice-notation string such as '2d6', 'd20-2' or '3#2d6+1'.

    Returns a (rolls, dice, sides, mod) tuple: each element is the
    matched substring, or an int default (0 rolls, 1 die, 2 sides,
    0 modifier) when the optional part is absent.

    Raises ValueError if *s* is not valid dice notation (previously this
    crashed with an AttributeError on the None returned by re.search).
    """
    match = re.search(r'^((?P<rolls>\d+)#)?(?P<dice>\d*)d(?P<sides>\d+)(?P<mod>[+-]\d+)?$', s)
    if match is None:
        raise ValueError("Invalid dice notation: %r" % (s,))
    return (match.group('rolls') or 0,
            match.group('dice') or 1,
            match.group('sides') or 2,
            match.group('mod') or 0)
class Script(object):
    '''This is a placeholder class used for doing object script things.
    It's mostly just used for detecting if an object is really a script or not.
    '''
    def __init__(self, mongo_engine_object=None):
        # MongoEngine document this script drives; say() reads .name/.loc.
        self.me_obj = mongo_engine_object
        print "Initted with %s" % self.me_obj
    def roll(self, dicestring):
        '''Roll dice specified by dicestring and return the value.
        Does not allow more dice than the MAX_DICE_AMOUNT setting
        to be rolled at once.'''
        # NOTE(review): the repeat-count ('rolls') group returned by
        # parse() is discarded and the name rebound to a list below --
        # confirm whether "3#2d6" repetition was meant to be supported.
        rolls, num, sides, mod = parse(dicestring)
        num = int(num)
        sides = int(sides)
        mod = int(mod)
        if num > MAX_DICE_AMOUNT:
            raise UserWarning("Cannot roll more than %d dice at once." % MAX_DICE_AMOUNT)
        rolls = []
        for i in xrange(num):
            #print "Rolling dice #%d of %d: %f%% done." % (i, num, (float(i)/float(num))*100)
            # NOTE(review): the modifier is added to every die here, not
            # once to the total -- verify that is the intended rule.
            rolls.append(random.randrange(1, int(sides)+1)+int(mod))
        return sum(rolls)
    def say(self, message):
        '''Write the given text to the zone message database.'''
        # Persist as a non-player zone Message, then echo to the log.
        Message(sender=self.me_obj.name, body=message, loc=self.me_obj.loc, player_generated=False).save()
        print "[%s] %s: %s" % (datetime.datetime.now(), self.me_obj.name, message)
    def tick(self):
        # Per-game-tick hook; the base script does nothing.
        pass
| Add support for rolling dice for easy random number generation in scripts. | Add support for rolling dice for easy random number generation in scripts.
| Python | agpl-3.0 | cnelsonsic/SimpleMMO,cnelsonsic/SimpleMMO,cnelsonsic/SimpleMMO |
import datetime
from mongoengine_models import Message
class Script(object):
    '''This is a placeholder class used for doing object script things.
    It's mostly just used for detecting if an object is really a script or not.
    '''
    def __init__(self, mongo_engine_object=None):
        # MongoEngine document this script is attached to; say() below
        # reads its .name and .loc attributes.
        self.me_obj = mongo_engine_object
        print "Initted with %s" % self.me_obj
    def say(self, message):
        '''Write the given text to the zone message database.'''
        # Persist the line as a non-player-generated zone Message, then
        # echo it to stdout for the server log.
        Message(sender=self.me_obj.name, body=message, loc=self.me_obj.loc, player_generated=False).save()
        print "[%s] %s: %s" % (datetime.datetime.now(), self.me_obj.name, message)
    def tick(self):
        # Per-game-tick hook; the base script does nothing.
        pass
Add support for rolling dice for easy random number generation in scripts. |
import datetime
from mongoengine_models import Message
import os
import re
import random
from settings import MAX_DICE_AMOUNT
def parse(s):
    """Parse a dice-notation string such as '2d6', 'd20-2' or '3#2d6+1'.

    Returns a (rolls, dice, sides, mod) tuple: each element is the
    matched substring, or an int default (0 rolls, 1 die, 2 sides,
    0 modifier) when the optional part is absent.

    Raises ValueError if *s* is not valid dice notation (previously this
    crashed with an AttributeError on the None returned by re.search).
    """
    match = re.search(r'^((?P<rolls>\d+)#)?(?P<dice>\d*)d(?P<sides>\d+)(?P<mod>[+-]\d+)?$', s)
    if match is None:
        raise ValueError("Invalid dice notation: %r" % (s,))
    return (match.group('rolls') or 0,
            match.group('dice') or 1,
            match.group('sides') or 2,
            match.group('mod') or 0)
class Script(object):
    '''This is a placeholder class used for doing object script things.
    It's mostly just used for detecting if an object is really a script or not.
    '''
    def __init__(self, mongo_engine_object=None):
        # MongoEngine document this script drives; say() reads .name/.loc.
        self.me_obj = mongo_engine_object
        print "Initted with %s" % self.me_obj
    def roll(self, dicestring):
        '''Roll dice specified by dicestring and return the value.
        Does not allow more dice than the MAX_DICE_AMOUNT setting
        to be rolled at once.'''
        # NOTE(review): the repeat-count ('rolls') group returned by
        # parse() is discarded and the name rebound to a list below --
        # confirm whether "3#2d6" repetition was meant to be supported.
        rolls, num, sides, mod = parse(dicestring)
        num = int(num)
        sides = int(sides)
        mod = int(mod)
        if num > MAX_DICE_AMOUNT:
            raise UserWarning("Cannot roll more than %d dice at once." % MAX_DICE_AMOUNT)
        rolls = []
        for i in xrange(num):
            #print "Rolling dice #%d of %d: %f%% done." % (i, num, (float(i)/float(num))*100)
            # NOTE(review): the modifier is added to every die here, not
            # once to the total -- verify that is the intended rule.
            rolls.append(random.randrange(1, int(sides)+1)+int(mod))
        return sum(rolls)
    def say(self, message):
        '''Write the given text to the zone message database.'''
        # Persist as a non-player zone Message, then echo to the log.
        Message(sender=self.me_obj.name, body=message, loc=self.me_obj.loc, player_generated=False).save()
        print "[%s] %s: %s" % (datetime.datetime.now(), self.me_obj.name, message)
    def tick(self):
        # Per-game-tick hook; the base script does nothing.
        pass
| <commit_before>
import datetime
from mongoengine_models import Message
class Script(object):
    '''This is a placeholder class used for doing object script things.
    It's mostly just used for detecting if an object is really a script or not.
    '''
    def __init__(self, mongo_engine_object=None):
        # MongoEngine document this script is attached to; say() below
        # reads its .name and .loc attributes.
        self.me_obj = mongo_engine_object
        print "Initted with %s" % self.me_obj
    def say(self, message):
        '''Write the given text to the zone message database.'''
        # Persist the line as a non-player-generated zone Message, then
        # echo it to stdout for the server log.
        Message(sender=self.me_obj.name, body=message, loc=self.me_obj.loc, player_generated=False).save()
        print "[%s] %s: %s" % (datetime.datetime.now(), self.me_obj.name, message)
    def tick(self):
        # Per-game-tick hook; the base script does nothing.
        pass
<commit_msg>Add support for rolling dice for easy random number generation in scripts.<commit_after> |
import datetime
from mongoengine_models import Message
import os
import re
import random
from settings import MAX_DICE_AMOUNT
def parse(s):
    """Parse a dice-notation string such as '2d6', 'd20-2' or '3#2d6+1'.

    Returns a (rolls, dice, sides, mod) tuple: each element is the
    matched substring, or an int default (0 rolls, 1 die, 2 sides,
    0 modifier) when the optional part is absent.

    Raises ValueError if *s* is not valid dice notation (previously this
    crashed with an AttributeError on the None returned by re.search).
    """
    match = re.search(r'^((?P<rolls>\d+)#)?(?P<dice>\d*)d(?P<sides>\d+)(?P<mod>[+-]\d+)?$', s)
    if match is None:
        raise ValueError("Invalid dice notation: %r" % (s,))
    return (match.group('rolls') or 0,
            match.group('dice') or 1,
            match.group('sides') or 2,
            match.group('mod') or 0)
class Script(object):
    '''This is a placeholder class used for doing object script things.
    It's mostly just used for detecting if an object is really a script or not.
    '''
    def __init__(self, mongo_engine_object=None):
        # MongoEngine document this script drives; say() reads .name/.loc.
        self.me_obj = mongo_engine_object
        print "Initted with %s" % self.me_obj
    def roll(self, dicestring):
        '''Roll dice specified by dicestring and return the value.
        Does not allow more dice than the MAX_DICE_AMOUNT setting
        to be rolled at once.'''
        # NOTE(review): the repeat-count ('rolls') group returned by
        # parse() is discarded and the name rebound to a list below --
        # confirm whether "3#2d6" repetition was meant to be supported.
        rolls, num, sides, mod = parse(dicestring)
        num = int(num)
        sides = int(sides)
        mod = int(mod)
        if num > MAX_DICE_AMOUNT:
            raise UserWarning("Cannot roll more than %d dice at once." % MAX_DICE_AMOUNT)
        rolls = []
        for i in xrange(num):
            #print "Rolling dice #%d of %d: %f%% done." % (i, num, (float(i)/float(num))*100)
            # NOTE(review): the modifier is added to every die here, not
            # once to the total -- verify that is the intended rule.
            rolls.append(random.randrange(1, int(sides)+1)+int(mod))
        return sum(rolls)
    def say(self, message):
        '''Write the given text to the zone message database.'''
        # Persist as a non-player zone Message, then echo to the log.
        Message(sender=self.me_obj.name, body=message, loc=self.me_obj.loc, player_generated=False).save()
        print "[%s] %s: %s" % (datetime.datetime.now(), self.me_obj.name, message)
    def tick(self):
        # Per-game-tick hook; the base script does nothing.
        pass
|
import datetime
from mongoengine_models import Message
class Script(object):
    '''This is a placeholder class used for doing object script things.
    It's mostly just used for detecting if an object is really a script or not.
    '''
    def __init__(self, mongo_engine_object=None):
        # MongoEngine document this script is attached to; say() below
        # reads its .name and .loc attributes.
        self.me_obj = mongo_engine_object
        print "Initted with %s" % self.me_obj
    def say(self, message):
        '''Write the given text to the zone message database.'''
        # Persist the line as a non-player-generated zone Message, then
        # echo it to stdout for the server log.
        Message(sender=self.me_obj.name, body=message, loc=self.me_obj.loc, player_generated=False).save()
        print "[%s] %s: %s" % (datetime.datetime.now(), self.me_obj.name, message)
    def tick(self):
        # Per-game-tick hook; the base script does nothing.
        pass
Add support for rolling dice for easy random number generation in scripts.
import datetime
from mongoengine_models import Message
import os
import re
import random
from settings import MAX_DICE_AMOUNT
def parse(s):
    """Parse a dice-notation string such as '2d6', 'd20-2' or '3#2d6+1'.

    Returns a (rolls, dice, sides, mod) tuple: each element is the
    matched substring, or an int default (0 rolls, 1 die, 2 sides,
    0 modifier) when the optional part is absent.

    Raises ValueError if *s* is not valid dice notation (previously this
    crashed with an AttributeError on the None returned by re.search).
    """
    match = re.search(r'^((?P<rolls>\d+)#)?(?P<dice>\d*)d(?P<sides>\d+)(?P<mod>[+-]\d+)?$', s)
    if match is None:
        raise ValueError("Invalid dice notation: %r" % (s,))
    return (match.group('rolls') or 0,
            match.group('dice') or 1,
            match.group('sides') or 2,
            match.group('mod') or 0)
class Script(object):
    '''This is a placeholder class used for doing object script things.
    It's mostly just used for detecting if an object is really a script or not.
    '''
    def __init__(self, mongo_engine_object=None):
        # MongoEngine document this script drives; say() reads .name/.loc.
        self.me_obj = mongo_engine_object
        print "Initted with %s" % self.me_obj
    def roll(self, dicestring):
        '''Roll dice specified by dicestring and return the value.
        Does not allow more dice than the MAX_DICE_AMOUNT setting
        to be rolled at once.'''
        # NOTE(review): the repeat-count ('rolls') group returned by
        # parse() is discarded and the name rebound to a list below --
        # confirm whether "3#2d6" repetition was meant to be supported.
        rolls, num, sides, mod = parse(dicestring)
        num = int(num)
        sides = int(sides)
        mod = int(mod)
        if num > MAX_DICE_AMOUNT:
            raise UserWarning("Cannot roll more than %d dice at once." % MAX_DICE_AMOUNT)
        rolls = []
        for i in xrange(num):
            #print "Rolling dice #%d of %d: %f%% done." % (i, num, (float(i)/float(num))*100)
            # NOTE(review): the modifier is added to every die here, not
            # once to the total -- verify that is the intended rule.
            rolls.append(random.randrange(1, int(sides)+1)+int(mod))
        return sum(rolls)
    def say(self, message):
        '''Write the given text to the zone message database.'''
        # Persist as a non-player zone Message, then echo to the log.
        Message(sender=self.me_obj.name, body=message, loc=self.me_obj.loc, player_generated=False).save()
        print "[%s] %s: %s" % (datetime.datetime.now(), self.me_obj.name, message)
    def tick(self):
        # Per-game-tick hook; the base script does nothing.
        pass
| <commit_before>
import datetime
from mongoengine_models import Message
class Script(object):
    '''This is a placeholder class used for doing object script things.
    It's mostly just used for detecting if an object is really a script or not.
    '''
    def __init__(self, mongo_engine_object=None):
        # MongoEngine document this script is attached to; say() below
        # reads its .name and .loc attributes.
        self.me_obj = mongo_engine_object
        print "Initted with %s" % self.me_obj
    def say(self, message):
        '''Write the given text to the zone message database.'''
        # Persist the line as a non-player-generated zone Message, then
        # echo it to stdout for the server log.
        Message(sender=self.me_obj.name, body=message, loc=self.me_obj.loc, player_generated=False).save()
        print "[%s] %s: %s" % (datetime.datetime.now(), self.me_obj.name, message)
    def tick(self):
        # Per-game-tick hook; the base script does nothing.
        pass
<commit_msg>Add support for rolling dice for easy random number generation in scripts.<commit_after>
import datetime
from mongoengine_models import Message
import os
import re
import random
from settings import MAX_DICE_AMOUNT
def parse(s):
    """Parse a dice-notation string such as '2d6', 'd20-2' or '3#2d6+1'.

    Returns a (rolls, dice, sides, mod) tuple: each element is the
    matched substring, or an int default (0 rolls, 1 die, 2 sides,
    0 modifier) when the optional part is absent.

    Raises ValueError if *s* is not valid dice notation (previously this
    crashed with an AttributeError on the None returned by re.search).
    """
    match = re.search(r'^((?P<rolls>\d+)#)?(?P<dice>\d*)d(?P<sides>\d+)(?P<mod>[+-]\d+)?$', s)
    if match is None:
        raise ValueError("Invalid dice notation: %r" % (s,))
    return (match.group('rolls') or 0,
            match.group('dice') or 1,
            match.group('sides') or 2,
            match.group('mod') or 0)
class Script(object):
    '''This is a placeholder class used for doing object script things.
    It's mostly just used for detecting if an object is really a script or not.
    '''
    def __init__(self, mongo_engine_object=None):
        # MongoEngine document this script drives; say() reads .name/.loc.
        self.me_obj = mongo_engine_object
        print "Initted with %s" % self.me_obj
    def roll(self, dicestring):
        '''Roll dice specified by dicestring and return the value.
        Does not allow more dice than the MAX_DICE_AMOUNT setting
        to be rolled at once.'''
        # NOTE(review): the repeat-count ('rolls') group returned by
        # parse() is discarded and the name rebound to a list below --
        # confirm whether "3#2d6" repetition was meant to be supported.
        rolls, num, sides, mod = parse(dicestring)
        num = int(num)
        sides = int(sides)
        mod = int(mod)
        if num > MAX_DICE_AMOUNT:
            raise UserWarning("Cannot roll more than %d dice at once." % MAX_DICE_AMOUNT)
        rolls = []
        for i in xrange(num):
            #print "Rolling dice #%d of %d: %f%% done." % (i, num, (float(i)/float(num))*100)
            # NOTE(review): the modifier is added to every die here, not
            # once to the total -- verify that is the intended rule.
            rolls.append(random.randrange(1, int(sides)+1)+int(mod))
        return sum(rolls)
    def say(self, message):
        '''Write the given text to the zone message database.'''
        # Persist as a non-player zone Message, then echo to the log.
        Message(sender=self.me_obj.name, body=message, loc=self.me_obj.loc, player_generated=False).save()
        print "[%s] %s: %s" % (datetime.datetime.now(), self.me_obj.name, message)
    def tick(self):
        # Per-game-tick hook; the base script does nothing.
        pass
|
bc19d4d7d2181ee402aafcbb064070852151063c | IPython/html/tests/test_notebookapp.py | IPython/html/tests/test_notebookapp.py | """Test NotebookApp"""
#-----------------------------------------------------------------------------
# Copyright (C) 2013 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import nose.tools as nt
import IPython.testing.tools as tt
#-----------------------------------------------------------------------------
# Test functions
#-----------------------------------------------------------------------------
def test_help_output():
    """ipython notebook --help-all works"""
    # Smoke test: `ipython notebook --help-all` must exit cleanly and
    # print its help text (checked by the shared testing helper).
    tt.help_all_output_test('notebook')
| """Test NotebookApp"""
#-----------------------------------------------------------------------------
# Copyright (C) 2013 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import nose.tools as nt
import IPython.testing.tools as tt
from IPython.html import notebookapp
#-----------------------------------------------------------------------------
# Test functions
#-----------------------------------------------------------------------------
def test_help_output():
    """ipython notebook --help-all works"""
    # Smoke test: `ipython notebook --help-all` must exit cleanly and
    # print its help text (checked by the shared testing helper).
    tt.help_all_output_test('notebook')
def test_server_info_file():
    # Round-trip the server-info file: after write_server_info_file() the
    # running server must be discoverable, and after
    # remove_server_info_file() it must not be.
    nbapp = notebookapp.NotebookApp(profile='nbserver_file_test')
    def get_servers():
        # Discovery is scoped to a throwaway profile so other running
        # notebook servers don't pollute the result.
        return list(notebookapp.discover_running_servers(profile='nbserver_file_test'))
    nbapp.initialize(argv=[])
    nbapp.write_server_info_file()
    servers = get_servers()
    nt.assert_equal(len(servers), 1)
    nt.assert_equal(servers[0]['port'], nbapp.port)
    nt.assert_equal(servers[0]['url'], nbapp.connection_url)
    nbapp.remove_server_info_file()
    nt.assert_equal(get_servers(), [])
    # The ENOENT error should be silenced.
    nbapp.remove_server_info_file()
| Python | bsd-3-clause | ipython/ipython,ipython/ipython | """Test NotebookApp"""
#-----------------------------------------------------------------------------
# Copyright (C) 2013 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import nose.tools as nt
import IPython.testing.tools as tt
#-----------------------------------------------------------------------------
# Test functions
#-----------------------------------------------------------------------------
def test_help_output():
    """ipython notebook --help-all works"""
    # Smoke test: `ipython notebook --help-all` must exit cleanly and
    # print its help text (checked by the shared testing helper).
    tt.help_all_output_test('notebook')
Test for writing and removing server info files | """Test NotebookApp"""
#-----------------------------------------------------------------------------
# Copyright (C) 2013 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import nose.tools as nt
import IPython.testing.tools as tt
from IPython.html import notebookapp
#-----------------------------------------------------------------------------
# Test functions
#-----------------------------------------------------------------------------
def test_help_output():
    """ipython notebook --help-all works"""
    # Smoke test: `ipython notebook --help-all` must exit cleanly and
    # print its help text (checked by the shared testing helper).
    tt.help_all_output_test('notebook')
def test_server_info_file():
    # Round-trip the server-info file: after write_server_info_file() the
    # running server must be discoverable, and after
    # remove_server_info_file() it must not be.
    nbapp = notebookapp.NotebookApp(profile='nbserver_file_test')
    def get_servers():
        # Discovery is scoped to a throwaway profile so other running
        # notebook servers don't pollute the result.
        return list(notebookapp.discover_running_servers(profile='nbserver_file_test'))
    nbapp.initialize(argv=[])
    nbapp.write_server_info_file()
    servers = get_servers()
    nt.assert_equal(len(servers), 1)
    nt.assert_equal(servers[0]['port'], nbapp.port)
    nt.assert_equal(servers[0]['url'], nbapp.connection_url)
    nbapp.remove_server_info_file()
    nt.assert_equal(get_servers(), [])
    # The ENOENT error should be silenced.
    nbapp.remove_server_info_file()
#-----------------------------------------------------------------------------
# Copyright (C) 2013 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import nose.tools as nt
import IPython.testing.tools as tt
#-----------------------------------------------------------------------------
# Test functions
#-----------------------------------------------------------------------------
def test_help_output():
    """ipython notebook --help-all works"""
    # Smoke test: `ipython notebook --help-all` must exit cleanly and
    # print its help text (checked by the shared testing helper).
    tt.help_all_output_test('notebook')
<commit_msg>Test for writing and removing server info files<commit_after> | """Test NotebookApp"""
#-----------------------------------------------------------------------------
# Copyright (C) 2013 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import nose.tools as nt
import IPython.testing.tools as tt
from IPython.html import notebookapp
#-----------------------------------------------------------------------------
# Test functions
#-----------------------------------------------------------------------------
def test_help_output():
    """ipython notebook --help-all works"""
    # Smoke test: `ipython notebook --help-all` must exit cleanly and
    # print its help text (checked by the shared testing helper).
    tt.help_all_output_test('notebook')
def test_server_info_file():
    # Round-trip the server-info file: after write_server_info_file() the
    # running server must be discoverable, and after
    # remove_server_info_file() it must not be.
    nbapp = notebookapp.NotebookApp(profile='nbserver_file_test')
    def get_servers():
        # Discovery is scoped to a throwaway profile so other running
        # notebook servers don't pollute the result.
        return list(notebookapp.discover_running_servers(profile='nbserver_file_test'))
    nbapp.initialize(argv=[])
    nbapp.write_server_info_file()
    servers = get_servers()
    nt.assert_equal(len(servers), 1)
    nt.assert_equal(servers[0]['port'], nbapp.port)
    nt.assert_equal(servers[0]['url'], nbapp.connection_url)
    nbapp.remove_server_info_file()
    nt.assert_equal(get_servers(), [])
    # The ENOENT error should be silenced.
    nbapp.remove_server_info_file()
#-----------------------------------------------------------------------------
# Copyright (C) 2013 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import nose.tools as nt
import IPython.testing.tools as tt
#-----------------------------------------------------------------------------
# Test functions
#-----------------------------------------------------------------------------
def test_help_output():
    """ipython notebook --help-all works"""
    # Smoke test: `ipython notebook --help-all` must exit cleanly and
    # print its help text (checked by the shared testing helper).
    tt.help_all_output_test('notebook')
Test for writing and removing server info files"""Test NotebookApp"""
#-----------------------------------------------------------------------------
# Copyright (C) 2013 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import nose.tools as nt
import IPython.testing.tools as tt
from IPython.html import notebookapp
#-----------------------------------------------------------------------------
# Test functions
#-----------------------------------------------------------------------------
def test_help_output():
    """ipython notebook --help-all works"""
    # Smoke test: `ipython notebook --help-all` must exit cleanly and
    # print its help text (checked by the shared testing helper).
    tt.help_all_output_test('notebook')
def test_server_info_file():
    # Round-trip the server-info file: after write_server_info_file() the
    # running server must be discoverable, and after
    # remove_server_info_file() it must not be.
    nbapp = notebookapp.NotebookApp(profile='nbserver_file_test')
    def get_servers():
        # Discovery is scoped to a throwaway profile so other running
        # notebook servers don't pollute the result.
        return list(notebookapp.discover_running_servers(profile='nbserver_file_test'))
    nbapp.initialize(argv=[])
    nbapp.write_server_info_file()
    servers = get_servers()
    nt.assert_equal(len(servers), 1)
    nt.assert_equal(servers[0]['port'], nbapp.port)
    nt.assert_equal(servers[0]['url'], nbapp.connection_url)
    nbapp.remove_server_info_file()
    nt.assert_equal(get_servers(), [])
    # The ENOENT error should be silenced.
    nbapp.remove_server_info_file()
#-----------------------------------------------------------------------------
# Copyright (C) 2013 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import nose.tools as nt
import IPython.testing.tools as tt
#-----------------------------------------------------------------------------
# Test functions
#-----------------------------------------------------------------------------
def test_help_output():
    """ipython notebook --help-all works"""
    # Smoke test: `ipython notebook --help-all` must exit cleanly and
    # print its help text (checked by the shared testing helper).
    tt.help_all_output_test('notebook')
<commit_msg>Test for writing and removing server info files<commit_after>"""Test NotebookApp"""
#-----------------------------------------------------------------------------
# Copyright (C) 2013 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import nose.tools as nt
import IPython.testing.tools as tt
from IPython.html import notebookapp
#-----------------------------------------------------------------------------
# Test functions
#-----------------------------------------------------------------------------
def test_help_output():
    """ipython notebook --help-all works"""
    # Smoke test: `ipython notebook --help-all` must exit cleanly and
    # print its help text (checked by the shared testing helper).
    tt.help_all_output_test('notebook')
def test_server_info_file():
    # Round-trip the server-info file: after write_server_info_file() the
    # running server must be discoverable, and after
    # remove_server_info_file() it must not be.
    nbapp = notebookapp.NotebookApp(profile='nbserver_file_test')
    def get_servers():
        # Discovery is scoped to a throwaway profile so other running
        # notebook servers don't pollute the result.
        return list(notebookapp.discover_running_servers(profile='nbserver_file_test'))
    nbapp.initialize(argv=[])
    nbapp.write_server_info_file()
    servers = get_servers()
    nt.assert_equal(len(servers), 1)
    nt.assert_equal(servers[0]['port'], nbapp.port)
    nt.assert_equal(servers[0]['url'], nbapp.connection_url)
    nbapp.remove_server_info_file()
    nt.assert_equal(get_servers(), [])
    # The ENOENT error should be silenced.
    nbapp.remove_server_info_file()
53ce60063c8a308cbbe08eddd264dd1e30c93615 | jarbas/core/tests/test_loaddatasets_command.py | jarbas/core/tests/test_loaddatasets_command.py | from django.test import TestCase
from jarbas.core.management.commands.loaddatasets import Command
class TestSerializer(TestCase):
    # Unit tests for the loaddatasets management command's value coercion.
    def setUp(self):
        # Fresh Command instance for each test method.
        self.command = Command()
    def test_force_int(self):
        # Integer-shaped and float-shaped strings coerce to int...
        self.assertEqual(self.command.force_int('1'), 1)
        self.assertEqual(self.command.force_int('1.0'), 1)
        # ...while non-numeric input must raise instead of being coerced.
        with self.assertRaises(ValueError):
            self.command.force_int('abc')
| from django.test import TestCase
from jarbas.core.management.commands.loaddatasets import Command
class TestSerializer(TestCase):
    """Tests for the loaddatasets command's value coercion and row serialization."""
    def setUp(self):
        # One Command instance per test method.
        self.command = Command()
    def test_force_int(self):
        # Integer-shaped and float-shaped strings coerce to int...
        self.assertEqual(self.command.force_int('1'), 1)
        self.assertEqual(self.command.force_int('1.0'), 1)
        # ...while non-numeric text must raise ValueError.
        with self.assertRaises(ValueError):
            self.command.force_int('abc')
    def test_serializer(self):
        # Columns the serializer must coerce from str to int.
        int_fields = (
            'document_id', 'congressperson_id', 'congressperson_document',
            'term', 'term_id', 'subquota_number', 'subquota_group_id',
            'document_type', 'month', 'year', 'installment',
            'batch_number', 'reimbursement_number', 'applicant_id',
        )
        # Columns the serializer must coerce from str to float.
        float_fields = (
            'document_value', 'remark_value', 'net_value',
            'reimbursement_value',
        )
        document = {field: '1' for field in int_fields}
        document.update({field: '1.1' for field in float_fields})
        document['issue_date'] = ''
        expected = {field: 1 for field in int_fields}
        expected.update({field: 1.1 for field in float_fields})
        expected['issue_date'] = None  # empty dates serialize to None
        self.assertEqual(self.command.serialize(document), expected)
| Add loaddatasets serializer tests for real | Add loaddatasets serializer tests for real
| Python | mit | marcusrehm/serenata-de-amor,rogeriochaves/jarbas,datasciencebr/serenata-de-amor,rogeriochaves/jarbas,marcusrehm/serenata-de-amor,rogeriochaves/jarbas,Guilhermeslucas/jarbas,datasciencebr/jarbas,rogeriochaves/jarbas,datasciencebr/serenata-de-amor,Guilhermeslucas/jarbas,datasciencebr/jarbas,Guilhermeslucas/jarbas,datasciencebr/jarbas,datasciencebr/jarbas,marcusrehm/serenata-de-amor,marcusrehm/serenata-de-amor,Guilhermeslucas/jarbas | from django.test import TestCase
from jarbas.core.management.commands.loaddatasets import Command
class TestSerializer(TestCase):
    # Unit tests for the loaddatasets management command's value coercion.
    def setUp(self):
        # Fresh Command instance for each test method.
        self.command = Command()
    def test_force_int(self):
        # Integer-shaped and float-shaped strings coerce to int...
        self.assertEqual(self.command.force_int('1'), 1)
        self.assertEqual(self.command.force_int('1.0'), 1)
        # ...while non-numeric input must raise instead of being coerced.
        with self.assertRaises(ValueError):
            self.command.force_int('abc')
Add loaddatasets serializer tests for real | from django.test import TestCase
from jarbas.core.management.commands.loaddatasets import Command
class TestSerializer(TestCase):
    """Tests for the loaddatasets command's value coercion and row serialization."""
    def setUp(self):
        # One Command instance per test method.
        self.command = Command()
    def test_force_int(self):
        # Integer-shaped and float-shaped strings coerce to int...
        self.assertEqual(self.command.force_int('1'), 1)
        self.assertEqual(self.command.force_int('1.0'), 1)
        # ...while non-numeric text must raise ValueError.
        with self.assertRaises(ValueError):
            self.command.force_int('abc')
    def test_serializer(self):
        # Columns the serializer must coerce from str to int.
        int_fields = (
            'document_id', 'congressperson_id', 'congressperson_document',
            'term', 'term_id', 'subquota_number', 'subquota_group_id',
            'document_type', 'month', 'year', 'installment',
            'batch_number', 'reimbursement_number', 'applicant_id',
        )
        # Columns the serializer must coerce from str to float.
        float_fields = (
            'document_value', 'remark_value', 'net_value',
            'reimbursement_value',
        )
        document = {field: '1' for field in int_fields}
        document.update({field: '1.1' for field in float_fields})
        document['issue_date'] = ''
        expected = {field: 1 for field in int_fields}
        expected.update({field: 1.1 for field in float_fields})
        expected['issue_date'] = None  # empty dates serialize to None
        self.assertEqual(self.command.serialize(document), expected)
| <commit_before>from django.test import TestCase
from jarbas.core.management.commands.loaddatasets import Command
class TestSerializer(TestCase):
def setUp(self):
self.command = Command()
def test_force_int(self):
self.assertEqual(self.command.force_int('1'), 1)
self.assertEqual(self.command.force_int('1.0'), 1)
with self.assertRaises(ValueError):
self.command.force_int('abc')
<commit_msg>Add loaddatasets serializer tests for real<commit_after> | from django.test import TestCase
from jarbas.core.management.commands.loaddatasets import Command
class TestSerializer(TestCase):
def setUp(self):
self.command = Command()
def test_force_int(self):
self.assertEqual(self.command.force_int('1'), 1)
self.assertEqual(self.command.force_int('1.0'), 1)
with self.assertRaises(ValueError):
self.command.force_int('abc')
def test_serializer(self):
expected = {
'document_id': 1,
'congressperson_id': 1,
'congressperson_document': 1,
'term': 1,
'term_id': 1,
'subquota_number': 1,
'subquota_group_id': 1,
'document_type': 1,
'month': 1,
'year': 1,
'installment': 1,
'batch_number': 1,
'reimbursement_number': 1,
'applicant_id': 1,
'document_value': 1.1,
'remark_value': 1.1,
'net_value': 1.1,
'reimbursement_value': 1.1,
'issue_date': None,
}
document = {
'document_id': '1',
'congressperson_id': '1',
'congressperson_document': '1',
'term': '1',
'term_id': '1',
'subquota_number': '1',
'subquota_group_id': '1',
'document_type': '1',
'month': '1',
'year': '1',
'installment': '1',
'batch_number': '1',
'reimbursement_number': '1',
'applicant_id': '1',
'document_value': '1.1',
'remark_value': '1.1',
'net_value': '1.1',
'reimbursement_value': '1.1',
'issue_date': '',
}
self.assertEqual(self.command.serialize(document), expected)
| from django.test import TestCase
from jarbas.core.management.commands.loaddatasets import Command
class TestSerializer(TestCase):
def setUp(self):
self.command = Command()
def test_force_int(self):
self.assertEqual(self.command.force_int('1'), 1)
self.assertEqual(self.command.force_int('1.0'), 1)
with self.assertRaises(ValueError):
self.command.force_int('abc')
Add loaddatasets serializer tests for realfrom django.test import TestCase
from jarbas.core.management.commands.loaddatasets import Command
class TestSerializer(TestCase):
def setUp(self):
self.command = Command()
def test_force_int(self):
self.assertEqual(self.command.force_int('1'), 1)
self.assertEqual(self.command.force_int('1.0'), 1)
with self.assertRaises(ValueError):
self.command.force_int('abc')
def test_serializer(self):
expected = {
'document_id': 1,
'congressperson_id': 1,
'congressperson_document': 1,
'term': 1,
'term_id': 1,
'subquota_number': 1,
'subquota_group_id': 1,
'document_type': 1,
'month': 1,
'year': 1,
'installment': 1,
'batch_number': 1,
'reimbursement_number': 1,
'applicant_id': 1,
'document_value': 1.1,
'remark_value': 1.1,
'net_value': 1.1,
'reimbursement_value': 1.1,
'issue_date': None,
}
document = {
'document_id': '1',
'congressperson_id': '1',
'congressperson_document': '1',
'term': '1',
'term_id': '1',
'subquota_number': '1',
'subquota_group_id': '1',
'document_type': '1',
'month': '1',
'year': '1',
'installment': '1',
'batch_number': '1',
'reimbursement_number': '1',
'applicant_id': '1',
'document_value': '1.1',
'remark_value': '1.1',
'net_value': '1.1',
'reimbursement_value': '1.1',
'issue_date': '',
}
self.assertEqual(self.command.serialize(document), expected)
| <commit_before>from django.test import TestCase
from jarbas.core.management.commands.loaddatasets import Command
class TestSerializer(TestCase):
def setUp(self):
self.command = Command()
def test_force_int(self):
self.assertEqual(self.command.force_int('1'), 1)
self.assertEqual(self.command.force_int('1.0'), 1)
with self.assertRaises(ValueError):
self.command.force_int('abc')
<commit_msg>Add loaddatasets serializer tests for real<commit_after>from django.test import TestCase
from jarbas.core.management.commands.loaddatasets import Command
class TestSerializer(TestCase):
def setUp(self):
self.command = Command()
def test_force_int(self):
self.assertEqual(self.command.force_int('1'), 1)
self.assertEqual(self.command.force_int('1.0'), 1)
with self.assertRaises(ValueError):
self.command.force_int('abc')
def test_serializer(self):
expected = {
'document_id': 1,
'congressperson_id': 1,
'congressperson_document': 1,
'term': 1,
'term_id': 1,
'subquota_number': 1,
'subquota_group_id': 1,
'document_type': 1,
'month': 1,
'year': 1,
'installment': 1,
'batch_number': 1,
'reimbursement_number': 1,
'applicant_id': 1,
'document_value': 1.1,
'remark_value': 1.1,
'net_value': 1.1,
'reimbursement_value': 1.1,
'issue_date': None,
}
document = {
'document_id': '1',
'congressperson_id': '1',
'congressperson_document': '1',
'term': '1',
'term_id': '1',
'subquota_number': '1',
'subquota_group_id': '1',
'document_type': '1',
'month': '1',
'year': '1',
'installment': '1',
'batch_number': '1',
'reimbursement_number': '1',
'applicant_id': '1',
'document_value': '1.1',
'remark_value': '1.1',
'net_value': '1.1',
'reimbursement_value': '1.1',
'issue_date': '',
}
self.assertEqual(self.command.serialize(document), expected)
|
d91d16a53ee01cf384187dcda28f1c4ab6e46e1b | astropy/io/misc/asdf/tags/time/tests/test_timedelta.py | astropy/io/misc/asdf/tags/time/tests/test_timedelta.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import pytest
asdf = pytest.importorskip('asdf')
from asdf.tests.helpers import assert_roundtrip_tree
from astropy.time import Time, TimeDelta
@pytest.mark.parametrize('fmt', Time.FORMATS.keys())
def test_timedelta(fmt, tmpdir):
t1 = Time(Time.now(), format=fmt)
t2 = Time(Time.now(), format=fmt)
td = t2 - t1
tree = dict(timedelta=td)
assert_roundtrip_tree(tree, tmpdir)
@pytest.mark.parametrize('scale', list(TimeDelta.SCALES) + [None])
def test_timedetal_scales(scale, tmpdir):
tree = dict(timedelta=TimeDelta(0.125, scale=scale))
assert_roundtrip_tree(tree, tmpdir)
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import pytest
asdf = pytest.importorskip('asdf')
from asdf.tests.helpers import assert_roundtrip_tree
from astropy.time import Time, TimeDelta
@pytest.mark.parametrize('fmt', TimeDelta.FORMATS.keys())
def test_timedelta(fmt, tmpdir):
t1 = Time(Time.now())
t2 = Time(Time.now())
td = TimeDelta(t2 - t1, format=fmt)
tree = dict(timedelta=td)
assert_roundtrip_tree(tree, tmpdir)
@pytest.mark.parametrize('scale', list(TimeDelta.SCALES) + [None])
def test_timedetal_scales(scale, tmpdir):
tree = dict(timedelta=TimeDelta(0.125, scale=scale))
assert_roundtrip_tree(tree, tmpdir)
| Fix the ASDF test for TimeDelta formats | Fix the ASDF test for TimeDelta formats
| Python | bsd-3-clause | larrybradley/astropy,MSeifert04/astropy,saimn/astropy,bsipocz/astropy,stargaser/astropy,larrybradley/astropy,dhomeier/astropy,saimn/astropy,bsipocz/astropy,mhvk/astropy,saimn/astropy,aleksandr-bakanov/astropy,pllim/astropy,mhvk/astropy,stargaser/astropy,MSeifert04/astropy,pllim/astropy,MSeifert04/astropy,astropy/astropy,MSeifert04/astropy,mhvk/astropy,lpsinger/astropy,StuartLittlefair/astropy,StuartLittlefair/astropy,pllim/astropy,stargaser/astropy,mhvk/astropy,mhvk/astropy,aleksandr-bakanov/astropy,bsipocz/astropy,dhomeier/astropy,lpsinger/astropy,saimn/astropy,StuartLittlefair/astropy,lpsinger/astropy,bsipocz/astropy,pllim/astropy,larrybradley/astropy,aleksandr-bakanov/astropy,lpsinger/astropy,dhomeier/astropy,astropy/astropy,saimn/astropy,aleksandr-bakanov/astropy,larrybradley/astropy,StuartLittlefair/astropy,astropy/astropy,larrybradley/astropy,astropy/astropy,pllim/astropy,StuartLittlefair/astropy,dhomeier/astropy,stargaser/astropy,astropy/astropy,dhomeier/astropy,lpsinger/astropy | # Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import pytest
asdf = pytest.importorskip('asdf')
from asdf.tests.helpers import assert_roundtrip_tree
from astropy.time import Time, TimeDelta
@pytest.mark.parametrize('fmt', Time.FORMATS.keys())
def test_timedelta(fmt, tmpdir):
t1 = Time(Time.now(), format=fmt)
t2 = Time(Time.now(), format=fmt)
td = t2 - t1
tree = dict(timedelta=td)
assert_roundtrip_tree(tree, tmpdir)
@pytest.mark.parametrize('scale', list(TimeDelta.SCALES) + [None])
def test_timedetal_scales(scale, tmpdir):
tree = dict(timedelta=TimeDelta(0.125, scale=scale))
assert_roundtrip_tree(tree, tmpdir)
Fix the ASDF test for TimeDelta formats | # Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import pytest
asdf = pytest.importorskip('asdf')
from asdf.tests.helpers import assert_roundtrip_tree
from astropy.time import Time, TimeDelta
@pytest.mark.parametrize('fmt', TimeDelta.FORMATS.keys())
def test_timedelta(fmt, tmpdir):
t1 = Time(Time.now())
t2 = Time(Time.now())
td = TimeDelta(t2 - t1, format=fmt)
tree = dict(timedelta=td)
assert_roundtrip_tree(tree, tmpdir)
@pytest.mark.parametrize('scale', list(TimeDelta.SCALES) + [None])
def test_timedetal_scales(scale, tmpdir):
tree = dict(timedelta=TimeDelta(0.125, scale=scale))
assert_roundtrip_tree(tree, tmpdir)
| <commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import pytest
asdf = pytest.importorskip('asdf')
from asdf.tests.helpers import assert_roundtrip_tree
from astropy.time import Time, TimeDelta
@pytest.mark.parametrize('fmt', Time.FORMATS.keys())
def test_timedelta(fmt, tmpdir):
t1 = Time(Time.now(), format=fmt)
t2 = Time(Time.now(), format=fmt)
td = t2 - t1
tree = dict(timedelta=td)
assert_roundtrip_tree(tree, tmpdir)
@pytest.mark.parametrize('scale', list(TimeDelta.SCALES) + [None])
def test_timedetal_scales(scale, tmpdir):
tree = dict(timedelta=TimeDelta(0.125, scale=scale))
assert_roundtrip_tree(tree, tmpdir)
<commit_msg>Fix the ASDF test for TimeDelta formats<commit_after> | # Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import pytest
asdf = pytest.importorskip('asdf')
from asdf.tests.helpers import assert_roundtrip_tree
from astropy.time import Time, TimeDelta
@pytest.mark.parametrize('fmt', TimeDelta.FORMATS.keys())
def test_timedelta(fmt, tmpdir):
t1 = Time(Time.now())
t2 = Time(Time.now())
td = TimeDelta(t2 - t1, format=fmt)
tree = dict(timedelta=td)
assert_roundtrip_tree(tree, tmpdir)
@pytest.mark.parametrize('scale', list(TimeDelta.SCALES) + [None])
def test_timedetal_scales(scale, tmpdir):
tree = dict(timedelta=TimeDelta(0.125, scale=scale))
assert_roundtrip_tree(tree, tmpdir)
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import pytest
asdf = pytest.importorskip('asdf')
from asdf.tests.helpers import assert_roundtrip_tree
from astropy.time import Time, TimeDelta
@pytest.mark.parametrize('fmt', Time.FORMATS.keys())
def test_timedelta(fmt, tmpdir):
t1 = Time(Time.now(), format=fmt)
t2 = Time(Time.now(), format=fmt)
td = t2 - t1
tree = dict(timedelta=td)
assert_roundtrip_tree(tree, tmpdir)
@pytest.mark.parametrize('scale', list(TimeDelta.SCALES) + [None])
def test_timedetal_scales(scale, tmpdir):
tree = dict(timedelta=TimeDelta(0.125, scale=scale))
assert_roundtrip_tree(tree, tmpdir)
Fix the ASDF test for TimeDelta formats# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import pytest
asdf = pytest.importorskip('asdf')
from asdf.tests.helpers import assert_roundtrip_tree
from astropy.time import Time, TimeDelta
@pytest.mark.parametrize('fmt', TimeDelta.FORMATS.keys())
def test_timedelta(fmt, tmpdir):
t1 = Time(Time.now())
t2 = Time(Time.now())
td = TimeDelta(t2 - t1, format=fmt)
tree = dict(timedelta=td)
assert_roundtrip_tree(tree, tmpdir)
@pytest.mark.parametrize('scale', list(TimeDelta.SCALES) + [None])
def test_timedetal_scales(scale, tmpdir):
tree = dict(timedelta=TimeDelta(0.125, scale=scale))
assert_roundtrip_tree(tree, tmpdir)
| <commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import pytest
asdf = pytest.importorskip('asdf')
from asdf.tests.helpers import assert_roundtrip_tree
from astropy.time import Time, TimeDelta
@pytest.mark.parametrize('fmt', Time.FORMATS.keys())
def test_timedelta(fmt, tmpdir):
t1 = Time(Time.now(), format=fmt)
t2 = Time(Time.now(), format=fmt)
td = t2 - t1
tree = dict(timedelta=td)
assert_roundtrip_tree(tree, tmpdir)
@pytest.mark.parametrize('scale', list(TimeDelta.SCALES) + [None])
def test_timedetal_scales(scale, tmpdir):
tree = dict(timedelta=TimeDelta(0.125, scale=scale))
assert_roundtrip_tree(tree, tmpdir)
<commit_msg>Fix the ASDF test for TimeDelta formats<commit_after># Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import pytest
asdf = pytest.importorskip('asdf')
from asdf.tests.helpers import assert_roundtrip_tree
from astropy.time import Time, TimeDelta
@pytest.mark.parametrize('fmt', TimeDelta.FORMATS.keys())
def test_timedelta(fmt, tmpdir):
t1 = Time(Time.now())
t2 = Time(Time.now())
td = TimeDelta(t2 - t1, format=fmt)
tree = dict(timedelta=td)
assert_roundtrip_tree(tree, tmpdir)
@pytest.mark.parametrize('scale', list(TimeDelta.SCALES) + [None])
def test_timedetal_scales(scale, tmpdir):
tree = dict(timedelta=TimeDelta(0.125, scale=scale))
assert_roundtrip_tree(tree, tmpdir)
|
63a32acb6e2f9aadec015361f04283999f75be79 | examples/app/localmodule.py | examples/app/localmodule.py | def install_module(app):
"""Installs this localmodule."""
install_module
| class IndexResource(object):
def on_get(self, req, res):
res.body = 'Hello. This is app.'
def install_module(app):
"""Installs this localmodule."""
app.api.add_route('/', IndexResource())
| Add IndexResource to example module. | Add IndexResource to example module.
| Python | apache-2.0 | slinghq/sling | def install_module(app):
"""Installs this localmodule."""
install_module
Add IndexResource to example module. | class IndexResource(object):
def on_get(self, req, res):
res.body = 'Hello. This is app.'
def install_module(app):
"""Installs this localmodule."""
app.api.add_route('/', IndexResource())
| <commit_before>def install_module(app):
"""Installs this localmodule."""
install_module
<commit_msg>Add IndexResource to example module.<commit_after> | class IndexResource(object):
def on_get(self, req, res):
res.body = 'Hello. This is app.'
def install_module(app):
"""Installs this localmodule."""
app.api.add_route('/', IndexResource())
| def install_module(app):
"""Installs this localmodule."""
install_module
Add IndexResource to example module.class IndexResource(object):
def on_get(self, req, res):
res.body = 'Hello. This is app.'
def install_module(app):
"""Installs this localmodule."""
app.api.add_route('/', IndexResource())
| <commit_before>def install_module(app):
"""Installs this localmodule."""
install_module
<commit_msg>Add IndexResource to example module.<commit_after>class IndexResource(object):
def on_get(self, req, res):
res.body = 'Hello. This is app.'
def install_module(app):
"""Installs this localmodule."""
app.api.add_route('/', IndexResource())
|
7b7f626c54694ec72166094ad568254ecfcdce8a | strictyaml/__init__.py | strictyaml/__init__.py | # The all important loader
from strictyaml.parser import load
# Validators
from strictyaml.validators import Optional
from strictyaml.validators import Validator
from strictyaml.validators import OrValidator
from strictyaml.validators import Any
from strictyaml.validators import Scalar
from strictyaml.validators import Enum
from strictyaml.validators import Str
from strictyaml.validators import Int
from strictyaml.validators import Bool
from strictyaml.validators import Float
from strictyaml.validators import Decimal
from strictyaml.validators import Map
from strictyaml.validators import MapPattern
from strictyaml.validators import Seq
from strictyaml.validators import UniqueSeq
# Exceptions
from strictyaml.exceptions import StrictYAMLError
# Validaton
from strictyaml.exceptions import YAMLValidationError
# Disallowed token exceptions
from strictyaml.exceptions import DisallowedToken
from strictyaml.exceptions import TagTokenDisallowed
from strictyaml.exceptions import FlowMappingDisallowed
from strictyaml.exceptions import AnchorTokenDisallowed | # The all important loader
from strictyaml.parser import load
# Validators
from strictyaml.validators import Optional
from strictyaml.validators import Validator
from strictyaml.validators import OrValidator
from strictyaml.validators import Any
from strictyaml.validators import Scalar
from strictyaml.validators import Enum
from strictyaml.validators import Str
from strictyaml.validators import Int
from strictyaml.validators import Bool
from strictyaml.validators import Float
from strictyaml.validators import Decimal
from strictyaml.validators import Map
from strictyaml.validators import MapPattern
from strictyaml.validators import Seq
from strictyaml.validators import UniqueSeq
# Exceptions
from strictyaml.exceptions import YAMLError
from strictyaml.exceptions import StrictYAMLError
from strictyaml.exceptions import YAMLValidationError
# Disallowed token exceptions
from strictyaml.exceptions import DisallowedToken
from strictyaml.exceptions import TagTokenDisallowed
from strictyaml.exceptions import FlowMappingDisallowed
from strictyaml.exceptions import AnchorTokenDisallowed | REFACTOR : Import YAMLError so it is usable as a generic exception. | REFACTOR : Import YAMLError so it is usable as a generic exception.
| Python | mit | crdoconnor/strictyaml,crdoconnor/strictyaml | # The all important loader
from strictyaml.parser import load
# Validators
from strictyaml.validators import Optional
from strictyaml.validators import Validator
from strictyaml.validators import OrValidator
from strictyaml.validators import Any
from strictyaml.validators import Scalar
from strictyaml.validators import Enum
from strictyaml.validators import Str
from strictyaml.validators import Int
from strictyaml.validators import Bool
from strictyaml.validators import Float
from strictyaml.validators import Decimal
from strictyaml.validators import Map
from strictyaml.validators import MapPattern
from strictyaml.validators import Seq
from strictyaml.validators import UniqueSeq
# Exceptions
from strictyaml.exceptions import StrictYAMLError
# Validaton
from strictyaml.exceptions import YAMLValidationError
# Disallowed token exceptions
from strictyaml.exceptions import DisallowedToken
from strictyaml.exceptions import TagTokenDisallowed
from strictyaml.exceptions import FlowMappingDisallowed
from strictyaml.exceptions import AnchorTokenDisallowedREFACTOR : Import YAMLError so it is usable as a generic exception. | # The all important loader
from strictyaml.parser import load
# Validators
from strictyaml.validators import Optional
from strictyaml.validators import Validator
from strictyaml.validators import OrValidator
from strictyaml.validators import Any
from strictyaml.validators import Scalar
from strictyaml.validators import Enum
from strictyaml.validators import Str
from strictyaml.validators import Int
from strictyaml.validators import Bool
from strictyaml.validators import Float
from strictyaml.validators import Decimal
from strictyaml.validators import Map
from strictyaml.validators import MapPattern
from strictyaml.validators import Seq
from strictyaml.validators import UniqueSeq
# Exceptions
from strictyaml.exceptions import YAMLError
from strictyaml.exceptions import StrictYAMLError
from strictyaml.exceptions import YAMLValidationError
# Disallowed token exceptions
from strictyaml.exceptions import DisallowedToken
from strictyaml.exceptions import TagTokenDisallowed
from strictyaml.exceptions import FlowMappingDisallowed
from strictyaml.exceptions import AnchorTokenDisallowed | <commit_before># The all important loader
from strictyaml.parser import load
# Validators
from strictyaml.validators import Optional
from strictyaml.validators import Validator
from strictyaml.validators import OrValidator
from strictyaml.validators import Any
from strictyaml.validators import Scalar
from strictyaml.validators import Enum
from strictyaml.validators import Str
from strictyaml.validators import Int
from strictyaml.validators import Bool
from strictyaml.validators import Float
from strictyaml.validators import Decimal
from strictyaml.validators import Map
from strictyaml.validators import MapPattern
from strictyaml.validators import Seq
from strictyaml.validators import UniqueSeq
# Exceptions
from strictyaml.exceptions import StrictYAMLError
# Validaton
from strictyaml.exceptions import YAMLValidationError
# Disallowed token exceptions
from strictyaml.exceptions import DisallowedToken
from strictyaml.exceptions import TagTokenDisallowed
from strictyaml.exceptions import FlowMappingDisallowed
from strictyaml.exceptions import AnchorTokenDisallowed<commit_msg>REFACTOR : Import YAMLError so it is usable as a generic exception.<commit_after> | # The all important loader
from strictyaml.parser import load
# Validators
from strictyaml.validators import Optional
from strictyaml.validators import Validator
from strictyaml.validators import OrValidator
from strictyaml.validators import Any
from strictyaml.validators import Scalar
from strictyaml.validators import Enum
from strictyaml.validators import Str
from strictyaml.validators import Int
from strictyaml.validators import Bool
from strictyaml.validators import Float
from strictyaml.validators import Decimal
from strictyaml.validators import Map
from strictyaml.validators import MapPattern
from strictyaml.validators import Seq
from strictyaml.validators import UniqueSeq
# Exceptions
from strictyaml.exceptions import YAMLError
from strictyaml.exceptions import StrictYAMLError
from strictyaml.exceptions import YAMLValidationError
# Disallowed token exceptions
from strictyaml.exceptions import DisallowedToken
from strictyaml.exceptions import TagTokenDisallowed
from strictyaml.exceptions import FlowMappingDisallowed
from strictyaml.exceptions import AnchorTokenDisallowed | # The all important loader
from strictyaml.parser import load
# Validators
from strictyaml.validators import Optional
from strictyaml.validators import Validator
from strictyaml.validators import OrValidator
from strictyaml.validators import Any
from strictyaml.validators import Scalar
from strictyaml.validators import Enum
from strictyaml.validators import Str
from strictyaml.validators import Int
from strictyaml.validators import Bool
from strictyaml.validators import Float
from strictyaml.validators import Decimal
from strictyaml.validators import Map
from strictyaml.validators import MapPattern
from strictyaml.validators import Seq
from strictyaml.validators import UniqueSeq
# Exceptions
from strictyaml.exceptions import StrictYAMLError
# Validaton
from strictyaml.exceptions import YAMLValidationError
# Disallowed token exceptions
from strictyaml.exceptions import DisallowedToken
from strictyaml.exceptions import TagTokenDisallowed
from strictyaml.exceptions import FlowMappingDisallowed
from strictyaml.exceptions import AnchorTokenDisallowedREFACTOR : Import YAMLError so it is usable as a generic exception.# The all important loader
from strictyaml.parser import load
# Validators
from strictyaml.validators import Optional
from strictyaml.validators import Validator
from strictyaml.validators import OrValidator
from strictyaml.validators import Any
from strictyaml.validators import Scalar
from strictyaml.validators import Enum
from strictyaml.validators import Str
from strictyaml.validators import Int
from strictyaml.validators import Bool
from strictyaml.validators import Float
from strictyaml.validators import Decimal
from strictyaml.validators import Map
from strictyaml.validators import MapPattern
from strictyaml.validators import Seq
from strictyaml.validators import UniqueSeq
# Exceptions
from strictyaml.exceptions import YAMLError
from strictyaml.exceptions import StrictYAMLError
from strictyaml.exceptions import YAMLValidationError
# Disallowed token exceptions
from strictyaml.exceptions import DisallowedToken
from strictyaml.exceptions import TagTokenDisallowed
from strictyaml.exceptions import FlowMappingDisallowed
from strictyaml.exceptions import AnchorTokenDisallowed | <commit_before># The all important loader
from strictyaml.parser import load
# Validators
from strictyaml.validators import Optional
from strictyaml.validators import Validator
from strictyaml.validators import OrValidator
from strictyaml.validators import Any
from strictyaml.validators import Scalar
from strictyaml.validators import Enum
from strictyaml.validators import Str
from strictyaml.validators import Int
from strictyaml.validators import Bool
from strictyaml.validators import Float
from strictyaml.validators import Decimal
from strictyaml.validators import Map
from strictyaml.validators import MapPattern
from strictyaml.validators import Seq
from strictyaml.validators import UniqueSeq
# Exceptions
from strictyaml.exceptions import StrictYAMLError
# Validaton
from strictyaml.exceptions import YAMLValidationError
# Disallowed token exceptions
from strictyaml.exceptions import DisallowedToken
from strictyaml.exceptions import TagTokenDisallowed
from strictyaml.exceptions import FlowMappingDisallowed
from strictyaml.exceptions import AnchorTokenDisallowed<commit_msg>REFACTOR : Import YAMLError so it is usable as a generic exception.<commit_after># The all important loader
from strictyaml.parser import load
# Validators
from strictyaml.validators import Optional
from strictyaml.validators import Validator
from strictyaml.validators import OrValidator
from strictyaml.validators import Any
from strictyaml.validators import Scalar
from strictyaml.validators import Enum
from strictyaml.validators import Str
from strictyaml.validators import Int
from strictyaml.validators import Bool
from strictyaml.validators import Float
from strictyaml.validators import Decimal
from strictyaml.validators import Map
from strictyaml.validators import MapPattern
from strictyaml.validators import Seq
from strictyaml.validators import UniqueSeq
# Exceptions
from strictyaml.exceptions import YAMLError
from strictyaml.exceptions import StrictYAMLError
from strictyaml.exceptions import YAMLValidationError
# Disallowed token exceptions
from strictyaml.exceptions import DisallowedToken
from strictyaml.exceptions import TagTokenDisallowed
from strictyaml.exceptions import FlowMappingDisallowed
from strictyaml.exceptions import AnchorTokenDisallowed |
a4e9198194e08b99e11802dd260bd5c203179211 | Mollie/API/Object/Customer.py | Mollie/API/Object/Customer.py | from .Base import Base
class Customer(Base):
@property
def id(self):
return self.getProperty('id')
@property
def name(self):
return self.getProperty('name')
@property
def email(self):
return self.getProperty('email')
@property
def locale(self):
return self.getProperty('locale')
@property
def metadata(self):
if 'metadata' not in self:
return None
return self.getProperty('metadata')
@property
def mode(self):
return self.getProperty('mode')
@property
def resource(self):
return self.getProperty('resource')
@property
def createdAt(self):
return self.getProperty('createdAt')
| from .Base import Base
class Customer(Base):
@property
def id(self):
return self.getProperty('id')
@property
def name(self):
return self.getProperty('name')
@property
def email(self):
return self.getProperty('email')
@property
def locale(self):
return self.getProperty('locale')
@property
def metadata(self):
return self.getProperty('metadata')
@property
def mode(self):
return self.getProperty('mode')
@property
def resource(self):
return self.getProperty('resource')
@property
def createdAt(self):
return self.getProperty('createdAt')
| Remove forgotten if in customer.py | Remove forgotten if in customer.py
| Python | bsd-2-clause | mollie/mollie-api-python | from .Base import Base
class Customer(Base):
@property
def id(self):
return self.getProperty('id')
@property
def name(self):
return self.getProperty('name')
@property
def email(self):
return self.getProperty('email')
@property
def locale(self):
return self.getProperty('locale')
@property
def metadata(self):
if 'metadata' not in self:
return None
return self.getProperty('metadata')
@property
def mode(self):
return self.getProperty('mode')
@property
def resource(self):
return self.getProperty('resource')
@property
def createdAt(self):
return self.getProperty('createdAt')
Remove forgotten if in customer.py | from .Base import Base
class Customer(Base):
@property
def id(self):
return self.getProperty('id')
@property
def name(self):
return self.getProperty('name')
@property
def email(self):
return self.getProperty('email')
@property
def locale(self):
return self.getProperty('locale')
@property
def metadata(self):
return self.getProperty('metadata')
@property
def mode(self):
return self.getProperty('mode')
@property
def resource(self):
return self.getProperty('resource')
@property
def createdAt(self):
return self.getProperty('createdAt')
| <commit_before>from .Base import Base
class Customer(Base):
@property
def id(self):
return self.getProperty('id')
@property
def name(self):
return self.getProperty('name')
@property
def email(self):
return self.getProperty('email')
@property
def locale(self):
return self.getProperty('locale')
@property
def metadata(self):
if 'metadata' not in self:
return None
return self.getProperty('metadata')
@property
def mode(self):
return self.getProperty('mode')
@property
def resource(self):
return self.getProperty('resource')
@property
def createdAt(self):
return self.getProperty('createdAt')
<commit_msg>Remove forgotten if in customer.py<commit_after> | from .Base import Base
class Customer(Base):
@property
def id(self):
return self.getProperty('id')
@property
def name(self):
return self.getProperty('name')
@property
def email(self):
return self.getProperty('email')
@property
def locale(self):
return self.getProperty('locale')
@property
def metadata(self):
return self.getProperty('metadata')
@property
def mode(self):
return self.getProperty('mode')
@property
def resource(self):
return self.getProperty('resource')
@property
def createdAt(self):
return self.getProperty('createdAt')
| from .Base import Base
class Customer(Base):
@property
def id(self):
return self.getProperty('id')
@property
def name(self):
return self.getProperty('name')
@property
def email(self):
return self.getProperty('email')
@property
def locale(self):
return self.getProperty('locale')
@property
def metadata(self):
if 'metadata' not in self:
return None
return self.getProperty('metadata')
@property
def mode(self):
return self.getProperty('mode')
@property
def resource(self):
return self.getProperty('resource')
@property
def createdAt(self):
return self.getProperty('createdAt')
Remove forgotten if in customer.pyfrom .Base import Base
class Customer(Base):
@property
def id(self):
return self.getProperty('id')
@property
def name(self):
return self.getProperty('name')
@property
def email(self):
return self.getProperty('email')
@property
def locale(self):
return self.getProperty('locale')
@property
def metadata(self):
return self.getProperty('metadata')
@property
def mode(self):
return self.getProperty('mode')
@property
def resource(self):
return self.getProperty('resource')
@property
def createdAt(self):
return self.getProperty('createdAt')
| <commit_before>from .Base import Base
class Customer(Base):
@property
def id(self):
return self.getProperty('id')
@property
def name(self):
return self.getProperty('name')
@property
def email(self):
return self.getProperty('email')
@property
def locale(self):
return self.getProperty('locale')
@property
def metadata(self):
if 'metadata' not in self:
return None
return self.getProperty('metadata')
@property
def mode(self):
return self.getProperty('mode')
@property
def resource(self):
return self.getProperty('resource')
@property
def createdAt(self):
return self.getProperty('createdAt')
<commit_msg>Remove forgotten if in customer.py<commit_after>from .Base import Base
class Customer(Base):
@property
def id(self):
return self.getProperty('id')
@property
def name(self):
return self.getProperty('name')
@property
def email(self):
return self.getProperty('email')
@property
def locale(self):
return self.getProperty('locale')
@property
def metadata(self):
return self.getProperty('metadata')
@property
def mode(self):
return self.getProperty('mode')
@property
def resource(self):
return self.getProperty('resource')
@property
def createdAt(self):
return self.getProperty('createdAt')
|
38b739941f9d002e8ad345388ff41418fe4160a4 | flexget/plugins/input/sonarr.py | flexget/plugins/input/sonarr.py | from __future__ import unicode_literals, division, absolute_import
import logging
import requests
from flexget import plugin
from flexget.event import event
from flexget.entry import Entry
log = logging.getLogger('sonarr')
class Sonarr(object):
schema = {
'type': 'object',
'properties': {
'base_url': {'type': 'string'},
'port': {'type': 'number'},
'api_key': {'type':'string'}
},
'required': ['api_key', 'base_url', 'port'],
'additionalProperties': False
}
def on_task_input(self, task, config):
url = '%s:%s/api/series' % (config['base_url'], config['port'])
headers = {'X-Api-Key': config['api_key']}
json = task.requests.get(url, headers=headers).json()
entries = []
for show in json:
showName = show['title']
entry = Entry(title=showName,
url = '',
series_name=showName)
if entry.isvalid():
entries.append(entry)
else:
log.debug('Invalid entry created? %s' % entry)
return entries
@event('plugin.register')
def register_plugin():
plugin.register(Sonarr, 'sonarr', api_ver=2)
| from __future__ import unicode_literals, division, absolute_import
import logging
import requests
from flexget import plugin
from flexget.event import event
from flexget.entry import Entry
log = logging.getLogger('sonarr')
class Sonarr(object):
schema = {
'type': 'object',
'properties': {
'base_url': {'type': 'string'},
'port': {'type': 'number'},
'api_key': {'type': 'string'}
},
'required': ['api_key', 'base_url', 'port'],
'additionalProperties': False
}
def on_task_input(self, task, config):
url = '%s:%s/api/series' % (config['base_url'], config['port'])
headers = {'X-Api-Key': config['api_key']}
json = task.requests.get(url, headers=headers).json()
entries = []
for show in json:
entry = Entry(title=show['title'],
url='',
series_name=show['title'],
tvdb_id=show['tvdbId'],
tvrage_id=show['tvRageId'])
if entry.isvalid():
entries.append(entry)
else:
log.debug('Invalid entry created? %s' % entry)
return entries
@event('plugin.register')
def register_plugin():
plugin.register(Sonarr, 'sonarr', api_ver=2)
| Fix PEP8 and add more data | Fix PEP8 and add more data
Added tvdb_id and tvrage_id entries to returned data
| Python | mit | grrr2/Flexget,ianstalk/Flexget,thalamus/Flexget,Pretagonist/Flexget,jacobmetrick/Flexget,ratoaq2/Flexget,offbyone/Flexget,oxc/Flexget,grrr2/Flexget,ibrahimkarahan/Flexget,crawln45/Flexget,sean797/Flexget,gazpachoking/Flexget,Flexget/Flexget,qk4l/Flexget,ianstalk/Flexget,offbyone/Flexget,xfouloux/Flexget,antivirtel/Flexget,JorisDeRieck/Flexget,antivirtel/Flexget,cvium/Flexget,Danfocus/Flexget,LynxyssCZ/Flexget,Flexget/Flexget,Danfocus/Flexget,grrr2/Flexget,jawilson/Flexget,oxc/Flexget,malkavi/Flexget,cvium/Flexget,tarzasai/Flexget,tobinjt/Flexget,jawilson/Flexget,malkavi/Flexget,dsemi/Flexget,offbyone/Flexget,qk4l/Flexget,LynxyssCZ/Flexget,JorisDeRieck/Flexget,thalamus/Flexget,jacobmetrick/Flexget,patsissons/Flexget,malkavi/Flexget,drwyrm/Flexget,spencerjanssen/Flexget,OmgOhnoes/Flexget,sean797/Flexget,ibrahimkarahan/Flexget,jacobmetrick/Flexget,spencerjanssen/Flexget,oxc/Flexget,xfouloux/Flexget,tobinjt/Flexget,tarzasai/Flexget,ratoaq2/Flexget,jawilson/Flexget,sean797/Flexget,crawln45/Flexget,Danfocus/Flexget,Pretagonist/Flexget,lildadou/Flexget,poulpito/Flexget,drwyrm/Flexget,antivirtel/Flexget,patsissons/Flexget,OmgOhnoes/Flexget,Flexget/Flexget,gazpachoking/Flexget,Danfocus/Flexget,JorisDeRieck/Flexget,Flexget/Flexget,malkavi/Flexget,ZefQ/Flexget,tsnoam/Flexget,xfouloux/Flexget,qk4l/Flexget,tarzasai/Flexget,tobinjt/Flexget,ibrahimkarahan/Flexget,Pretagonist/Flexget,lildadou/Flexget,tobinjt/Flexget,tsnoam/Flexget,qvazzler/Flexget,crawln45/Flexget,cvium/Flexget,patsissons/Flexget,thalamus/Flexget,spencerjanssen/Flexget,OmgOhnoes/Flexget,ZefQ/Flexget,crawln45/Flexget,poulpito/Flexget,LynxyssCZ/Flexget,drwyrm/Flexget,ZefQ/Flexget,ianstalk/Flexget,dsemi/Flexget,poulpito/Flexget,tsnoam/Flexget,dsemi/Flexget,LynxyssCZ/Flexget,JorisDeRieck/Flexget,qvazzler/Flexget,lildadou/Flexget,ratoaq2/Flexget,jawilson/Flexget,qvazzler/Flexget | from __future__ import unicode_literals, division, absolute_import
import logging
import requests
from flexget import plugin
from flexget.event import event
from flexget.entry import Entry
log = logging.getLogger('sonarr')
class Sonarr(object):
schema = {
'type': 'object',
'properties': {
'base_url': {'type': 'string'},
'port': {'type': 'number'},
'api_key': {'type':'string'}
},
'required': ['api_key', 'base_url', 'port'],
'additionalProperties': False
}
def on_task_input(self, task, config):
url = '%s:%s/api/series' % (config['base_url'], config['port'])
headers = {'X-Api-Key': config['api_key']}
json = task.requests.get(url, headers=headers).json()
entries = []
for show in json:
showName = show['title']
entry = Entry(title=showName,
url = '',
series_name=showName)
if entry.isvalid():
entries.append(entry)
else:
log.debug('Invalid entry created? %s' % entry)
return entries
@event('plugin.register')
def register_plugin():
plugin.register(Sonarr, 'sonarr', api_ver=2)
Fix PEP8 and add more data
Added tvdb_id and tvrage_id entries to returned data | from __future__ import unicode_literals, division, absolute_import
import logging
import requests
from flexget import plugin
from flexget.event import event
from flexget.entry import Entry
log = logging.getLogger('sonarr')
class Sonarr(object):
schema = {
'type': 'object',
'properties': {
'base_url': {'type': 'string'},
'port': {'type': 'number'},
'api_key': {'type': 'string'}
},
'required': ['api_key', 'base_url', 'port'],
'additionalProperties': False
}
def on_task_input(self, task, config):
url = '%s:%s/api/series' % (config['base_url'], config['port'])
headers = {'X-Api-Key': config['api_key']}
json = task.requests.get(url, headers=headers).json()
entries = []
for show in json:
entry = Entry(title=show['title'],
url='',
series_name=show['title'],
tvdb_id=show['tvdbId'],
tvrage_id=show['tvRageId'])
if entry.isvalid():
entries.append(entry)
else:
log.debug('Invalid entry created? %s' % entry)
return entries
@event('plugin.register')
def register_plugin():
plugin.register(Sonarr, 'sonarr', api_ver=2)
| <commit_before>from __future__ import unicode_literals, division, absolute_import
import logging
import requests
from flexget import plugin
from flexget.event import event
from flexget.entry import Entry
log = logging.getLogger('sonarr')
class Sonarr(object):
schema = {
'type': 'object',
'properties': {
'base_url': {'type': 'string'},
'port': {'type': 'number'},
'api_key': {'type':'string'}
},
'required': ['api_key', 'base_url', 'port'],
'additionalProperties': False
}
def on_task_input(self, task, config):
url = '%s:%s/api/series' % (config['base_url'], config['port'])
headers = {'X-Api-Key': config['api_key']}
json = task.requests.get(url, headers=headers).json()
entries = []
for show in json:
showName = show['title']
entry = Entry(title=showName,
url = '',
series_name=showName)
if entry.isvalid():
entries.append(entry)
else:
log.debug('Invalid entry created? %s' % entry)
return entries
@event('plugin.register')
def register_plugin():
plugin.register(Sonarr, 'sonarr', api_ver=2)
<commit_msg>Fix PEP8 and add more data
Added tvdb_id and tvrage_id entries to returned data<commit_after> | from __future__ import unicode_literals, division, absolute_import
import logging
import requests
from flexget import plugin
from flexget.event import event
from flexget.entry import Entry
log = logging.getLogger('sonarr')
class Sonarr(object):
schema = {
'type': 'object',
'properties': {
'base_url': {'type': 'string'},
'port': {'type': 'number'},
'api_key': {'type': 'string'}
},
'required': ['api_key', 'base_url', 'port'],
'additionalProperties': False
}
def on_task_input(self, task, config):
url = '%s:%s/api/series' % (config['base_url'], config['port'])
headers = {'X-Api-Key': config['api_key']}
json = task.requests.get(url, headers=headers).json()
entries = []
for show in json:
entry = Entry(title=show['title'],
url='',
series_name=show['title'],
tvdb_id=show['tvdbId'],
tvrage_id=show['tvRageId'])
if entry.isvalid():
entries.append(entry)
else:
log.debug('Invalid entry created? %s' % entry)
return entries
@event('plugin.register')
def register_plugin():
plugin.register(Sonarr, 'sonarr', api_ver=2)
| from __future__ import unicode_literals, division, absolute_import
import logging
import requests
from flexget import plugin
from flexget.event import event
from flexget.entry import Entry
log = logging.getLogger('sonarr')
class Sonarr(object):
schema = {
'type': 'object',
'properties': {
'base_url': {'type': 'string'},
'port': {'type': 'number'},
'api_key': {'type':'string'}
},
'required': ['api_key', 'base_url', 'port'],
'additionalProperties': False
}
def on_task_input(self, task, config):
url = '%s:%s/api/series' % (config['base_url'], config['port'])
headers = {'X-Api-Key': config['api_key']}
json = task.requests.get(url, headers=headers).json()
entries = []
for show in json:
showName = show['title']
entry = Entry(title=showName,
url = '',
series_name=showName)
if entry.isvalid():
entries.append(entry)
else:
log.debug('Invalid entry created? %s' % entry)
return entries
@event('plugin.register')
def register_plugin():
plugin.register(Sonarr, 'sonarr', api_ver=2)
Fix PEP8 and add more data
Added tvdb_id and tvrage_id entries to returned datafrom __future__ import unicode_literals, division, absolute_import
import logging
import requests
from flexget import plugin
from flexget.event import event
from flexget.entry import Entry
log = logging.getLogger('sonarr')
class Sonarr(object):
schema = {
'type': 'object',
'properties': {
'base_url': {'type': 'string'},
'port': {'type': 'number'},
'api_key': {'type': 'string'}
},
'required': ['api_key', 'base_url', 'port'],
'additionalProperties': False
}
def on_task_input(self, task, config):
url = '%s:%s/api/series' % (config['base_url'], config['port'])
headers = {'X-Api-Key': config['api_key']}
json = task.requests.get(url, headers=headers).json()
entries = []
for show in json:
entry = Entry(title=show['title'],
url='',
series_name=show['title'],
tvdb_id=show['tvdbId'],
tvrage_id=show['tvRageId'])
if entry.isvalid():
entries.append(entry)
else:
log.debug('Invalid entry created? %s' % entry)
return entries
@event('plugin.register')
def register_plugin():
plugin.register(Sonarr, 'sonarr', api_ver=2)
| <commit_before>from __future__ import unicode_literals, division, absolute_import
import logging
import requests
from flexget import plugin
from flexget.event import event
from flexget.entry import Entry
log = logging.getLogger('sonarr')
class Sonarr(object):
schema = {
'type': 'object',
'properties': {
'base_url': {'type': 'string'},
'port': {'type': 'number'},
'api_key': {'type':'string'}
},
'required': ['api_key', 'base_url', 'port'],
'additionalProperties': False
}
def on_task_input(self, task, config):
url = '%s:%s/api/series' % (config['base_url'], config['port'])
headers = {'X-Api-Key': config['api_key']}
json = task.requests.get(url, headers=headers).json()
entries = []
for show in json:
showName = show['title']
entry = Entry(title=showName,
url = '',
series_name=showName)
if entry.isvalid():
entries.append(entry)
else:
log.debug('Invalid entry created? %s' % entry)
return entries
@event('plugin.register')
def register_plugin():
plugin.register(Sonarr, 'sonarr', api_ver=2)
<commit_msg>Fix PEP8 and add more data
Added tvdb_id and tvrage_id entries to returned data<commit_after>from __future__ import unicode_literals, division, absolute_import
import logging
import requests
from flexget import plugin
from flexget.event import event
from flexget.entry import Entry
log = logging.getLogger('sonarr')
class Sonarr(object):
schema = {
'type': 'object',
'properties': {
'base_url': {'type': 'string'},
'port': {'type': 'number'},
'api_key': {'type': 'string'}
},
'required': ['api_key', 'base_url', 'port'],
'additionalProperties': False
}
def on_task_input(self, task, config):
url = '%s:%s/api/series' % (config['base_url'], config['port'])
headers = {'X-Api-Key': config['api_key']}
json = task.requests.get(url, headers=headers).json()
entries = []
for show in json:
entry = Entry(title=show['title'],
url='',
series_name=show['title'],
tvdb_id=show['tvdbId'],
tvrage_id=show['tvRageId'])
if entry.isvalid():
entries.append(entry)
else:
log.debug('Invalid entry created? %s' % entry)
return entries
@event('plugin.register')
def register_plugin():
plugin.register(Sonarr, 'sonarr', api_ver=2)
|
e6f85eb50ea1de37ba0f2c4ad75997a9da3879a0 | changes/api/jobphase_index.py | changes/api/jobphase_index.py | from __future__ import absolute_import
from flask import Response
from sqlalchemy.orm import joinedload, subqueryload_all
from changes.api.base import APIView
from changes.api.serializer.models.jobphase import JobPhaseWithStepsSerializer
from changes.models import Job, JobPhase, JobStep
class JobPhaseIndexAPIView(APIView):
def get(self, job_id):
job = Job.query.options(
subqueryload_all(Job.phases),
joinedload(Job.project),
joinedload(Job.author),
).get(job_id)
if job is None:
return Response(status=404)
phase_list = list(JobPhase.query.options(
subqueryload_all(JobPhase.steps, JobStep.node),
).filter(
JobPhase.job_id == job.id,
))
return self.respond(self.serialize(phase_list, {
JobPhase: JobPhaseWithStepsSerializer(),
}))
def get_stream_channels(self, job_id):
return [
'jobs:{0}'.format(job_id),
'testgroups:{0}:*'.format(job_id),
'logsources:{0}:*'.format(job_id),
]
| from __future__ import absolute_import
from flask import Response
from sqlalchemy.orm import joinedload, subqueryload_all
from changes.api.base import APIView
from changes.api.serializer.models.jobphase import JobPhaseWithStepsSerializer
from changes.models import Job, JobPhase, JobStep
class JobPhaseIndexAPIView(APIView):
def get(self, job_id):
job = Job.query.options(
subqueryload_all(Job.phases),
joinedload(Job.project),
joinedload(Job.author),
).get(job_id)
if job is None:
return Response(status=404)
phase_list = list(JobPhase.query.options(
subqueryload_all(JobPhase.steps, JobStep.node),
).filter(
JobPhase.job_id == job.id,
).order_by(JobPhase.date_started.asc(), JobPhase.date_created.asc()))
return self.respond(self.serialize(phase_list, {
JobPhase: JobPhaseWithStepsSerializer(),
}))
def get_stream_channels(self, job_id):
return [
'jobs:{0}'.format(job_id),
'testgroups:{0}:*'.format(job_id),
'logsources:{0}:*'.format(job_id),
]
| Order JobPhase by date started, date created | Order JobPhase by date started, date created
| Python | apache-2.0 | bowlofstew/changes,wfxiang08/changes,bowlofstew/changes,dropbox/changes,bowlofstew/changes,wfxiang08/changes,wfxiang08/changes,dropbox/changes,dropbox/changes,bowlofstew/changes,dropbox/changes,wfxiang08/changes | from __future__ import absolute_import
from flask import Response
from sqlalchemy.orm import joinedload, subqueryload_all
from changes.api.base import APIView
from changes.api.serializer.models.jobphase import JobPhaseWithStepsSerializer
from changes.models import Job, JobPhase, JobStep
class JobPhaseIndexAPIView(APIView):
def get(self, job_id):
job = Job.query.options(
subqueryload_all(Job.phases),
joinedload(Job.project),
joinedload(Job.author),
).get(job_id)
if job is None:
return Response(status=404)
phase_list = list(JobPhase.query.options(
subqueryload_all(JobPhase.steps, JobStep.node),
).filter(
JobPhase.job_id == job.id,
))
return self.respond(self.serialize(phase_list, {
JobPhase: JobPhaseWithStepsSerializer(),
}))
def get_stream_channels(self, job_id):
return [
'jobs:{0}'.format(job_id),
'testgroups:{0}:*'.format(job_id),
'logsources:{0}:*'.format(job_id),
]
Order JobPhase by date started, date created | from __future__ import absolute_import
from flask import Response
from sqlalchemy.orm import joinedload, subqueryload_all
from changes.api.base import APIView
from changes.api.serializer.models.jobphase import JobPhaseWithStepsSerializer
from changes.models import Job, JobPhase, JobStep
class JobPhaseIndexAPIView(APIView):
def get(self, job_id):
job = Job.query.options(
subqueryload_all(Job.phases),
joinedload(Job.project),
joinedload(Job.author),
).get(job_id)
if job is None:
return Response(status=404)
phase_list = list(JobPhase.query.options(
subqueryload_all(JobPhase.steps, JobStep.node),
).filter(
JobPhase.job_id == job.id,
).order_by(JobPhase.date_started.asc(), JobPhase.date_created.asc()))
return self.respond(self.serialize(phase_list, {
JobPhase: JobPhaseWithStepsSerializer(),
}))
def get_stream_channels(self, job_id):
return [
'jobs:{0}'.format(job_id),
'testgroups:{0}:*'.format(job_id),
'logsources:{0}:*'.format(job_id),
]
| <commit_before>from __future__ import absolute_import
from flask import Response
from sqlalchemy.orm import joinedload, subqueryload_all
from changes.api.base import APIView
from changes.api.serializer.models.jobphase import JobPhaseWithStepsSerializer
from changes.models import Job, JobPhase, JobStep
class JobPhaseIndexAPIView(APIView):
def get(self, job_id):
job = Job.query.options(
subqueryload_all(Job.phases),
joinedload(Job.project),
joinedload(Job.author),
).get(job_id)
if job is None:
return Response(status=404)
phase_list = list(JobPhase.query.options(
subqueryload_all(JobPhase.steps, JobStep.node),
).filter(
JobPhase.job_id == job.id,
))
return self.respond(self.serialize(phase_list, {
JobPhase: JobPhaseWithStepsSerializer(),
}))
def get_stream_channels(self, job_id):
return [
'jobs:{0}'.format(job_id),
'testgroups:{0}:*'.format(job_id),
'logsources:{0}:*'.format(job_id),
]
<commit_msg>Order JobPhase by date started, date created<commit_after> | from __future__ import absolute_import
from flask import Response
from sqlalchemy.orm import joinedload, subqueryload_all
from changes.api.base import APIView
from changes.api.serializer.models.jobphase import JobPhaseWithStepsSerializer
from changes.models import Job, JobPhase, JobStep
class JobPhaseIndexAPIView(APIView):
def get(self, job_id):
job = Job.query.options(
subqueryload_all(Job.phases),
joinedload(Job.project),
joinedload(Job.author),
).get(job_id)
if job is None:
return Response(status=404)
phase_list = list(JobPhase.query.options(
subqueryload_all(JobPhase.steps, JobStep.node),
).filter(
JobPhase.job_id == job.id,
).order_by(JobPhase.date_started.asc(), JobPhase.date_created.asc()))
return self.respond(self.serialize(phase_list, {
JobPhase: JobPhaseWithStepsSerializer(),
}))
def get_stream_channels(self, job_id):
return [
'jobs:{0}'.format(job_id),
'testgroups:{0}:*'.format(job_id),
'logsources:{0}:*'.format(job_id),
]
| from __future__ import absolute_import
from flask import Response
from sqlalchemy.orm import joinedload, subqueryload_all
from changes.api.base import APIView
from changes.api.serializer.models.jobphase import JobPhaseWithStepsSerializer
from changes.models import Job, JobPhase, JobStep
class JobPhaseIndexAPIView(APIView):
def get(self, job_id):
job = Job.query.options(
subqueryload_all(Job.phases),
joinedload(Job.project),
joinedload(Job.author),
).get(job_id)
if job is None:
return Response(status=404)
phase_list = list(JobPhase.query.options(
subqueryload_all(JobPhase.steps, JobStep.node),
).filter(
JobPhase.job_id == job.id,
))
return self.respond(self.serialize(phase_list, {
JobPhase: JobPhaseWithStepsSerializer(),
}))
def get_stream_channels(self, job_id):
return [
'jobs:{0}'.format(job_id),
'testgroups:{0}:*'.format(job_id),
'logsources:{0}:*'.format(job_id),
]
Order JobPhase by date started, date createdfrom __future__ import absolute_import
from flask import Response
from sqlalchemy.orm import joinedload, subqueryload_all
from changes.api.base import APIView
from changes.api.serializer.models.jobphase import JobPhaseWithStepsSerializer
from changes.models import Job, JobPhase, JobStep
class JobPhaseIndexAPIView(APIView):
def get(self, job_id):
job = Job.query.options(
subqueryload_all(Job.phases),
joinedload(Job.project),
joinedload(Job.author),
).get(job_id)
if job is None:
return Response(status=404)
phase_list = list(JobPhase.query.options(
subqueryload_all(JobPhase.steps, JobStep.node),
).filter(
JobPhase.job_id == job.id,
).order_by(JobPhase.date_started.asc(), JobPhase.date_created.asc()))
return self.respond(self.serialize(phase_list, {
JobPhase: JobPhaseWithStepsSerializer(),
}))
def get_stream_channels(self, job_id):
return [
'jobs:{0}'.format(job_id),
'testgroups:{0}:*'.format(job_id),
'logsources:{0}:*'.format(job_id),
]
| <commit_before>from __future__ import absolute_import
from flask import Response
from sqlalchemy.orm import joinedload, subqueryload_all
from changes.api.base import APIView
from changes.api.serializer.models.jobphase import JobPhaseWithStepsSerializer
from changes.models import Job, JobPhase, JobStep
class JobPhaseIndexAPIView(APIView):
def get(self, job_id):
job = Job.query.options(
subqueryload_all(Job.phases),
joinedload(Job.project),
joinedload(Job.author),
).get(job_id)
if job is None:
return Response(status=404)
phase_list = list(JobPhase.query.options(
subqueryload_all(JobPhase.steps, JobStep.node),
).filter(
JobPhase.job_id == job.id,
))
return self.respond(self.serialize(phase_list, {
JobPhase: JobPhaseWithStepsSerializer(),
}))
def get_stream_channels(self, job_id):
return [
'jobs:{0}'.format(job_id),
'testgroups:{0}:*'.format(job_id),
'logsources:{0}:*'.format(job_id),
]
<commit_msg>Order JobPhase by date started, date created<commit_after>from __future__ import absolute_import
from flask import Response
from sqlalchemy.orm import joinedload, subqueryload_all
from changes.api.base import APIView
from changes.api.serializer.models.jobphase import JobPhaseWithStepsSerializer
from changes.models import Job, JobPhase, JobStep
class JobPhaseIndexAPIView(APIView):
def get(self, job_id):
job = Job.query.options(
subqueryload_all(Job.phases),
joinedload(Job.project),
joinedload(Job.author),
).get(job_id)
if job is None:
return Response(status=404)
phase_list = list(JobPhase.query.options(
subqueryload_all(JobPhase.steps, JobStep.node),
).filter(
JobPhase.job_id == job.id,
).order_by(JobPhase.date_started.asc(), JobPhase.date_created.asc()))
return self.respond(self.serialize(phase_list, {
JobPhase: JobPhaseWithStepsSerializer(),
}))
def get_stream_channels(self, job_id):
return [
'jobs:{0}'.format(job_id),
'testgroups:{0}:*'.format(job_id),
'logsources:{0}:*'.format(job_id),
]
|
202f47bb5903786b0c6a09ea4e27ed558938d2da | dramadive/button_app.py | dramadive/button_app.py | #!/usr/bin/env python3
import os
from select import select
from sys import exit, stderr
from time import sleep
WRITE_BYTES = [0x0 for b in range(0, 8)]
WRITE_BYTES[0] = 0x08
WRITE_BYTES[7] = 0x02
CLOSED = 0x15
OPEN = 0x17
DOWN = 0x16
def main():
fd_id = os.open('/dev/big_red_button', os.O_RDWR|os.O_NONBLOCK)
fd = os.fdopen(fd_id, 'rb+')
last = None
return_code = 0
try:
while True:
data = fd.write(bytes(WRITE_BYTES))
data = fd.read(8)
if data is not None and data[0] != last:
if data[0] == CLOSED:
print('closed')
elif last == DOWN and data[0] == OPEN:
print('up')
elif data[0] == OPEN:
print('open')
elif data[0] == DOWN:
print('down')
last = data[0]
sleep(0.1)
except KeyboardInterrupt:
return_code = 0
except Exception as e:
print(e, file=stderr)
return_code = 1
finally:
fd.close()
return return_code
if __name__ == '__main__':
exit(main())
| #!/usr/bin/env python3
import os
import requests
from select import select
from sys import exit, stderr
from time import sleep
WRITE_BYTES = [0x0 for b in range(0, 8)]
WRITE_BYTES[0] = 0x08
WRITE_BYTES[7] = 0x02
CLOSED = 0x15
OPEN = 0x17
DOWN = 0x16
def main():
fd_id = os.open('/dev/big_red_button', os.O_RDWR|os.O_NONBLOCK)
fd = os.fdopen(fd_id, 'rb+')
last = None
return_code = 0
try:
while True:
data = fd.write(bytes(WRITE_BYTES))
data = fd.read(8)
if data is not None and data[0] != last:
if data[0] == CLOSED:
action = 'closed'
elif last == DOWN and data[0] == OPEN:
action = 'up'
elif data[0] == OPEN:
action = 'open'
elif data[0] == DOWN:
action = 'down'
last = data[0]
requests.post('http://requestb.in/ok0w6bok', data={"action": action})
sleep(0.1)
except KeyboardInterrupt:
return_code = 0
except Exception as e:
print(e, file=stderr)
return_code = 1
finally:
fd.close()
return return_code
if __name__ == '__main__':
exit(main())
| Use vendor and product id for udev rules | Use vendor and product id for udev rules
| Python | apache-2.0 | 1stvamp/dramadive-client | #!/usr/bin/env python3
import os
from select import select
from sys import exit, stderr
from time import sleep
WRITE_BYTES = [0x0 for b in range(0, 8)]
WRITE_BYTES[0] = 0x08
WRITE_BYTES[7] = 0x02
CLOSED = 0x15
OPEN = 0x17
DOWN = 0x16
def main():
fd_id = os.open('/dev/big_red_button', os.O_RDWR|os.O_NONBLOCK)
fd = os.fdopen(fd_id, 'rb+')
last = None
return_code = 0
try:
while True:
data = fd.write(bytes(WRITE_BYTES))
data = fd.read(8)
if data is not None and data[0] != last:
if data[0] == CLOSED:
print('closed')
elif last == DOWN and data[0] == OPEN:
print('up')
elif data[0] == OPEN:
print('open')
elif data[0] == DOWN:
print('down')
last = data[0]
sleep(0.1)
except KeyboardInterrupt:
return_code = 0
except Exception as e:
print(e, file=stderr)
return_code = 1
finally:
fd.close()
return return_code
if __name__ == '__main__':
exit(main())
Use vendor and product id for udev rules | #!/usr/bin/env python3
import os
import requests
from select import select
from sys import exit, stderr
from time import sleep
WRITE_BYTES = [0x0 for b in range(0, 8)]
WRITE_BYTES[0] = 0x08
WRITE_BYTES[7] = 0x02
CLOSED = 0x15
OPEN = 0x17
DOWN = 0x16
def main():
fd_id = os.open('/dev/big_red_button', os.O_RDWR|os.O_NONBLOCK)
fd = os.fdopen(fd_id, 'rb+')
last = None
return_code = 0
try:
while True:
data = fd.write(bytes(WRITE_BYTES))
data = fd.read(8)
if data is not None and data[0] != last:
if data[0] == CLOSED:
action = 'closed'
elif last == DOWN and data[0] == OPEN:
action = 'up'
elif data[0] == OPEN:
action = 'open'
elif data[0] == DOWN:
action = 'down'
last = data[0]
requests.post('http://requestb.in/ok0w6bok', data={"action": action})
sleep(0.1)
except KeyboardInterrupt:
return_code = 0
except Exception as e:
print(e, file=stderr)
return_code = 1
finally:
fd.close()
return return_code
if __name__ == '__main__':
exit(main())
| <commit_before>#!/usr/bin/env python3
import os
from select import select
from sys import exit, stderr
from time import sleep
WRITE_BYTES = [0x0 for b in range(0, 8)]
WRITE_BYTES[0] = 0x08
WRITE_BYTES[7] = 0x02
CLOSED = 0x15
OPEN = 0x17
DOWN = 0x16
def main():
fd_id = os.open('/dev/big_red_button', os.O_RDWR|os.O_NONBLOCK)
fd = os.fdopen(fd_id, 'rb+')
last = None
return_code = 0
try:
while True:
data = fd.write(bytes(WRITE_BYTES))
data = fd.read(8)
if data is not None and data[0] != last:
if data[0] == CLOSED:
print('closed')
elif last == DOWN and data[0] == OPEN:
print('up')
elif data[0] == OPEN:
print('open')
elif data[0] == DOWN:
print('down')
last = data[0]
sleep(0.1)
except KeyboardInterrupt:
return_code = 0
except Exception as e:
print(e, file=stderr)
return_code = 1
finally:
fd.close()
return return_code
if __name__ == '__main__':
exit(main())
<commit_msg>Use vendor and product id for udev rules<commit_after> | #!/usr/bin/env python3
import os
import requests
from select import select
from sys import exit, stderr
from time import sleep
WRITE_BYTES = [0x0 for b in range(0, 8)]
WRITE_BYTES[0] = 0x08
WRITE_BYTES[7] = 0x02
CLOSED = 0x15
OPEN = 0x17
DOWN = 0x16
def main():
fd_id = os.open('/dev/big_red_button', os.O_RDWR|os.O_NONBLOCK)
fd = os.fdopen(fd_id, 'rb+')
last = None
return_code = 0
try:
while True:
data = fd.write(bytes(WRITE_BYTES))
data = fd.read(8)
if data is not None and data[0] != last:
if data[0] == CLOSED:
action = 'closed'
elif last == DOWN and data[0] == OPEN:
action = 'up'
elif data[0] == OPEN:
action = 'open'
elif data[0] == DOWN:
action = 'down'
last = data[0]
requests.post('http://requestb.in/ok0w6bok', data={"action": action})
sleep(0.1)
except KeyboardInterrupt:
return_code = 0
except Exception as e:
print(e, file=stderr)
return_code = 1
finally:
fd.close()
return return_code
if __name__ == '__main__':
exit(main())
| #!/usr/bin/env python3
import os
from select import select
from sys import exit, stderr
from time import sleep
WRITE_BYTES = [0x0 for b in range(0, 8)]
WRITE_BYTES[0] = 0x08
WRITE_BYTES[7] = 0x02
CLOSED = 0x15
OPEN = 0x17
DOWN = 0x16
def main():
fd_id = os.open('/dev/big_red_button', os.O_RDWR|os.O_NONBLOCK)
fd = os.fdopen(fd_id, 'rb+')
last = None
return_code = 0
try:
while True:
data = fd.write(bytes(WRITE_BYTES))
data = fd.read(8)
if data is not None and data[0] != last:
if data[0] == CLOSED:
print('closed')
elif last == DOWN and data[0] == OPEN:
print('up')
elif data[0] == OPEN:
print('open')
elif data[0] == DOWN:
print('down')
last = data[0]
sleep(0.1)
except KeyboardInterrupt:
return_code = 0
except Exception as e:
print(e, file=stderr)
return_code = 1
finally:
fd.close()
return return_code
if __name__ == '__main__':
exit(main())
Use vendor and product id for udev rules#!/usr/bin/env python3
import os
import requests
from select import select
from sys import exit, stderr
from time import sleep
WRITE_BYTES = [0x0 for b in range(0, 8)]
WRITE_BYTES[0] = 0x08
WRITE_BYTES[7] = 0x02
CLOSED = 0x15
OPEN = 0x17
DOWN = 0x16
def main():
fd_id = os.open('/dev/big_red_button', os.O_RDWR|os.O_NONBLOCK)
fd = os.fdopen(fd_id, 'rb+')
last = None
return_code = 0
try:
while True:
data = fd.write(bytes(WRITE_BYTES))
data = fd.read(8)
if data is not None and data[0] != last:
if data[0] == CLOSED:
action = 'closed'
elif last == DOWN and data[0] == OPEN:
action = 'up'
elif data[0] == OPEN:
action = 'open'
elif data[0] == DOWN:
action = 'down'
last = data[0]
requests.post('http://requestb.in/ok0w6bok', data={"action": action})
sleep(0.1)
except KeyboardInterrupt:
return_code = 0
except Exception as e:
print(e, file=stderr)
return_code = 1
finally:
fd.close()
return return_code
if __name__ == '__main__':
exit(main())
| <commit_before>#!/usr/bin/env python3
import os
from select import select
from sys import exit, stderr
from time import sleep
WRITE_BYTES = [0x0 for b in range(0, 8)]
WRITE_BYTES[0] = 0x08
WRITE_BYTES[7] = 0x02
CLOSED = 0x15
OPEN = 0x17
DOWN = 0x16
def main():
fd_id = os.open('/dev/big_red_button', os.O_RDWR|os.O_NONBLOCK)
fd = os.fdopen(fd_id, 'rb+')
last = None
return_code = 0
try:
while True:
data = fd.write(bytes(WRITE_BYTES))
data = fd.read(8)
if data is not None and data[0] != last:
if data[0] == CLOSED:
print('closed')
elif last == DOWN and data[0] == OPEN:
print('up')
elif data[0] == OPEN:
print('open')
elif data[0] == DOWN:
print('down')
last = data[0]
sleep(0.1)
except KeyboardInterrupt:
return_code = 0
except Exception as e:
print(e, file=stderr)
return_code = 1
finally:
fd.close()
return return_code
if __name__ == '__main__':
exit(main())
<commit_msg>Use vendor and product id for udev rules<commit_after>#!/usr/bin/env python3
import os
import requests
from select import select
from sys import exit, stderr
from time import sleep
# 8-byte output report written to the device each poll cycle to request
# its current status.  Byte 0 = 0x08 and byte 7 = 0x02 form the poll
# command (presumably per the big-red-button HID protocol -- TODO confirm).
WRITE_BYTES = [0x0 for b in range(0, 8)]
WRITE_BYTES[0] = 0x08
WRITE_BYTES[7] = 0x02
# Status codes returned in the first byte of the device's 8-byte reply
# (meanings inferred from the actions posted in main()).
CLOSED = 0x15  # lid closed
OPEN = 0x17    # lid open, button not pressed
DOWN = 0x16    # button pressed
def main():
    """Poll the big-red-button device and POST each state change.

    Opens ``/dev/big_red_button`` read/write and non-blocking, then loops:
    writes the status-request report (``WRITE_BYTES``), reads the 8-byte
    reply, and when the status byte differs from the previous one posts
    the decoded action (``closed``/``open``/``down``/``up``) to the
    collection endpoint.  Runs until interrupted.

    Returns:
        int: 0 on clean exit (KeyboardInterrupt), 1 on any other error.
    """
    fd_id = os.open('/dev/big_red_button', os.O_RDWR | os.O_NONBLOCK)
    fd = os.fdopen(fd_id, 'rb+')
    last = None
    return_code = 0
    try:
        while True:
            # Request a status report; the write's return value is unused.
            fd.write(bytes(WRITE_BYTES))
            data = fd.read(8)
            if data is not None and data[0] != last:
                action = None
                if data[0] == CLOSED:
                    action = 'closed'
                elif last == DOWN and data[0] == OPEN:
                    # OPEN right after DOWN means the button was released.
                    action = 'up'
                elif data[0] == OPEN:
                    action = 'open'
                elif data[0] == DOWN:
                    action = 'down'
                last = data[0]
                # BUG FIX: guard the POST -- previously an unrecognised
                # status byte left `action` unassigned and the post raised
                # UnboundLocalError.
                if action is not None:
                    requests.post('http://requestb.in/ok0w6bok',
                                  data={"action": action})
            sleep(0.1)
    except KeyboardInterrupt:
        return_code = 0
    except Exception as e:
        print(e, file=stderr)
        return_code = 1
    finally:
        fd.close()
    return return_code
# Script entry point: run the poller and propagate its status code.
if __name__ == '__main__':
    exit(main())
|
c68a69beb03047d7ea388704fbb12074b32216bc | scenarios/UAS/alice_cfg.py | scenarios/UAS/alice_cfg.py | from lib.test_config import AUTH_CREDS as AUTH_CREDS_orig
class AUTH_CREDS(AUTH_CREDS_orig):
enalgs = ('SHA-256', 'MD5-sess', None)
def __init__(self):
AUTH_CREDS_orig.__init__(self, 'mightyuser', 's3cr3tpAssw0Rd')
| from lib.test_config import AUTH_CREDS as AUTH_CREDS_orig
class AUTH_CREDS(AUTH_CREDS_orig):
enalgs = ('SHA-256', 'SHA-256-sess', 'MD5-sess', None)
def __init__(self):
AUTH_CREDS_orig.__init__(self, 'mightyuser', 's3cr3tpAssw0Rd')
| Enable SHA-256-sess to see if it works or not. | Enable SHA-256-sess to see if it works or not.
| Python | bsd-2-clause | sippy/voiptests,sippy/voiptests | from lib.test_config import AUTH_CREDS as AUTH_CREDS_orig
class AUTH_CREDS(AUTH_CREDS_orig):
enalgs = ('SHA-256', 'MD5-sess', None)
def __init__(self):
AUTH_CREDS_orig.__init__(self, 'mightyuser', 's3cr3tpAssw0Rd')
Enable SHA-256-sess to see if it works or not. | from lib.test_config import AUTH_CREDS as AUTH_CREDS_orig
class AUTH_CREDS(AUTH_CREDS_orig):
enalgs = ('SHA-256', 'SHA-256-sess', 'MD5-sess', None)
def __init__(self):
AUTH_CREDS_orig.__init__(self, 'mightyuser', 's3cr3tpAssw0Rd')
| <commit_before>from lib.test_config import AUTH_CREDS as AUTH_CREDS_orig
class AUTH_CREDS(AUTH_CREDS_orig):
enalgs = ('SHA-256', 'MD5-sess', None)
def __init__(self):
AUTH_CREDS_orig.__init__(self, 'mightyuser', 's3cr3tpAssw0Rd')
<commit_msg>Enable SHA-256-sess to see if it works or not.<commit_after> | from lib.test_config import AUTH_CREDS as AUTH_CREDS_orig
class AUTH_CREDS(AUTH_CREDS_orig):
enalgs = ('SHA-256', 'SHA-256-sess', 'MD5-sess', None)
def __init__(self):
AUTH_CREDS_orig.__init__(self, 'mightyuser', 's3cr3tpAssw0Rd')
| from lib.test_config import AUTH_CREDS as AUTH_CREDS_orig
class AUTH_CREDS(AUTH_CREDS_orig):
enalgs = ('SHA-256', 'MD5-sess', None)
def __init__(self):
AUTH_CREDS_orig.__init__(self, 'mightyuser', 's3cr3tpAssw0Rd')
Enable SHA-256-sess to see if it works or not.from lib.test_config import AUTH_CREDS as AUTH_CREDS_orig
class AUTH_CREDS(AUTH_CREDS_orig):
enalgs = ('SHA-256', 'SHA-256-sess', 'MD5-sess', None)
def __init__(self):
AUTH_CREDS_orig.__init__(self, 'mightyuser', 's3cr3tpAssw0Rd')
| <commit_before>from lib.test_config import AUTH_CREDS as AUTH_CREDS_orig
class AUTH_CREDS(AUTH_CREDS_orig):
enalgs = ('SHA-256', 'MD5-sess', None)
def __init__(self):
AUTH_CREDS_orig.__init__(self, 'mightyuser', 's3cr3tpAssw0Rd')
<commit_msg>Enable SHA-256-sess to see if it works or not.<commit_after>from lib.test_config import AUTH_CREDS as AUTH_CREDS_orig
class AUTH_CREDS(AUTH_CREDS_orig):
enalgs = ('SHA-256', 'SHA-256-sess', 'MD5-sess', None)
def __init__(self):
AUTH_CREDS_orig.__init__(self, 'mightyuser', 's3cr3tpAssw0Rd')
|
f577606a84deb5dac36aa95500b0d21529560868 | src/permission/conf.py | src/permission/conf.py | # coding=utf-8
"""
django-permission application configure
"""
__author__ = 'Alisue <lambdalisue@hashnote.net>'
__all__ = ('settings',)
from django.conf import settings
from appconf import AppConf
from permission.handlers import LogicalPermissionHandler
class PermissionConf(AppConf):
DEFAULT_PERMISSION_HANDLER = LogicalPermissionHandler
"""Default permission handler class"""
CHECK_PERMISSION_PRESENCE = settings.DEBUG
"""Check if the specified string permission exists"""
REPLACE_BUILTIN_IF = True
"""Whether replace builtin if templatetag"""
DEFAULT_APL_FIELD_NAME = 'author'
DEFAULT_APL_ANY_PERMISSION = False
DEFAULT_APL_CHANGE_PERMISSION = True
DEFAULT_APL_DELETE_PERMISSION = True
DEFAULT_CPL_FIELD_NAME = 'collaborators'
DEFAULT_CPL_ANY_PERMISSION = False
DEFAULT_CPL_CHANGE_PERMISSION = True
DEFAULT_CPL_DELETE_PERMISSION = False
DEFAULT_GIPL_ANY_PERMISSION = False
DEFAULT_GIPL_ADD_PERMISSION = True
DEFAULT_GIPL_CHANGE_PERMISSION = True
DEFAULT_GIPL_DELETE_PERMISSION = False
AUTODISCOVER_ENABLE = True
AUTODISCOVER_MODULE_NAME = 'perms'
AUTODISCOVER_VARIABLE_NAME = 'PERMISSION_LOGICS'
| # coding=utf-8
"""
django-permission application configure
"""
__author__ = 'Alisue <lambdalisue@hashnote.net>'
__all__ = ('settings',)
from django.conf import settings
from appconf import AppConf
from permission.handlers import LogicalPermissionHandler
class PermissionConf(AppConf):
DEFAULT_PERMISSION_HANDLER = LogicalPermissionHandler
"""Default permission handler class"""
CHECK_PERMISSION_PRESENCE = settings.DEBUG
"""Check if the specified string permission exists"""
REPLACE_BUILTIN_IF = True
"""Whether replace builtin if templatetag"""
DEFAULT_APL_FIELD_NAME = 'author'
DEFAULT_APL_ANY_PERMISSION = False
DEFAULT_APL_CHANGE_PERMISSION = True
DEFAULT_APL_DELETE_PERMISSION = True
DEFAULT_CPL_FIELD_NAME = 'collaborators'
DEFAULT_CPL_ANY_PERMISSION = False
DEFAULT_CPL_CHANGE_PERMISSION = True
DEFAULT_CPL_DELETE_PERMISSION = False
DEFAULT_GIPL_ANY_PERMISSION = False
DEFAULT_GIPL_ADD_PERMISSION = True
DEFAULT_GIPL_CHANGE_PERMISSION = True
DEFAULT_GIPL_DELETE_PERMISSION = False
DEFAULT_SPL_ANY_PERMISSION = False
DEFAULT_SPL_ADD_PERMISSION = True
DEFAULT_SPL_CHANGE_PERMISSION = True
DEFAULT_SPL_DELETE_PERMISSION = False
AUTODISCOVER_ENABLE = True
AUTODISCOVER_MODULE_NAME = 'perms'
AUTODISCOVER_VARIABLE_NAME = 'PERMISSION_LOGICS'
| Add default permission of StaffPermissionLogic | Add default permission of StaffPermissionLogic
| Python | mit | lambdalisue/django-permission | # coding=utf-8
"""
django-permission application configure
"""
__author__ = 'Alisue <lambdalisue@hashnote.net>'
__all__ = ('settings',)
from django.conf import settings
from appconf import AppConf
from permission.handlers import LogicalPermissionHandler
class PermissionConf(AppConf):
DEFAULT_PERMISSION_HANDLER = LogicalPermissionHandler
"""Default permission handler class"""
CHECK_PERMISSION_PRESENCE = settings.DEBUG
"""Check if the specified string permission exists"""
REPLACE_BUILTIN_IF = True
"""Whether replace builtin if templatetag"""
DEFAULT_APL_FIELD_NAME = 'author'
DEFAULT_APL_ANY_PERMISSION = False
DEFAULT_APL_CHANGE_PERMISSION = True
DEFAULT_APL_DELETE_PERMISSION = True
DEFAULT_CPL_FIELD_NAME = 'collaborators'
DEFAULT_CPL_ANY_PERMISSION = False
DEFAULT_CPL_CHANGE_PERMISSION = True
DEFAULT_CPL_DELETE_PERMISSION = False
DEFAULT_GIPL_ANY_PERMISSION = False
DEFAULT_GIPL_ADD_PERMISSION = True
DEFAULT_GIPL_CHANGE_PERMISSION = True
DEFAULT_GIPL_DELETE_PERMISSION = False
AUTODISCOVER_ENABLE = True
AUTODISCOVER_MODULE_NAME = 'perms'
AUTODISCOVER_VARIABLE_NAME = 'PERMISSION_LOGICS'
Add default permission of StaffPermissionLogic | # coding=utf-8
"""
django-permission application configure
"""
__author__ = 'Alisue <lambdalisue@hashnote.net>'
__all__ = ('settings',)
from django.conf import settings
from appconf import AppConf
from permission.handlers import LogicalPermissionHandler
class PermissionConf(AppConf):
DEFAULT_PERMISSION_HANDLER = LogicalPermissionHandler
"""Default permission handler class"""
CHECK_PERMISSION_PRESENCE = settings.DEBUG
"""Check if the specified string permission exists"""
REPLACE_BUILTIN_IF = True
"""Whether replace builtin if templatetag"""
DEFAULT_APL_FIELD_NAME = 'author'
DEFAULT_APL_ANY_PERMISSION = False
DEFAULT_APL_CHANGE_PERMISSION = True
DEFAULT_APL_DELETE_PERMISSION = True
DEFAULT_CPL_FIELD_NAME = 'collaborators'
DEFAULT_CPL_ANY_PERMISSION = False
DEFAULT_CPL_CHANGE_PERMISSION = True
DEFAULT_CPL_DELETE_PERMISSION = False
DEFAULT_GIPL_ANY_PERMISSION = False
DEFAULT_GIPL_ADD_PERMISSION = True
DEFAULT_GIPL_CHANGE_PERMISSION = True
DEFAULT_GIPL_DELETE_PERMISSION = False
DEFAULT_SPL_ANY_PERMISSION = False
DEFAULT_SPL_ADD_PERMISSION = True
DEFAULT_SPL_CHANGE_PERMISSION = True
DEFAULT_SPL_DELETE_PERMISSION = False
AUTODISCOVER_ENABLE = True
AUTODISCOVER_MODULE_NAME = 'perms'
AUTODISCOVER_VARIABLE_NAME = 'PERMISSION_LOGICS'
| <commit_before># coding=utf-8
"""
django-permission application configure
"""
__author__ = 'Alisue <lambdalisue@hashnote.net>'
__all__ = ('settings',)
from django.conf import settings
from appconf import AppConf
from permission.handlers import LogicalPermissionHandler
class PermissionConf(AppConf):
DEFAULT_PERMISSION_HANDLER = LogicalPermissionHandler
"""Default permission handler class"""
CHECK_PERMISSION_PRESENCE = settings.DEBUG
"""Check if the specified string permission exists"""
REPLACE_BUILTIN_IF = True
"""Whether replace builtin if templatetag"""
DEFAULT_APL_FIELD_NAME = 'author'
DEFAULT_APL_ANY_PERMISSION = False
DEFAULT_APL_CHANGE_PERMISSION = True
DEFAULT_APL_DELETE_PERMISSION = True
DEFAULT_CPL_FIELD_NAME = 'collaborators'
DEFAULT_CPL_ANY_PERMISSION = False
DEFAULT_CPL_CHANGE_PERMISSION = True
DEFAULT_CPL_DELETE_PERMISSION = False
DEFAULT_GIPL_ANY_PERMISSION = False
DEFAULT_GIPL_ADD_PERMISSION = True
DEFAULT_GIPL_CHANGE_PERMISSION = True
DEFAULT_GIPL_DELETE_PERMISSION = False
AUTODISCOVER_ENABLE = True
AUTODISCOVER_MODULE_NAME = 'perms'
AUTODISCOVER_VARIABLE_NAME = 'PERMISSION_LOGICS'
<commit_msg>Add default permission of StaffPermissionLogic<commit_after> | # coding=utf-8
"""
django-permission application configure
"""
__author__ = 'Alisue <lambdalisue@hashnote.net>'
__all__ = ('settings',)
from django.conf import settings
from appconf import AppConf
from permission.handlers import LogicalPermissionHandler
class PermissionConf(AppConf):
DEFAULT_PERMISSION_HANDLER = LogicalPermissionHandler
"""Default permission handler class"""
CHECK_PERMISSION_PRESENCE = settings.DEBUG
"""Check if the specified string permission exists"""
REPLACE_BUILTIN_IF = True
"""Whether replace builtin if templatetag"""
DEFAULT_APL_FIELD_NAME = 'author'
DEFAULT_APL_ANY_PERMISSION = False
DEFAULT_APL_CHANGE_PERMISSION = True
DEFAULT_APL_DELETE_PERMISSION = True
DEFAULT_CPL_FIELD_NAME = 'collaborators'
DEFAULT_CPL_ANY_PERMISSION = False
DEFAULT_CPL_CHANGE_PERMISSION = True
DEFAULT_CPL_DELETE_PERMISSION = False
DEFAULT_GIPL_ANY_PERMISSION = False
DEFAULT_GIPL_ADD_PERMISSION = True
DEFAULT_GIPL_CHANGE_PERMISSION = True
DEFAULT_GIPL_DELETE_PERMISSION = False
DEFAULT_SPL_ANY_PERMISSION = False
DEFAULT_SPL_ADD_PERMISSION = True
DEFAULT_SPL_CHANGE_PERMISSION = True
DEFAULT_SPL_DELETE_PERMISSION = False
AUTODISCOVER_ENABLE = True
AUTODISCOVER_MODULE_NAME = 'perms'
AUTODISCOVER_VARIABLE_NAME = 'PERMISSION_LOGICS'
| # coding=utf-8
"""
django-permission application configure
"""
__author__ = 'Alisue <lambdalisue@hashnote.net>'
__all__ = ('settings',)
from django.conf import settings
from appconf import AppConf
from permission.handlers import LogicalPermissionHandler
class PermissionConf(AppConf):
DEFAULT_PERMISSION_HANDLER = LogicalPermissionHandler
"""Default permission handler class"""
CHECK_PERMISSION_PRESENCE = settings.DEBUG
"""Check if the specified string permission exists"""
REPLACE_BUILTIN_IF = True
"""Whether replace builtin if templatetag"""
DEFAULT_APL_FIELD_NAME = 'author'
DEFAULT_APL_ANY_PERMISSION = False
DEFAULT_APL_CHANGE_PERMISSION = True
DEFAULT_APL_DELETE_PERMISSION = True
DEFAULT_CPL_FIELD_NAME = 'collaborators'
DEFAULT_CPL_ANY_PERMISSION = False
DEFAULT_CPL_CHANGE_PERMISSION = True
DEFAULT_CPL_DELETE_PERMISSION = False
DEFAULT_GIPL_ANY_PERMISSION = False
DEFAULT_GIPL_ADD_PERMISSION = True
DEFAULT_GIPL_CHANGE_PERMISSION = True
DEFAULT_GIPL_DELETE_PERMISSION = False
AUTODISCOVER_ENABLE = True
AUTODISCOVER_MODULE_NAME = 'perms'
AUTODISCOVER_VARIABLE_NAME = 'PERMISSION_LOGICS'
Add default permission of StaffPermissionLogic# coding=utf-8
"""
django-permission application configure
"""
__author__ = 'Alisue <lambdalisue@hashnote.net>'
__all__ = ('settings',)
from django.conf import settings
from appconf import AppConf
from permission.handlers import LogicalPermissionHandler
class PermissionConf(AppConf):
DEFAULT_PERMISSION_HANDLER = LogicalPermissionHandler
"""Default permission handler class"""
CHECK_PERMISSION_PRESENCE = settings.DEBUG
"""Check if the specified string permission exists"""
REPLACE_BUILTIN_IF = True
"""Whether replace builtin if templatetag"""
DEFAULT_APL_FIELD_NAME = 'author'
DEFAULT_APL_ANY_PERMISSION = False
DEFAULT_APL_CHANGE_PERMISSION = True
DEFAULT_APL_DELETE_PERMISSION = True
DEFAULT_CPL_FIELD_NAME = 'collaborators'
DEFAULT_CPL_ANY_PERMISSION = False
DEFAULT_CPL_CHANGE_PERMISSION = True
DEFAULT_CPL_DELETE_PERMISSION = False
DEFAULT_GIPL_ANY_PERMISSION = False
DEFAULT_GIPL_ADD_PERMISSION = True
DEFAULT_GIPL_CHANGE_PERMISSION = True
DEFAULT_GIPL_DELETE_PERMISSION = False
DEFAULT_SPL_ANY_PERMISSION = False
DEFAULT_SPL_ADD_PERMISSION = True
DEFAULT_SPL_CHANGE_PERMISSION = True
DEFAULT_SPL_DELETE_PERMISSION = False
AUTODISCOVER_ENABLE = True
AUTODISCOVER_MODULE_NAME = 'perms'
AUTODISCOVER_VARIABLE_NAME = 'PERMISSION_LOGICS'
| <commit_before># coding=utf-8
"""
django-permission application configure
"""
__author__ = 'Alisue <lambdalisue@hashnote.net>'
__all__ = ('settings',)
from django.conf import settings
from appconf import AppConf
from permission.handlers import LogicalPermissionHandler
class PermissionConf(AppConf):
DEFAULT_PERMISSION_HANDLER = LogicalPermissionHandler
"""Default permission handler class"""
CHECK_PERMISSION_PRESENCE = settings.DEBUG
"""Check if the specified string permission exists"""
REPLACE_BUILTIN_IF = True
"""Whether replace builtin if templatetag"""
DEFAULT_APL_FIELD_NAME = 'author'
DEFAULT_APL_ANY_PERMISSION = False
DEFAULT_APL_CHANGE_PERMISSION = True
DEFAULT_APL_DELETE_PERMISSION = True
DEFAULT_CPL_FIELD_NAME = 'collaborators'
DEFAULT_CPL_ANY_PERMISSION = False
DEFAULT_CPL_CHANGE_PERMISSION = True
DEFAULT_CPL_DELETE_PERMISSION = False
DEFAULT_GIPL_ANY_PERMISSION = False
DEFAULT_GIPL_ADD_PERMISSION = True
DEFAULT_GIPL_CHANGE_PERMISSION = True
DEFAULT_GIPL_DELETE_PERMISSION = False
AUTODISCOVER_ENABLE = True
AUTODISCOVER_MODULE_NAME = 'perms'
AUTODISCOVER_VARIABLE_NAME = 'PERMISSION_LOGICS'
<commit_msg>Add default permission of StaffPermissionLogic<commit_after># coding=utf-8
"""
django-permission application configure
"""
__author__ = 'Alisue <lambdalisue@hashnote.net>'
__all__ = ('settings',)
from django.conf import settings
from appconf import AppConf
from permission.handlers import LogicalPermissionHandler
class PermissionConf(AppConf):
DEFAULT_PERMISSION_HANDLER = LogicalPermissionHandler
"""Default permission handler class"""
CHECK_PERMISSION_PRESENCE = settings.DEBUG
"""Check if the specified string permission exists"""
REPLACE_BUILTIN_IF = True
"""Whether replace builtin if templatetag"""
DEFAULT_APL_FIELD_NAME = 'author'
DEFAULT_APL_ANY_PERMISSION = False
DEFAULT_APL_CHANGE_PERMISSION = True
DEFAULT_APL_DELETE_PERMISSION = True
DEFAULT_CPL_FIELD_NAME = 'collaborators'
DEFAULT_CPL_ANY_PERMISSION = False
DEFAULT_CPL_CHANGE_PERMISSION = True
DEFAULT_CPL_DELETE_PERMISSION = False
DEFAULT_GIPL_ANY_PERMISSION = False
DEFAULT_GIPL_ADD_PERMISSION = True
DEFAULT_GIPL_CHANGE_PERMISSION = True
DEFAULT_GIPL_DELETE_PERMISSION = False
DEFAULT_SPL_ANY_PERMISSION = False
DEFAULT_SPL_ADD_PERMISSION = True
DEFAULT_SPL_CHANGE_PERMISSION = True
DEFAULT_SPL_DELETE_PERMISSION = False
AUTODISCOVER_ENABLE = True
AUTODISCOVER_MODULE_NAME = 'perms'
AUTODISCOVER_VARIABLE_NAME = 'PERMISSION_LOGICS'
|
b8ec7503c71c43de8168a7d22cec4c2842382f2d | augur/datasources/downloads/test_downloads_routes.py | augur/datasources/downloads/test_downloads_routes.py | import os
import pytest
import requests
import augur.server
@pytest.fixture(scope="module")
def downloads_routes():
pass
| import os
import pytest
import requests
@pytest.fixture(scope="module")
def downloads_routes():
pass
| Remove uneccesary import in downloads API test | Remove uneccesary import in downloads API test
The test_downloads_routes.py module was uneccessarily importing augur.server
due to the remnants of the previous testing architecture. This import
was causing the build to error out, so it's been removed.
Signed-off-by: Carter Landis <ffc486ac0b21a34cfd7d1170183ed86b0f1b04a2@gmail.com>
| Python | mit | OSSHealth/ghdata,OSSHealth/ghdata,OSSHealth/ghdata | import os
import pytest
import requests
import augur.server
@pytest.fixture(scope="module")
def downloads_routes():
pass
Remove uneccesary import in downloads API test
The test_downloads_routes.py module was uneccessarily importing augur.server
due to the remnants of the previous testing architecture. This import
was causing the build to error out, so it's been removed.
Signed-off-by: Carter Landis <ffc486ac0b21a34cfd7d1170183ed86b0f1b04a2@gmail.com> | import os
import pytest
import requests
@pytest.fixture(scope="module")
def downloads_routes():
pass
| <commit_before>import os
import pytest
import requests
import augur.server
@pytest.fixture(scope="module")
def downloads_routes():
pass
<commit_msg>Remove uneccesary import in downloads API test
The test_downloads_routes.py module was uneccessarily importing augur.server
due to the remnants of the previous testing architecture. This import
was causing the build to error out, so it's been removed.
Signed-off-by: Carter Landis <ffc486ac0b21a34cfd7d1170183ed86b0f1b04a2@gmail.com><commit_after> | import os
import pytest
import requests
@pytest.fixture(scope="module")
def downloads_routes():
pass
| import os
import pytest
import requests
import augur.server
@pytest.fixture(scope="module")
def downloads_routes():
pass
Remove uneccesary import in downloads API test
The test_downloads_routes.py module was uneccessarily importing augur.server
due to the remnants of the previous testing architecture. This import
was causing the build to error out, so it's been removed.
Signed-off-by: Carter Landis <ffc486ac0b21a34cfd7d1170183ed86b0f1b04a2@gmail.com>import os
import pytest
import requests
@pytest.fixture(scope="module")
def downloads_routes():
pass
| <commit_before>import os
import pytest
import requests
import augur.server
@pytest.fixture(scope="module")
def downloads_routes():
pass
<commit_msg>Remove uneccesary import in downloads API test
The test_downloads_routes.py module was uneccessarily importing augur.server
due to the remnants of the previous testing architecture. This import
was causing the build to error out, so it's been removed.
Signed-off-by: Carter Landis <ffc486ac0b21a34cfd7d1170183ed86b0f1b04a2@gmail.com><commit_after>import os
import pytest
import requests
@pytest.fixture(scope="module")
def downloads_routes():
pass
|
033e6dbcbc735101164a7fa4c789e6704a6ee15a | aldryn_apphooks_config/models.py | aldryn_apphooks_config/models.py | # -*- coding: utf-8 -*-
from app_data import AppDataField
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class AppHookConfig(models.Model):
"""
This is the generic (abstract) model that holds the configurations for each AppHookConfig
concrete model
"""
type = models.CharField(max_length=100)
namespace = models.CharField(max_length=100)
app_data = AppDataField()
class Meta:
verbose_name = _(u'app-hook config')
verbose_name_plural = _(u'app-hook configs')
unique_together = ('type', 'namespace')
abstract = True
def save(self, *args, **kwargs):
self.type = '%s.%s' % (
self.__class__.__module__, self.__class__.__name__)
super(AppHookConfig, self).save(*args, **kwargs)
def __str__(self):
return _(u'%s / %s') % (self.type, self.namespace) | # -*- coding: utf-8 -*-
from app_data import AppDataField
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class AppHookConfig(models.Model):
"""
This is the generic (abstract) model that holds the configurations for each AppHookConfig
concrete model
"""
type = models.CharField(max_length=100)
namespace = models.CharField(max_length=100)
app_data = AppDataField()
cmsapp = None
class Meta:
verbose_name = _(u'app-hook config')
verbose_name_plural = _(u'app-hook configs')
unique_together = ('type', 'namespace')
abstract = True
def save(self, *args, **kwargs):
self.type = '%s.%s' % (
self.__class__.__module__, self.__class__.__name__)
super(AppHookConfig, self).save(*args, **kwargs)
def __str__(self):
if self.cmsapp:
return _(u'%s / %s') % (self.cmsapp.name, self.namespace)
else:
return _(u'%s / %s') % (self.type, self.namespace) | Change the str representation of AppHookConfig by using the related CMSApp name instead of the type | Change the str representation of AppHookConfig by using the related CMSApp name instead of the type
| Python | bsd-3-clause | aldryn/aldryn-apphooks-config,aldryn/aldryn-apphooks-config,aldryn/aldryn-apphooks-config | # -*- coding: utf-8 -*-
from app_data import AppDataField
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class AppHookConfig(models.Model):
"""
This is the generic (abstract) model that holds the configurations for each AppHookConfig
concrete model
"""
type = models.CharField(max_length=100)
namespace = models.CharField(max_length=100)
app_data = AppDataField()
class Meta:
verbose_name = _(u'app-hook config')
verbose_name_plural = _(u'app-hook configs')
unique_together = ('type', 'namespace')
abstract = True
def save(self, *args, **kwargs):
self.type = '%s.%s' % (
self.__class__.__module__, self.__class__.__name__)
super(AppHookConfig, self).save(*args, **kwargs)
def __str__(self):
return _(u'%s / %s') % (self.type, self.namespace)Change the str representation of AppHookConfig by using the related CMSApp name instead of the type | # -*- coding: utf-8 -*-
from app_data import AppDataField
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class AppHookConfig(models.Model):
"""
This is the generic (abstract) model that holds the configurations for each AppHookConfig
concrete model
"""
type = models.CharField(max_length=100)
namespace = models.CharField(max_length=100)
app_data = AppDataField()
cmsapp = None
class Meta:
verbose_name = _(u'app-hook config')
verbose_name_plural = _(u'app-hook configs')
unique_together = ('type', 'namespace')
abstract = True
def save(self, *args, **kwargs):
self.type = '%s.%s' % (
self.__class__.__module__, self.__class__.__name__)
super(AppHookConfig, self).save(*args, **kwargs)
def __str__(self):
if self.cmsapp:
return _(u'%s / %s') % (self.cmsapp.name, self.namespace)
else:
return _(u'%s / %s') % (self.type, self.namespace) | <commit_before># -*- coding: utf-8 -*-
from app_data import AppDataField
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class AppHookConfig(models.Model):
"""
This is the generic (abstract) model that holds the configurations for each AppHookConfig
concrete model
"""
type = models.CharField(max_length=100)
namespace = models.CharField(max_length=100)
app_data = AppDataField()
class Meta:
verbose_name = _(u'app-hook config')
verbose_name_plural = _(u'app-hook configs')
unique_together = ('type', 'namespace')
abstract = True
def save(self, *args, **kwargs):
self.type = '%s.%s' % (
self.__class__.__module__, self.__class__.__name__)
super(AppHookConfig, self).save(*args, **kwargs)
def __str__(self):
return _(u'%s / %s') % (self.type, self.namespace)<commit_msg>Change the str representation of AppHookConfig by using the related CMSApp name instead of the type<commit_after> | # -*- coding: utf-8 -*-
from app_data import AppDataField
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class AppHookConfig(models.Model):
"""
This is the generic (abstract) model that holds the configurations for each AppHookConfig
concrete model
"""
type = models.CharField(max_length=100)
namespace = models.CharField(max_length=100)
app_data = AppDataField()
cmsapp = None
class Meta:
verbose_name = _(u'app-hook config')
verbose_name_plural = _(u'app-hook configs')
unique_together = ('type', 'namespace')
abstract = True
def save(self, *args, **kwargs):
self.type = '%s.%s' % (
self.__class__.__module__, self.__class__.__name__)
super(AppHookConfig, self).save(*args, **kwargs)
def __str__(self):
if self.cmsapp:
return _(u'%s / %s') % (self.cmsapp.name, self.namespace)
else:
return _(u'%s / %s') % (self.type, self.namespace) | # -*- coding: utf-8 -*-
from app_data import AppDataField
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class AppHookConfig(models.Model):
"""
This is the generic (abstract) model that holds the configurations for each AppHookConfig
concrete model
"""
type = models.CharField(max_length=100)
namespace = models.CharField(max_length=100)
app_data = AppDataField()
class Meta:
verbose_name = _(u'app-hook config')
verbose_name_plural = _(u'app-hook configs')
unique_together = ('type', 'namespace')
abstract = True
def save(self, *args, **kwargs):
self.type = '%s.%s' % (
self.__class__.__module__, self.__class__.__name__)
super(AppHookConfig, self).save(*args, **kwargs)
def __str__(self):
return _(u'%s / %s') % (self.type, self.namespace)Change the str representation of AppHookConfig by using the related CMSApp name instead of the type# -*- coding: utf-8 -*-
from app_data import AppDataField
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class AppHookConfig(models.Model):
"""
This is the generic (abstract) model that holds the configurations for each AppHookConfig
concrete model
"""
type = models.CharField(max_length=100)
namespace = models.CharField(max_length=100)
app_data = AppDataField()
cmsapp = None
class Meta:
verbose_name = _(u'app-hook config')
verbose_name_plural = _(u'app-hook configs')
unique_together = ('type', 'namespace')
abstract = True
def save(self, *args, **kwargs):
self.type = '%s.%s' % (
self.__class__.__module__, self.__class__.__name__)
super(AppHookConfig, self).save(*args, **kwargs)
def __str__(self):
if self.cmsapp:
return _(u'%s / %s') % (self.cmsapp.name, self.namespace)
else:
return _(u'%s / %s') % (self.type, self.namespace) | <commit_before># -*- coding: utf-8 -*-
from app_data import AppDataField
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class AppHookConfig(models.Model):
"""
This is the generic (abstract) model that holds the configurations for each AppHookConfig
concrete model
"""
type = models.CharField(max_length=100)
namespace = models.CharField(max_length=100)
app_data = AppDataField()
class Meta:
verbose_name = _(u'app-hook config')
verbose_name_plural = _(u'app-hook configs')
unique_together = ('type', 'namespace')
abstract = True
def save(self, *args, **kwargs):
self.type = '%s.%s' % (
self.__class__.__module__, self.__class__.__name__)
super(AppHookConfig, self).save(*args, **kwargs)
def __str__(self):
return _(u'%s / %s') % (self.type, self.namespace)<commit_msg>Change the str representation of AppHookConfig by using the related CMSApp name instead of the type<commit_after># -*- coding: utf-8 -*-
from app_data import AppDataField
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class AppHookConfig(models.Model):
"""
This is the generic (abstract) model that holds the configurations for each AppHookConfig
concrete model
"""
type = models.CharField(max_length=100)
namespace = models.CharField(max_length=100)
app_data = AppDataField()
cmsapp = None
class Meta:
verbose_name = _(u'app-hook config')
verbose_name_plural = _(u'app-hook configs')
unique_together = ('type', 'namespace')
abstract = True
def save(self, *args, **kwargs):
self.type = '%s.%s' % (
self.__class__.__module__, self.__class__.__name__)
super(AppHookConfig, self).save(*args, **kwargs)
def __str__(self):
if self.cmsapp:
return _(u'%s / %s') % (self.cmsapp.name, self.namespace)
else:
return _(u'%s / %s') % (self.type, self.namespace) |
2d39aed3dcdb28acc61a6598cca9836665c2674e | cs251tk/student/markdownify/check_submit_date.py | cs251tk/student/markdownify/check_submit_date.py | import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
_, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if res[0] is not 'f':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Sort dates earliest to latest
dates.sort()
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return dates[0].strftime("%x %X")
| import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
stat, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if stat is 'success':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Sort dates earliest to latest
dates.sort()
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return dates[0].strftime("%x %X")
| Modify error check in check_dates() | Modify error check in check_dates()
| Python | mit | StoDevX/cs251-toolkit,StoDevX/cs251-toolkit,StoDevX/cs251-toolkit,StoDevX/cs251-toolkit | import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
_, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if res[0] is not 'f':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Sort dates earliest to latest
dates.sort()
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return dates[0].strftime("%x %X")
Modify error check in check_dates() | import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
stat, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if stat is 'success':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Sort dates earliest to latest
dates.sort()
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return dates[0].strftime("%x %X")
| <commit_before>import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
_, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if res[0] is not 'f':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Sort dates earliest to latest
dates.sort()
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return dates[0].strftime("%x %X")
<commit_msg>Modify error check in check_dates()<commit_after> | import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
stat, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if stat is 'success':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Sort dates earliest to latest
dates.sort()
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return dates[0].strftime("%x %X")
| import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
_, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if res[0] is not 'f':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Sort dates earliest to latest
dates.sort()
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return dates[0].strftime("%x %X")
Modify error check in check_dates()import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
stat, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if stat is 'success':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Sort dates earliest to latest
dates.sort()
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return dates[0].strftime("%x %X")
| <commit_before>import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
_, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if res[0] is not 'f':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Sort dates earliest to latest
dates.sort()
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return dates[0].strftime("%x %X")
<commit_msg>Modify error check in check_dates()<commit_after>import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
stat, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if stat is 'success':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Sort dates earliest to latest
dates.sort()
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return dates[0].strftime("%x %X")
|
ab72360da83e3b8d95030394f35a442943f53233 | domains/integrator_chains/fmrb_sci_examples/scripts/lqr.py | domains/integrator_chains/fmrb_sci_examples/scripts/lqr.py | #!/usr/bin/env python
from __future__ import print_function
import roslib; roslib.load_manifest('dynamaestro')
import rospy
from dynamaestro.msg import VectorStamped
from control import lqr
import numpy as np
class LQRController(rospy.Subscriber):
def __init__(self, intopic, outtopic):
rospy.Subscriber.__init__(self, outtopic, VectorStamped, self.read_state)
self.intopic = rospy.Publisher(intopic, VectorStamped, queue_size=1)
A = np.array([[0., 1, 0], [0,0,1], [0,0,0]])
B = np.array([[0.],[0],[1]])
Q = np.diag([1.,1,1])
R = np.diag([1.])
K, S, E = lqr(A,B,Q,R)
self.K = K
def read_state(self, vs):
self.intopic.publish(VectorStamped(point=[-(vs.point[0] + 2.4142*vs.point[1] + 2.4142*vs.point[2])]))
if __name__ == "__main__":
rospy.init_node("lqr", anonymous=True)
lqrc = LQRController("input", "output")
rospy.spin()
| #!/usr/bin/env python
from __future__ import print_function
import roslib; roslib.load_manifest('dynamaestro')
import rospy
from dynamaestro.msg import VectorStamped
class LQRController(rospy.Subscriber):
def __init__(self, intopic, outtopic):
rospy.Subscriber.__init__(self, outtopic, VectorStamped, self.read_state)
self.intopic = rospy.Publisher(intopic, VectorStamped, queue_size=1)
def read_state(self, vs):
self.intopic.publish(VectorStamped(point=[-(vs.point[0] + 2.4142*vs.point[1] + 2.4142*vs.point[2])]))
if __name__ == "__main__":
rospy.init_node("lqr", anonymous=True)
lqrc = LQRController("input", "output")
rospy.spin()
| Remove some unused code that unnecessarily introduced depends | Remove some unused code that unnecessarily introduced depends
This should be regarded as an update to 9aa5b02e12a2287642a285541c43790a10d6444f
that removes unnecessary dependencies for the lqr.py example.
In the example provided by 9aa5b02e12a2287642a285541c43790a10d6444f
Python code was introduced that led to dependencies on NumPy and
the Python Control System Library (control), yet the state-feedback
gains were hard-coded. We will re-introduce these dependencies in the
next changeset, but having this checkpoint without them seems useful.
| Python | bsd-3-clause | fmrchallenge/fmrbenchmark,fmrchallenge/fmrbenchmark,fmrchallenge/fmrbenchmark | #!/usr/bin/env python
from __future__ import print_function
import roslib; roslib.load_manifest('dynamaestro')
import rospy
from dynamaestro.msg import VectorStamped
from control import lqr
import numpy as np
class LQRController(rospy.Subscriber):
def __init__(self, intopic, outtopic):
rospy.Subscriber.__init__(self, outtopic, VectorStamped, self.read_state)
self.intopic = rospy.Publisher(intopic, VectorStamped, queue_size=1)
A = np.array([[0., 1, 0], [0,0,1], [0,0,0]])
B = np.array([[0.],[0],[1]])
Q = np.diag([1.,1,1])
R = np.diag([1.])
K, S, E = lqr(A,B,Q,R)
self.K = K
def read_state(self, vs):
self.intopic.publish(VectorStamped(point=[-(vs.point[0] + 2.4142*vs.point[1] + 2.4142*vs.point[2])]))
if __name__ == "__main__":
rospy.init_node("lqr", anonymous=True)
lqrc = LQRController("input", "output")
rospy.spin()
Remove some unused code that unnecessarily introduced depends
This should be regarded as an update to 9aa5b02e12a2287642a285541c43790a10d6444f
that removes unnecessary dependencies for the lqr.py example.
In the example provided by 9aa5b02e12a2287642a285541c43790a10d6444f
Python code was introduced that led to dependencies on NumPy and
the Python Control System Library (control), yet the state-feedback
gains were hard-coded. We will re-introduce these dependencies in the
next changeset, but having this checkpoint without them seems useful. | #!/usr/bin/env python
from __future__ import print_function
import roslib; roslib.load_manifest('dynamaestro')
import rospy
from dynamaestro.msg import VectorStamped
class LQRController(rospy.Subscriber):
def __init__(self, intopic, outtopic):
rospy.Subscriber.__init__(self, outtopic, VectorStamped, self.read_state)
self.intopic = rospy.Publisher(intopic, VectorStamped, queue_size=1)
def read_state(self, vs):
self.intopic.publish(VectorStamped(point=[-(vs.point[0] + 2.4142*vs.point[1] + 2.4142*vs.point[2])]))
if __name__ == "__main__":
rospy.init_node("lqr", anonymous=True)
lqrc = LQRController("input", "output")
rospy.spin()
| <commit_before>#!/usr/bin/env python
from __future__ import print_function
import roslib; roslib.load_manifest('dynamaestro')
import rospy
from dynamaestro.msg import VectorStamped
from control import lqr
import numpy as np
class LQRController(rospy.Subscriber):
def __init__(self, intopic, outtopic):
rospy.Subscriber.__init__(self, outtopic, VectorStamped, self.read_state)
self.intopic = rospy.Publisher(intopic, VectorStamped, queue_size=1)
A = np.array([[0., 1, 0], [0,0,1], [0,0,0]])
B = np.array([[0.],[0],[1]])
Q = np.diag([1.,1,1])
R = np.diag([1.])
K, S, E = lqr(A,B,Q,R)
self.K = K
def read_state(self, vs):
self.intopic.publish(VectorStamped(point=[-(vs.point[0] + 2.4142*vs.point[1] + 2.4142*vs.point[2])]))
if __name__ == "__main__":
rospy.init_node("lqr", anonymous=True)
lqrc = LQRController("input", "output")
rospy.spin()
<commit_msg>Remove some unused code that unnecessarily introduced depends
This should be regarded as an update to 9aa5b02e12a2287642a285541c43790a10d6444f
that removes unnecessary dependencies for the lqr.py example.
In the example provided by 9aa5b02e12a2287642a285541c43790a10d6444f
Python code was introduced that led to dependencies on NumPy and
the Python Control System Library (control), yet the state-feedback
gains were hard-coded. We will re-introduce these dependencies in the
next changeset, but having this checkpoint without them seems useful.<commit_after> | #!/usr/bin/env python
from __future__ import print_function
import roslib; roslib.load_manifest('dynamaestro')
import rospy
from dynamaestro.msg import VectorStamped
class LQRController(rospy.Subscriber):
def __init__(self, intopic, outtopic):
rospy.Subscriber.__init__(self, outtopic, VectorStamped, self.read_state)
self.intopic = rospy.Publisher(intopic, VectorStamped, queue_size=1)
def read_state(self, vs):
self.intopic.publish(VectorStamped(point=[-(vs.point[0] + 2.4142*vs.point[1] + 2.4142*vs.point[2])]))
if __name__ == "__main__":
rospy.init_node("lqr", anonymous=True)
lqrc = LQRController("input", "output")
rospy.spin()
| #!/usr/bin/env python
from __future__ import print_function
import roslib; roslib.load_manifest('dynamaestro')
import rospy
from dynamaestro.msg import VectorStamped
from control import lqr
import numpy as np
class LQRController(rospy.Subscriber):
def __init__(self, intopic, outtopic):
rospy.Subscriber.__init__(self, outtopic, VectorStamped, self.read_state)
self.intopic = rospy.Publisher(intopic, VectorStamped, queue_size=1)
A = np.array([[0., 1, 0], [0,0,1], [0,0,0]])
B = np.array([[0.],[0],[1]])
Q = np.diag([1.,1,1])
R = np.diag([1.])
K, S, E = lqr(A,B,Q,R)
self.K = K
def read_state(self, vs):
self.intopic.publish(VectorStamped(point=[-(vs.point[0] + 2.4142*vs.point[1] + 2.4142*vs.point[2])]))
if __name__ == "__main__":
rospy.init_node("lqr", anonymous=True)
lqrc = LQRController("input", "output")
rospy.spin()
Remove some unused code that unnecessarily introduced depends
This should be regarded as an update to 9aa5b02e12a2287642a285541c43790a10d6444f
that removes unnecessary dependencies for the lqr.py example.
In the example provided by 9aa5b02e12a2287642a285541c43790a10d6444f
Python code was introduced that led to dependencies on NumPy and
the Python Control System Library (control), yet the state-feedback
gains were hard-coded. We will re-introduce these dependencies in the
next changeset, but having this checkpoint without them seems useful.#!/usr/bin/env python
from __future__ import print_function
import roslib; roslib.load_manifest('dynamaestro')
import rospy
from dynamaestro.msg import VectorStamped
class LQRController(rospy.Subscriber):
def __init__(self, intopic, outtopic):
rospy.Subscriber.__init__(self, outtopic, VectorStamped, self.read_state)
self.intopic = rospy.Publisher(intopic, VectorStamped, queue_size=1)
def read_state(self, vs):
self.intopic.publish(VectorStamped(point=[-(vs.point[0] + 2.4142*vs.point[1] + 2.4142*vs.point[2])]))
if __name__ == "__main__":
rospy.init_node("lqr", anonymous=True)
lqrc = LQRController("input", "output")
rospy.spin()
| <commit_before>#!/usr/bin/env python
from __future__ import print_function
import roslib; roslib.load_manifest('dynamaestro')
import rospy
from dynamaestro.msg import VectorStamped
from control import lqr
import numpy as np
class LQRController(rospy.Subscriber):
def __init__(self, intopic, outtopic):
rospy.Subscriber.__init__(self, outtopic, VectorStamped, self.read_state)
self.intopic = rospy.Publisher(intopic, VectorStamped, queue_size=1)
A = np.array([[0., 1, 0], [0,0,1], [0,0,0]])
B = np.array([[0.],[0],[1]])
Q = np.diag([1.,1,1])
R = np.diag([1.])
K, S, E = lqr(A,B,Q,R)
self.K = K
def read_state(self, vs):
self.intopic.publish(VectorStamped(point=[-(vs.point[0] + 2.4142*vs.point[1] + 2.4142*vs.point[2])]))
if __name__ == "__main__":
rospy.init_node("lqr", anonymous=True)
lqrc = LQRController("input", "output")
rospy.spin()
<commit_msg>Remove some unused code that unnecessarily introduced depends
This should be regarded as an update to 9aa5b02e12a2287642a285541c43790a10d6444f
that removes unnecessary dependencies for the lqr.py example.
In the example provided by 9aa5b02e12a2287642a285541c43790a10d6444f
Python code was introduced that led to dependencies on NumPy and
the Python Control System Library (control), yet the state-feedback
gains were hard-coded. We will re-introduce these dependencies in the
next changeset, but having this checkpoint without them seems useful.<commit_after>#!/usr/bin/env python
from __future__ import print_function
import roslib; roslib.load_manifest('dynamaestro')
import rospy
from dynamaestro.msg import VectorStamped
class LQRController(rospy.Subscriber):
def __init__(self, intopic, outtopic):
rospy.Subscriber.__init__(self, outtopic, VectorStamped, self.read_state)
self.intopic = rospy.Publisher(intopic, VectorStamped, queue_size=1)
def read_state(self, vs):
self.intopic.publish(VectorStamped(point=[-(vs.point[0] + 2.4142*vs.point[1] + 2.4142*vs.point[2])]))
if __name__ == "__main__":
rospy.init_node("lqr", anonymous=True)
lqrc = LQRController("input", "output")
rospy.spin()
|
4a3b39b55d129b2bad4772f8a477053bd33cb6c0 | masters/master.client.breakpad/master_site_config.py | masters/master.client.breakpad/master_site_config.py | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class Breakpad(Master.Master3):
project_name = 'Breakpad'
project_url = ('https://code.google.com/p/google-breakpad/wiki/'
'GettingStartedWithBreakpad')
master_port = 8047
slave_port = 8147
master_port_alt = 8247
buildbot_url = 'http://build.chromium.org/p/client.breakpad/'
| # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class Breakpad(Master.Master3):
project_name = 'Breakpad'
project_url = ('https://code.google.com/p/google-breakpad/wiki/'
'GettingStartedWithBreakpad')
master_port = 8053
slave_port = 8153
master_port_alt = 8253
buildbot_url = 'http://build.chromium.org/p/client.breakpad/'
| Change port for breakpad to available port. | Change port for breakpad to available port.
BUG=366164, 359557
R=johnw@google.com
Review URL: https://codereview.chromium.org/526553002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@291789 0039d316-1c4b-4281-b951-d872f2087c98
| Python | bsd-3-clause | eunchong/build,eunchong/build,eunchong/build,eunchong/build | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class Breakpad(Master.Master3):
project_name = 'Breakpad'
project_url = ('https://code.google.com/p/google-breakpad/wiki/'
'GettingStartedWithBreakpad')
master_port = 8047
slave_port = 8147
master_port_alt = 8247
buildbot_url = 'http://build.chromium.org/p/client.breakpad/'
Change port for breakpad to available port.
BUG=366164, 359557
R=johnw@google.com
Review URL: https://codereview.chromium.org/526553002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@291789 0039d316-1c4b-4281-b951-d872f2087c98 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class Breakpad(Master.Master3):
project_name = 'Breakpad'
project_url = ('https://code.google.com/p/google-breakpad/wiki/'
'GettingStartedWithBreakpad')
master_port = 8053
slave_port = 8153
master_port_alt = 8253
buildbot_url = 'http://build.chromium.org/p/client.breakpad/'
| <commit_before># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class Breakpad(Master.Master3):
project_name = 'Breakpad'
project_url = ('https://code.google.com/p/google-breakpad/wiki/'
'GettingStartedWithBreakpad')
master_port = 8047
slave_port = 8147
master_port_alt = 8247
buildbot_url = 'http://build.chromium.org/p/client.breakpad/'
<commit_msg>Change port for breakpad to available port.
BUG=366164, 359557
R=johnw@google.com
Review URL: https://codereview.chromium.org/526553002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@291789 0039d316-1c4b-4281-b951-d872f2087c98<commit_after> | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class Breakpad(Master.Master3):
project_name = 'Breakpad'
project_url = ('https://code.google.com/p/google-breakpad/wiki/'
'GettingStartedWithBreakpad')
master_port = 8053
slave_port = 8153
master_port_alt = 8253
buildbot_url = 'http://build.chromium.org/p/client.breakpad/'
| # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class Breakpad(Master.Master3):
project_name = 'Breakpad'
project_url = ('https://code.google.com/p/google-breakpad/wiki/'
'GettingStartedWithBreakpad')
master_port = 8047
slave_port = 8147
master_port_alt = 8247
buildbot_url = 'http://build.chromium.org/p/client.breakpad/'
Change port for breakpad to available port.
BUG=366164, 359557
R=johnw@google.com
Review URL: https://codereview.chromium.org/526553002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@291789 0039d316-1c4b-4281-b951-d872f2087c98# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class Breakpad(Master.Master3):
project_name = 'Breakpad'
project_url = ('https://code.google.com/p/google-breakpad/wiki/'
'GettingStartedWithBreakpad')
master_port = 8053
slave_port = 8153
master_port_alt = 8253
buildbot_url = 'http://build.chromium.org/p/client.breakpad/'
| <commit_before># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class Breakpad(Master.Master3):
project_name = 'Breakpad'
project_url = ('https://code.google.com/p/google-breakpad/wiki/'
'GettingStartedWithBreakpad')
master_port = 8047
slave_port = 8147
master_port_alt = 8247
buildbot_url = 'http://build.chromium.org/p/client.breakpad/'
<commit_msg>Change port for breakpad to available port.
BUG=366164, 359557
R=johnw@google.com
Review URL: https://codereview.chromium.org/526553002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@291789 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class Breakpad(Master.Master3):
project_name = 'Breakpad'
project_url = ('https://code.google.com/p/google-breakpad/wiki/'
'GettingStartedWithBreakpad')
master_port = 8053
slave_port = 8153
master_port_alt = 8253
buildbot_url = 'http://build.chromium.org/p/client.breakpad/'
|
687592754a80397e4e44585f232e76b7d3360780 | pod_manager/db.py | pod_manager/db.py | import redis
from pod_manager.settings import REDIS_HOST, REDIS_PORT, REDIS_DB
def get_client():
client = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB)
return client
| import pickle
import redis
from pod_manager.settings import REDIS_HOST, REDIS_PORT, REDIS_DB
__all__ = [
'get_client',
'cache_object',
'get_object'
]
def get_client():
client = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB)
return client
def cache_object(client, key, obj, ttl=60):
pipe = client.pipeline()
data = pickle.dumps(obj)
pipe.set(key, data)
if ttl:
pipe.expire(key, ttl)
pipe.execute()
def get_object(client, key):
data = client.get(key)
if not data:
return None
obj = pickle.loads(data)
return obj
| Add cache_object and get_object functions. | Add cache_object and get_object functions.
| Python | apache-2.0 | racker/pod-manager | import redis
from pod_manager.settings import REDIS_HOST, REDIS_PORT, REDIS_DB
def get_client():
client = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB)
return client
Add cache_object and get_object functions. | import pickle
import redis
from pod_manager.settings import REDIS_HOST, REDIS_PORT, REDIS_DB
__all__ = [
'get_client',
'cache_object',
'get_object'
]
def get_client():
client = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB)
return client
def cache_object(client, key, obj, ttl=60):
pipe = client.pipeline()
data = pickle.dumps(obj)
pipe.set(key, data)
if ttl:
pipe.expire(key, ttl)
pipe.execute()
def get_object(client, key):
data = client.get(key)
if not data:
return None
obj = pickle.loads(data)
return obj
| <commit_before>import redis
from pod_manager.settings import REDIS_HOST, REDIS_PORT, REDIS_DB
def get_client():
client = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB)
return client
<commit_msg>Add cache_object and get_object functions.<commit_after> | import pickle
import redis
from pod_manager.settings import REDIS_HOST, REDIS_PORT, REDIS_DB
__all__ = [
'get_client',
'cache_object',
'get_object'
]
def get_client():
client = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB)
return client
def cache_object(client, key, obj, ttl=60):
pipe = client.pipeline()
data = pickle.dumps(obj)
pipe.set(key, data)
if ttl:
pipe.expire(key, ttl)
pipe.execute()
def get_object(client, key):
data = client.get(key)
if not data:
return None
obj = pickle.loads(data)
return obj
| import redis
from pod_manager.settings import REDIS_HOST, REDIS_PORT, REDIS_DB
def get_client():
client = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB)
return client
Add cache_object and get_object functions.import pickle
import redis
from pod_manager.settings import REDIS_HOST, REDIS_PORT, REDIS_DB
__all__ = [
'get_client',
'cache_object',
'get_object'
]
def get_client():
client = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB)
return client
def cache_object(client, key, obj, ttl=60):
pipe = client.pipeline()
data = pickle.dumps(obj)
pipe.set(key, data)
if ttl:
pipe.expire(key, ttl)
pipe.execute()
def get_object(client, key):
data = client.get(key)
if not data:
return None
obj = pickle.loads(data)
return obj
| <commit_before>import redis
from pod_manager.settings import REDIS_HOST, REDIS_PORT, REDIS_DB
def get_client():
client = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB)
return client
<commit_msg>Add cache_object and get_object functions.<commit_after>import pickle
import redis
from pod_manager.settings import REDIS_HOST, REDIS_PORT, REDIS_DB
__all__ = [
'get_client',
'cache_object',
'get_object'
]
def get_client():
client = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB)
return client
def cache_object(client, key, obj, ttl=60):
pipe = client.pipeline()
data = pickle.dumps(obj)
pipe.set(key, data)
if ttl:
pipe.expire(key, ttl)
pipe.execute()
def get_object(client, key):
data = client.get(key)
if not data:
return None
obj = pickle.loads(data)
return obj
|
57d3f5a78385b07fb4d7f91ac97edb6e9dc850aa | waterbutler/providers/osfstorage/metadata.py | waterbutler/providers/osfstorage/metadata.py | from waterbutler.core import metadata
class BaseOsfStorageMetadata:
@property
def provider(self):
return 'osfstorage'
class OsfStorageFileMetadata(BaseOsfStorageMetadata, metadata.BaseFileMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
if self.raw['path'][0] != '/':
return '/' + self.raw['path']
return self.raw['path']
@property
def modified(self):
return self.raw.get('modified')
@property
def size(self):
return self.raw.get('size')
@property
def content_type(self):
return None
@property
def extra(self):
return {
'downloads': self.raw['downloads']
}
class OsfStorageFolderMetadata(BaseOsfStorageMetadata, metadata.BaseFolderMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
return self.raw['path']
| from waterbutler.core import metadata
class BaseOsfStorageMetadata:
@property
def provider(self):
return 'osfstorage'
class OsfStorageFileMetadata(BaseOsfStorageMetadata, metadata.BaseFileMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
if self.raw['path'][0].startswith('/'):
return self.raw['path']
return '/' + self.raw['path']
@property
def modified(self):
return self.raw.get('modified')
@property
def size(self):
return self.raw.get('size')
@property
def content_type(self):
return None
@property
def extra(self):
return {
'downloads': self.raw['downloads']
}
class OsfStorageFolderMetadata(BaseOsfStorageMetadata, metadata.BaseFolderMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
return self.raw['path']
| Use startswith in favor of indexing | Use startswith in favor of indexing
| Python | apache-2.0 | CenterForOpenScience/waterbutler,cosenal/waterbutler,icereval/waterbutler,chrisseto/waterbutler,RCOSDP/waterbutler,rafaeldelucena/waterbutler,Johnetordoff/waterbutler,TomBaxter/waterbutler,felliott/waterbutler,rdhyee/waterbutler,kwierman/waterbutler,Ghalko/waterbutler,hmoco/waterbutler | from waterbutler.core import metadata
class BaseOsfStorageMetadata:
@property
def provider(self):
return 'osfstorage'
class OsfStorageFileMetadata(BaseOsfStorageMetadata, metadata.BaseFileMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
if self.raw['path'][0] != '/':
return '/' + self.raw['path']
return self.raw['path']
@property
def modified(self):
return self.raw.get('modified')
@property
def size(self):
return self.raw.get('size')
@property
def content_type(self):
return None
@property
def extra(self):
return {
'downloads': self.raw['downloads']
}
class OsfStorageFolderMetadata(BaseOsfStorageMetadata, metadata.BaseFolderMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
return self.raw['path']
Use startswith in favor of indexing | from waterbutler.core import metadata
class BaseOsfStorageMetadata:
@property
def provider(self):
return 'osfstorage'
class OsfStorageFileMetadata(BaseOsfStorageMetadata, metadata.BaseFileMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
if self.raw['path'][0].startswith('/'):
return self.raw['path']
return '/' + self.raw['path']
@property
def modified(self):
return self.raw.get('modified')
@property
def size(self):
return self.raw.get('size')
@property
def content_type(self):
return None
@property
def extra(self):
return {
'downloads': self.raw['downloads']
}
class OsfStorageFolderMetadata(BaseOsfStorageMetadata, metadata.BaseFolderMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
return self.raw['path']
| <commit_before>from waterbutler.core import metadata
class BaseOsfStorageMetadata:
@property
def provider(self):
return 'osfstorage'
class OsfStorageFileMetadata(BaseOsfStorageMetadata, metadata.BaseFileMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
if self.raw['path'][0] != '/':
return '/' + self.raw['path']
return self.raw['path']
@property
def modified(self):
return self.raw.get('modified')
@property
def size(self):
return self.raw.get('size')
@property
def content_type(self):
return None
@property
def extra(self):
return {
'downloads': self.raw['downloads']
}
class OsfStorageFolderMetadata(BaseOsfStorageMetadata, metadata.BaseFolderMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
return self.raw['path']
<commit_msg>Use startswith in favor of indexing<commit_after> | from waterbutler.core import metadata
class BaseOsfStorageMetadata:
@property
def provider(self):
return 'osfstorage'
class OsfStorageFileMetadata(BaseOsfStorageMetadata, metadata.BaseFileMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
if self.raw['path'][0].startswith('/'):
return self.raw['path']
return '/' + self.raw['path']
@property
def modified(self):
return self.raw.get('modified')
@property
def size(self):
return self.raw.get('size')
@property
def content_type(self):
return None
@property
def extra(self):
return {
'downloads': self.raw['downloads']
}
class OsfStorageFolderMetadata(BaseOsfStorageMetadata, metadata.BaseFolderMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
return self.raw['path']
| from waterbutler.core import metadata
class BaseOsfStorageMetadata:
@property
def provider(self):
return 'osfstorage'
class OsfStorageFileMetadata(BaseOsfStorageMetadata, metadata.BaseFileMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
if self.raw['path'][0] != '/':
return '/' + self.raw['path']
return self.raw['path']
@property
def modified(self):
return self.raw.get('modified')
@property
def size(self):
return self.raw.get('size')
@property
def content_type(self):
return None
@property
def extra(self):
return {
'downloads': self.raw['downloads']
}
class OsfStorageFolderMetadata(BaseOsfStorageMetadata, metadata.BaseFolderMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
return self.raw['path']
Use startswith in favor of indexingfrom waterbutler.core import metadata
class BaseOsfStorageMetadata:
@property
def provider(self):
return 'osfstorage'
class OsfStorageFileMetadata(BaseOsfStorageMetadata, metadata.BaseFileMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
if self.raw['path'][0].startswith('/'):
return self.raw['path']
return '/' + self.raw['path']
@property
def modified(self):
return self.raw.get('modified')
@property
def size(self):
return self.raw.get('size')
@property
def content_type(self):
return None
@property
def extra(self):
return {
'downloads': self.raw['downloads']
}
class OsfStorageFolderMetadata(BaseOsfStorageMetadata, metadata.BaseFolderMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
return self.raw['path']
| <commit_before>from waterbutler.core import metadata
class BaseOsfStorageMetadata:
@property
def provider(self):
return 'osfstorage'
class OsfStorageFileMetadata(BaseOsfStorageMetadata, metadata.BaseFileMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
if self.raw['path'][0] != '/':
return '/' + self.raw['path']
return self.raw['path']
@property
def modified(self):
return self.raw.get('modified')
@property
def size(self):
return self.raw.get('size')
@property
def content_type(self):
return None
@property
def extra(self):
return {
'downloads': self.raw['downloads']
}
class OsfStorageFolderMetadata(BaseOsfStorageMetadata, metadata.BaseFolderMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
return self.raw['path']
<commit_msg>Use startswith in favor of indexing<commit_after>from waterbutler.core import metadata
class BaseOsfStorageMetadata:
@property
def provider(self):
return 'osfstorage'
class OsfStorageFileMetadata(BaseOsfStorageMetadata, metadata.BaseFileMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
if self.raw['path'][0].startswith('/'):
return self.raw['path']
return '/' + self.raw['path']
@property
def modified(self):
return self.raw.get('modified')
@property
def size(self):
return self.raw.get('size')
@property
def content_type(self):
return None
@property
def extra(self):
return {
'downloads': self.raw['downloads']
}
class OsfStorageFolderMetadata(BaseOsfStorageMetadata, metadata.BaseFolderMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
return self.raw['path']
|
c393aab91962a1623b36769856026a1a31527939 | settings.py | settings.py | from settings_common import *
PACKAGE_VERSION = "0.5"
DEBUG = TEMPLATE_DEBUG = True
DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', 'tmp', 'pipeline')
EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', 'tmp')
SERVE_STATIC_FILES = True
# the following is an idea from https://code.djangoproject.com/wiki/SplitSettings
# We have both local settings and common settings. They are used as follows:
# - common settings are shared data between normal settings and unit test settings
# - local settings are used on productive servers to keep the local
# settings such as db passwords, etc out of version control
try:
from settings_local import *
except ImportError:
pass
| from settings_common import *
PACKAGE_VERSION = "0.5"
DEBUG = TEMPLATE_DEBUG = True
DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', 'tmp', 'pipeline')
EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', 'tmp')
SERVE_STATIC_FILES = True
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '%(asctime)s - %(levelname)s - %(name)s - %(filename)s:%(lineno)d %(funcName)s() %(message)s'
},
'simple': {
'format': '%(asctime)s - %(levelname)s - %(name)s - %(message)s',
'datefmt': '%Y-%m-%d %H:%M:%S'
},
},
'handlers': {
'abacus': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': '/var/log/daisyproducer_abacus_import.log',
'formatter': 'simple'
},
'file': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': '/var/log/daisyproducer.log',
'formatter': 'simple'
},
'console':{
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
},
'loggers': {
'abacus': {
'handlers': ['abacus'],
'level': 'DEBUG',
'propagate': False,
},
'daisyproducer.documents': {
'handlers': ['file'],
'level': 'DEBUG',
'propagate': False,
},
},
}
# the following is an idea from https://code.djangoproject.com/wiki/SplitSettings
# We have both local settings and common settings. They are used as follows:
# - common settings are shared data between normal settings and unit test settings
# - local settings are used on productive servers to keep the local
# settings such as db passwords, etc out of version control
try:
from settings_local import *
except ImportError:
pass
| Add the setup for logging | Add the setup for logging
| Python | agpl-3.0 | sbsdev/daisyproducer,sbsdev/daisyproducer,sbsdev/daisyproducer,sbsdev/daisyproducer | from settings_common import *
PACKAGE_VERSION = "0.5"
DEBUG = TEMPLATE_DEBUG = True
DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', 'tmp', 'pipeline')
EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', 'tmp')
SERVE_STATIC_FILES = True
# the following is an idea from https://code.djangoproject.com/wiki/SplitSettings
# We have both local settings and common settings. They are used as follows:
# - common settings are shared data between normal settings and unit test settings
# - local settings are used on productive servers to keep the local
# settings such as db passwords, etc out of version control
try:
from settings_local import *
except ImportError:
pass
Add the setup for logging | from settings_common import *
PACKAGE_VERSION = "0.5"
DEBUG = TEMPLATE_DEBUG = True
DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', 'tmp', 'pipeline')
EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', 'tmp')
SERVE_STATIC_FILES = True
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '%(asctime)s - %(levelname)s - %(name)s - %(filename)s:%(lineno)d %(funcName)s() %(message)s'
},
'simple': {
'format': '%(asctime)s - %(levelname)s - %(name)s - %(message)s',
'datefmt': '%Y-%m-%d %H:%M:%S'
},
},
'handlers': {
'abacus': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': '/var/log/daisyproducer_abacus_import.log',
'formatter': 'simple'
},
'file': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': '/var/log/daisyproducer.log',
'formatter': 'simple'
},
'console':{
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
},
'loggers': {
'abacus': {
'handlers': ['abacus'],
'level': 'DEBUG',
'propagate': False,
},
'daisyproducer.documents': {
'handlers': ['file'],
'level': 'DEBUG',
'propagate': False,
},
},
}
# the following is an idea from https://code.djangoproject.com/wiki/SplitSettings
# We have both local settings and common settings. They are used as follows:
# - common settings are shared data between normal settings and unit test settings
# - local settings are used on productive servers to keep the local
# settings such as db passwords, etc out of version control
try:
from settings_local import *
except ImportError:
pass
| <commit_before>from settings_common import *
PACKAGE_VERSION = "0.5"
DEBUG = TEMPLATE_DEBUG = True
DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', 'tmp', 'pipeline')
EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', 'tmp')
SERVE_STATIC_FILES = True
# the following is an idea from https://code.djangoproject.com/wiki/SplitSettings
# We have both local settings and common settings. They are used as follows:
# - common settings are shared data between normal settings and unit test settings
# - local settings are used on productive servers to keep the local
# settings such as db passwords, etc out of version control
try:
from settings_local import *
except ImportError:
pass
<commit_msg>Add the setup for logging<commit_after> | from settings_common import *
PACKAGE_VERSION = "0.5"
DEBUG = TEMPLATE_DEBUG = True
DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', 'tmp', 'pipeline')
EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', 'tmp')
SERVE_STATIC_FILES = True
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '%(asctime)s - %(levelname)s - %(name)s - %(filename)s:%(lineno)d %(funcName)s() %(message)s'
},
'simple': {
'format': '%(asctime)s - %(levelname)s - %(name)s - %(message)s',
'datefmt': '%Y-%m-%d %H:%M:%S'
},
},
'handlers': {
'abacus': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': '/var/log/daisyproducer_abacus_import.log',
'formatter': 'simple'
},
'file': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': '/var/log/daisyproducer.log',
'formatter': 'simple'
},
'console':{
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
},
'loggers': {
'abacus': {
'handlers': ['abacus'],
'level': 'DEBUG',
'propagate': False,
},
'daisyproducer.documents': {
'handlers': ['file'],
'level': 'DEBUG',
'propagate': False,
},
},
}
# the following is an idea from https://code.djangoproject.com/wiki/SplitSettings
# We have both local settings and common settings. They are used as follows:
# - common settings are shared data between normal settings and unit test settings
# - local settings are used on productive servers to keep the local
# settings such as db passwords, etc out of version control
try:
from settings_local import *
except ImportError:
pass
| from settings_common import *
PACKAGE_VERSION = "0.5"
DEBUG = TEMPLATE_DEBUG = True
DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', 'tmp', 'pipeline')
EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', 'tmp')
SERVE_STATIC_FILES = True
# the following is an idea from https://code.djangoproject.com/wiki/SplitSettings
# We have both local settings and common settings. They are used as follows:
# - common settings are shared data between normal settings and unit test settings
# - local settings are used on productive servers to keep the local
# settings such as db passwords, etc out of version control
try:
from settings_local import *
except ImportError:
pass
Add the setup for loggingfrom settings_common import *
PACKAGE_VERSION = "0.5"
DEBUG = TEMPLATE_DEBUG = True
DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', 'tmp', 'pipeline')
EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', 'tmp')
SERVE_STATIC_FILES = True
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '%(asctime)s - %(levelname)s - %(name)s - %(filename)s:%(lineno)d %(funcName)s() %(message)s'
},
'simple': {
'format': '%(asctime)s - %(levelname)s - %(name)s - %(message)s',
'datefmt': '%Y-%m-%d %H:%M:%S'
},
},
'handlers': {
'abacus': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': '/var/log/daisyproducer_abacus_import.log',
'formatter': 'simple'
},
'file': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': '/var/log/daisyproducer.log',
'formatter': 'simple'
},
'console':{
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
},
'loggers': {
'abacus': {
'handlers': ['abacus'],
'level': 'DEBUG',
'propagate': False,
},
'daisyproducer.documents': {
'handlers': ['file'],
'level': 'DEBUG',
'propagate': False,
},
},
}
# the following is an idea from https://code.djangoproject.com/wiki/SplitSettings
# We have both local settings and common settings. They are used as follows:
# - common settings are shared data between normal settings and unit test settings
# - local settings are used on productive servers to keep the local
# settings such as db passwords, etc out of version control
try:
from settings_local import *
except ImportError:
pass
| <commit_before>from settings_common import *
PACKAGE_VERSION = "0.5"
DEBUG = TEMPLATE_DEBUG = True
DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', 'tmp', 'pipeline')
EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', 'tmp')
SERVE_STATIC_FILES = True
# the following is an idea from https://code.djangoproject.com/wiki/SplitSettings
# We have both local settings and common settings. They are used as follows:
# - common settings are shared data between normal settings and unit test settings
# - local settings are used on productive servers to keep the local
# settings such as db passwords, etc out of version control
try:
from settings_local import *
except ImportError:
pass
<commit_msg>Add the setup for logging<commit_after>from settings_common import *
PACKAGE_VERSION = "0.5"
DEBUG = TEMPLATE_DEBUG = True
DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', 'tmp', 'pipeline')
EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', 'tmp')
SERVE_STATIC_FILES = True
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '%(asctime)s - %(levelname)s - %(name)s - %(filename)s:%(lineno)d %(funcName)s() %(message)s'
},
'simple': {
'format': '%(asctime)s - %(levelname)s - %(name)s - %(message)s',
'datefmt': '%Y-%m-%d %H:%M:%S'
},
},
'handlers': {
'abacus': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': '/var/log/daisyproducer_abacus_import.log',
'formatter': 'simple'
},
'file': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': '/var/log/daisyproducer.log',
'formatter': 'simple'
},
'console':{
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
},
'loggers': {
'abacus': {
'handlers': ['abacus'],
'level': 'DEBUG',
'propagate': False,
},
'daisyproducer.documents': {
'handlers': ['file'],
'level': 'DEBUG',
'propagate': False,
},
},
}
# the following is an idea from https://code.djangoproject.com/wiki/SplitSettings
# We have both local settings and common settings. They are used as follows:
# - common settings are shared data between normal settings and unit test settings
# - local settings are used on productive servers to keep the local
# settings such as db passwords, etc out of version control
try:
from settings_local import *
except ImportError:
pass
|
3692d64332768a6a8bd85ac5dbfecaba5c364d4a | tests/util/test_platform.py | tests/util/test_platform.py | import platform
from keyring.util.platform_ import (
config_root,
data_root,
_config_root_Linux,
_config_root_Windows,
_data_root_Linux,
_data_root_Windows,
)
def test_platform_Linux():
# rely on the Github Actions workflow to run this on different platforms
if platform.system() != "Linux":
return
assert config_root == _config_root_Linux
assert data_root == _data_root_Linux
def test_platform_Windows():
# rely on the Github Actions workflow to run this on different platforms
if platform.system() != "Windows":
return
assert config_root == _config_root_Windows
assert data_root == _data_root_Windows
| import pytest
import platform
from keyring.util.platform_ import (
config_root,
data_root,
_config_root_Linux,
_config_root_Windows,
_data_root_Linux,
_data_root_Windows,
)
@pytest.mark.skipif(
platform.system() != "Linux", reason="Requires platform.system() == 'Linux'"
)
def test_platform_Linux():
# rely on the Github Actions workflow to run this on different platforms
assert config_root == _config_root_Linux
assert data_root == _data_root_Linux
@pytest.mark.skipif(
platform.system() != "Windows", reason="Requires platform.system() == 'Windows'"
)
def test_platform_Windows():
# rely on the Github Actions workflow to run this on different platforms
assert config_root == _config_root_Windows
assert data_root == _data_root_Windows
| Use Pytest's skipif decorator instead of returning to skip assert statements. | Use Pytest's skipif decorator instead of returning to skip assert statements.
| Python | mit | jaraco/keyring | import platform
from keyring.util.platform_ import (
config_root,
data_root,
_config_root_Linux,
_config_root_Windows,
_data_root_Linux,
_data_root_Windows,
)
def test_platform_Linux():
# rely on the Github Actions workflow to run this on different platforms
if platform.system() != "Linux":
return
assert config_root == _config_root_Linux
assert data_root == _data_root_Linux
def test_platform_Windows():
# rely on the Github Actions workflow to run this on different platforms
if platform.system() != "Windows":
return
assert config_root == _config_root_Windows
assert data_root == _data_root_Windows
Use Pytest's skipif decorator instead of returning to skip assert statements. | import pytest
import platform
from keyring.util.platform_ import (
config_root,
data_root,
_config_root_Linux,
_config_root_Windows,
_data_root_Linux,
_data_root_Windows,
)
@pytest.mark.skipif(
platform.system() != "Linux", reason="Requires platform.system() == 'Linux'"
)
def test_platform_Linux():
# rely on the Github Actions workflow to run this on different platforms
assert config_root == _config_root_Linux
assert data_root == _data_root_Linux
@pytest.mark.skipif(
platform.system() != "Windows", reason="Requires platform.system() == 'Windows'"
)
def test_platform_Windows():
# rely on the Github Actions workflow to run this on different platforms
assert config_root == _config_root_Windows
assert data_root == _data_root_Windows
| <commit_before>import platform
from keyring.util.platform_ import (
config_root,
data_root,
_config_root_Linux,
_config_root_Windows,
_data_root_Linux,
_data_root_Windows,
)
def test_platform_Linux():
# rely on the Github Actions workflow to run this on different platforms
if platform.system() != "Linux":
return
assert config_root == _config_root_Linux
assert data_root == _data_root_Linux
def test_platform_Windows():
# rely on the Github Actions workflow to run this on different platforms
if platform.system() != "Windows":
return
assert config_root == _config_root_Windows
assert data_root == _data_root_Windows
<commit_msg>Use Pytest's skipif decorator instead of returning to skip assert statements.<commit_after> | import pytest
import platform
from keyring.util.platform_ import (
config_root,
data_root,
_config_root_Linux,
_config_root_Windows,
_data_root_Linux,
_data_root_Windows,
)
@pytest.mark.skipif(
platform.system() != "Linux", reason="Requires platform.system() == 'Linux'"
)
def test_platform_Linux():
# rely on the Github Actions workflow to run this on different platforms
assert config_root == _config_root_Linux
assert data_root == _data_root_Linux
@pytest.mark.skipif(
platform.system() != "Windows", reason="Requires platform.system() == 'Windows'"
)
def test_platform_Windows():
# rely on the Github Actions workflow to run this on different platforms
assert config_root == _config_root_Windows
assert data_root == _data_root_Windows
| import platform
from keyring.util.platform_ import (
config_root,
data_root,
_config_root_Linux,
_config_root_Windows,
_data_root_Linux,
_data_root_Windows,
)
def test_platform_Linux():
# rely on the Github Actions workflow to run this on different platforms
if platform.system() != "Linux":
return
assert config_root == _config_root_Linux
assert data_root == _data_root_Linux
def test_platform_Windows():
# rely on the Github Actions workflow to run this on different platforms
if platform.system() != "Windows":
return
assert config_root == _config_root_Windows
assert data_root == _data_root_Windows
Use Pytest's skipif decorator instead of returning to skip assert statements.import pytest
import platform
from keyring.util.platform_ import (
config_root,
data_root,
_config_root_Linux,
_config_root_Windows,
_data_root_Linux,
_data_root_Windows,
)
@pytest.mark.skipif(
platform.system() != "Linux", reason="Requires platform.system() == 'Linux'"
)
def test_platform_Linux():
# rely on the Github Actions workflow to run this on different platforms
assert config_root == _config_root_Linux
assert data_root == _data_root_Linux
@pytest.mark.skipif(
platform.system() != "Windows", reason="Requires platform.system() == 'Windows'"
)
def test_platform_Windows():
# rely on the Github Actions workflow to run this on different platforms
assert config_root == _config_root_Windows
assert data_root == _data_root_Windows
| <commit_before>import platform
from keyring.util.platform_ import (
config_root,
data_root,
_config_root_Linux,
_config_root_Windows,
_data_root_Linux,
_data_root_Windows,
)
def test_platform_Linux():
# rely on the Github Actions workflow to run this on different platforms
if platform.system() != "Linux":
return
assert config_root == _config_root_Linux
assert data_root == _data_root_Linux
def test_platform_Windows():
# rely on the Github Actions workflow to run this on different platforms
if platform.system() != "Windows":
return
assert config_root == _config_root_Windows
assert data_root == _data_root_Windows
<commit_msg>Use Pytest's skipif decorator instead of returning to skip assert statements.<commit_after>import pytest
import platform
from keyring.util.platform_ import (
config_root,
data_root,
_config_root_Linux,
_config_root_Windows,
_data_root_Linux,
_data_root_Windows,
)
@pytest.mark.skipif(
platform.system() != "Linux", reason="Requires platform.system() == 'Linux'"
)
def test_platform_Linux():
# rely on the Github Actions workflow to run this on different platforms
assert config_root == _config_root_Linux
assert data_root == _data_root_Linux
@pytest.mark.skipif(
platform.system() != "Windows", reason="Requires platform.system() == 'Windows'"
)
def test_platform_Windows():
# rely on the Github Actions workflow to run this on different platforms
assert config_root == _config_root_Windows
assert data_root == _data_root_Windows
|
ae78135aed5c60c03c247677e9096ce5411b6635 | django_olcc/olcc/templatetags/olcc.py | django_olcc/olcc/templatetags/olcc.py | from django import template
from bs4 import BeautifulSoup
register = template.Library()
@register.tag(name='activehref')
def do_active_href(parser, token):
nodelist = parser.parse(('endactivehref',))
parser.delete_first_token()
return ActiveHref(nodelist)
class ActiveHref(template.Node):
"""
This template tag will set an 'active' class attribute
on any anchor with an href value that matches part of the
current url path.
"""
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
soup = BeautifulSoup(self.nodelist.render(context))
if context.has_key('request'):
path = context.get('request').path
for a in soup.find_all('a'):
href = a['href']
if href == '/':
if path == href:
a['class'] = 'active'
break
else:
if href in path:
a['class'] = 'active'
break
return soup
| from django import template
from bs4 import BeautifulSoup
register = template.Library()
@register.tag(name='activehref')
def do_active_href(parser, token):
nodelist = parser.parse(('endactivehref',))
parser.delete_first_token()
return ActiveHref(nodelist)
class ActiveHref(template.Node):
"""
This template tag will set an 'active' class attribute
on any anchor with an href value that matches part of the
current url path.
Sample template usage:
{% activehref %}
<li><a href="{% url products %}">Products</a></li>
<li><a href="{% url stores %}">Stores</a></li>
<li><a href="{% url about %}">About</a></li>
{% endactivehref %}
"""
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
soup = BeautifulSoup(self.nodelist.render(context))
if context.has_key('request'):
path = context.get('request').path
for a in soup.find_all('a'):
href = a['href']
if href == '/':
if path == href:
a['class'] = 'active'
break
else:
if href in path:
a['class'] = 'active'
break
return soup
| Update docstring for the ActiveHref template tag. | Update docstring for the ActiveHref template tag.
| Python | mit | twaddington/django-olcc,twaddington/django-olcc,twaddington/django-olcc | from django import template
from bs4 import BeautifulSoup
register = template.Library()
@register.tag(name='activehref')
def do_active_href(parser, token):
nodelist = parser.parse(('endactivehref',))
parser.delete_first_token()
return ActiveHref(nodelist)
class ActiveHref(template.Node):
"""
This template tag will set an 'active' class attribute
on any anchor with an href value that matches part of the
current url path.
"""
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
soup = BeautifulSoup(self.nodelist.render(context))
if context.has_key('request'):
path = context.get('request').path
for a in soup.find_all('a'):
href = a['href']
if href == '/':
if path == href:
a['class'] = 'active'
break
else:
if href in path:
a['class'] = 'active'
break
return soup
Update docstring for the ActiveHref template tag. | from django import template
from bs4 import BeautifulSoup
register = template.Library()
@register.tag(name='activehref')
def do_active_href(parser, token):
nodelist = parser.parse(('endactivehref',))
parser.delete_first_token()
return ActiveHref(nodelist)
class ActiveHref(template.Node):
"""
This template tag will set an 'active' class attribute
on any anchor with an href value that matches part of the
current url path.
Sample template usage:
{% activehref %}
<li><a href="{% url products %}">Products</a></li>
<li><a href="{% url stores %}">Stores</a></li>
<li><a href="{% url about %}">About</a></li>
{% endactivehref %}
"""
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
soup = BeautifulSoup(self.nodelist.render(context))
if context.has_key('request'):
path = context.get('request').path
for a in soup.find_all('a'):
href = a['href']
if href == '/':
if path == href:
a['class'] = 'active'
break
else:
if href in path:
a['class'] = 'active'
break
return soup
| <commit_before>from django import template
from bs4 import BeautifulSoup
register = template.Library()
@register.tag(name='activehref')
def do_active_href(parser, token):
nodelist = parser.parse(('endactivehref',))
parser.delete_first_token()
return ActiveHref(nodelist)
class ActiveHref(template.Node):
"""
This template tag will set an 'active' class attribute
on any anchor with an href value that matches part of the
current url path.
"""
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
soup = BeautifulSoup(self.nodelist.render(context))
if context.has_key('request'):
path = context.get('request').path
for a in soup.find_all('a'):
href = a['href']
if href == '/':
if path == href:
a['class'] = 'active'
break
else:
if href in path:
a['class'] = 'active'
break
return soup
<commit_msg>Update docstring for the ActiveHref template tag.<commit_after> | from django import template
from bs4 import BeautifulSoup
register = template.Library()
@register.tag(name='activehref')
def do_active_href(parser, token):
nodelist = parser.parse(('endactivehref',))
parser.delete_first_token()
return ActiveHref(nodelist)
class ActiveHref(template.Node):
"""
This template tag will set an 'active' class attribute
on any anchor with an href value that matches part of the
current url path.
Sample template usage:
{% activehref %}
<li><a href="{% url products %}">Products</a></li>
<li><a href="{% url stores %}">Stores</a></li>
<li><a href="{% url about %}">About</a></li>
{% endactivehref %}
"""
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
soup = BeautifulSoup(self.nodelist.render(context))
if context.has_key('request'):
path = context.get('request').path
for a in soup.find_all('a'):
href = a['href']
if href == '/':
if path == href:
a['class'] = 'active'
break
else:
if href in path:
a['class'] = 'active'
break
return soup
| from django import template
from bs4 import BeautifulSoup
register = template.Library()
@register.tag(name='activehref')
def do_active_href(parser, token):
nodelist = parser.parse(('endactivehref',))
parser.delete_first_token()
return ActiveHref(nodelist)
class ActiveHref(template.Node):
"""
This template tag will set an 'active' class attribute
on any anchor with an href value that matches part of the
current url path.
"""
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
soup = BeautifulSoup(self.nodelist.render(context))
if context.has_key('request'):
path = context.get('request').path
for a in soup.find_all('a'):
href = a['href']
if href == '/':
if path == href:
a['class'] = 'active'
break
else:
if href in path:
a['class'] = 'active'
break
return soup
Update docstring for the ActiveHref template tag.from django import template
from bs4 import BeautifulSoup
register = template.Library()
@register.tag(name='activehref')
def do_active_href(parser, token):
nodelist = parser.parse(('endactivehref',))
parser.delete_first_token()
return ActiveHref(nodelist)
class ActiveHref(template.Node):
"""
This template tag will set an 'active' class attribute
on any anchor with an href value that matches part of the
current url path.
Sample template usage:
{% activehref %}
<li><a href="{% url products %}">Products</a></li>
<li><a href="{% url stores %}">Stores</a></li>
<li><a href="{% url about %}">About</a></li>
{% endactivehref %}
"""
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
soup = BeautifulSoup(self.nodelist.render(context))
if context.has_key('request'):
path = context.get('request').path
for a in soup.find_all('a'):
href = a['href']
if href == '/':
if path == href:
a['class'] = 'active'
break
else:
if href in path:
a['class'] = 'active'
break
return soup
| <commit_before>from django import template
from bs4 import BeautifulSoup
register = template.Library()
@register.tag(name='activehref')
def do_active_href(parser, token):
nodelist = parser.parse(('endactivehref',))
parser.delete_first_token()
return ActiveHref(nodelist)
class ActiveHref(template.Node):
"""
This template tag will set an 'active' class attribute
on any anchor with an href value that matches part of the
current url path.
"""
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
soup = BeautifulSoup(self.nodelist.render(context))
if context.has_key('request'):
path = context.get('request').path
for a in soup.find_all('a'):
href = a['href']
if href == '/':
if path == href:
a['class'] = 'active'
break
else:
if href in path:
a['class'] = 'active'
break
return soup
<commit_msg>Update docstring for the ActiveHref template tag.<commit_after>from django import template
from bs4 import BeautifulSoup
register = template.Library()
@register.tag(name='activehref')
def do_active_href(parser, token):
nodelist = parser.parse(('endactivehref',))
parser.delete_first_token()
return ActiveHref(nodelist)
class ActiveHref(template.Node):
"""
This template tag will set an 'active' class attribute
on any anchor with an href value that matches part of the
current url path.
Sample template usage:
{% activehref %}
<li><a href="{% url products %}">Products</a></li>
<li><a href="{% url stores %}">Stores</a></li>
<li><a href="{% url about %}">About</a></li>
{% endactivehref %}
"""
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
soup = BeautifulSoup(self.nodelist.render(context))
if context.has_key('request'):
path = context.get('request').path
for a in soup.find_all('a'):
href = a['href']
if href == '/':
if path == href:
a['class'] = 'active'
break
else:
if href in path:
a['class'] = 'active'
break
return soup
|
877378bd699b6d56ebc00b4df7d7a83523d470e7 | bugle/templatetags/bugle.py | bugle/templatetags/bugle.py | from django import template
from django.contrib.auth.models import User
from django.utils.safestring import mark_safe
import re
import urllib
register = template.Library()
username_re = re.compile('@[0-9a-zA-Z]+')
hashtag_re = re.compile('#[^\s]+')
@register.filter
def buglise(s):
s = unicode(s)
usernames = set(User.objects.values_list('username', flat=True))
def replace_username(match):
username = match.group(0)[1:]
if username.lower() == 'all':
return '<strong>@all</strong>'
if username in usernames:
return '<a href="/%s/">@%s</a>' % (username, username)
else:
return '@' + username
s = username_re.sub(replace_username, s)
s = hashtag_re.sub(
lambda m: '<a href="/search/?q=%s">%s</a>' % (
urllib.quote(m.group(0)),
m.group(0),
),
s
)
return mark_safe(s)
| from django import template
from django.contrib.auth.models import User
from django.utils.safestring import mark_safe
import re
import urllib
register = template.Library()
# "@username" mentions: letters and digits only.
username_re = re.compile(r'@[0-9a-zA-Z]+')
# "#hashtag" tokens. The negative lookbehind skips HTML character
# references such as "&#39;", which would otherwise be linked as tags.
# (The previous pattern '\b#[^\s]+' was not a raw string, so "\b" was a
# literal backspace character and the pattern never matched real text.)
hashtag_re = re.compile(r'(?<!&)#[^\s]+')
@register.filter
def buglise(s):
    """
    Turn @mentions and #hashtags in *s* into links.

    "@all" is bolded, known usernames link to their profile page,
    unknown ones are left as plain text, and every hashtag links to
    the search page. The result is marked safe for template output.
    """
    s = unicode(s)
    known = set(User.objects.values_list('username', flat=True))

    def link_username(match):
        # Strip the leading "@" from the matched mention.
        name = match.group(0)[1:]
        if name.lower() == 'all':
            return '<strong>@all</strong>'
        if name in known:
            return '<a href="/%s/">@%s</a>' % (name, name)
        return '@' + name

    def link_hashtag(match):
        tag = match.group(0)
        return '<a href="/search/?q=%s">%s</a>' % (urllib.quote(tag), tag)

    s = username_re.sub(link_username, s)
    s = hashtag_re.sub(link_hashtag, s)
    return mark_safe(s)
| Fix hash tag linking entities | Fix hash tag linking entities
| Python | bsd-2-clause | devfort/bugle,devfort/bugle,devfort/bugle,simonw/bugle_project,simonw/bugle_project | from django import template
from django.contrib.auth.models import User
from django.utils.safestring import mark_safe
import re
import urllib
register = template.Library()
username_re = re.compile('@[0-9a-zA-Z]+')
hashtag_re = re.compile('#[^\s]+')
@register.filter
def buglise(s):
s = unicode(s)
usernames = set(User.objects.values_list('username', flat=True))
def replace_username(match):
username = match.group(0)[1:]
if username.lower() == 'all':
return '<strong>@all</strong>'
if username in usernames:
return '<a href="/%s/">@%s</a>' % (username, username)
else:
return '@' + username
s = username_re.sub(replace_username, s)
s = hashtag_re.sub(
lambda m: '<a href="/search/?q=%s">%s</a>' % (
urllib.quote(m.group(0)),
m.group(0),
),
s
)
return mark_safe(s)
Fix hash tag linking entities | from django import template
from django.contrib.auth.models import User
from django.utils.safestring import mark_safe
import re
import urllib
register = template.Library()
username_re = re.compile('@[0-9a-zA-Z]+')
hashtag_re = re.compile('\b#[^\s]+')
@register.filter
def buglise(s):
s = unicode(s)
usernames = set(User.objects.values_list('username', flat=True))
def replace_username(match):
username = match.group(0)[1:]
if username.lower() == 'all':
return '<strong>@all</strong>'
if username in usernames:
return '<a href="/%s/">@%s</a>' % (username, username)
else:
return '@' + username
s = username_re.sub(replace_username, s)
s = hashtag_re.sub(
lambda m: '<a href="/search/?q=%s">%s</a>' % (
urllib.quote(m.group(0)),
m.group(0),
),
s
)
return mark_safe(s)
| <commit_before>from django import template
from django.contrib.auth.models import User
from django.utils.safestring import mark_safe
import re
import urllib
register = template.Library()
username_re = re.compile('@[0-9a-zA-Z]+')
hashtag_re = re.compile('#[^\s]+')
@register.filter
def buglise(s):
s = unicode(s)
usernames = set(User.objects.values_list('username', flat=True))
def replace_username(match):
username = match.group(0)[1:]
if username.lower() == 'all':
return '<strong>@all</strong>'
if username in usernames:
return '<a href="/%s/">@%s</a>' % (username, username)
else:
return '@' + username
s = username_re.sub(replace_username, s)
s = hashtag_re.sub(
lambda m: '<a href="/search/?q=%s">%s</a>' % (
urllib.quote(m.group(0)),
m.group(0),
),
s
)
return mark_safe(s)
<commit_msg>Fix hash tag linking entities<commit_after> | from django import template
from django.contrib.auth.models import User
from django.utils.safestring import mark_safe
import re
import urllib
register = template.Library()
username_re = re.compile('@[0-9a-zA-Z]+')
hashtag_re = re.compile('\b#[^\s]+')
@register.filter
def buglise(s):
s = unicode(s)
usernames = set(User.objects.values_list('username', flat=True))
def replace_username(match):
username = match.group(0)[1:]
if username.lower() == 'all':
return '<strong>@all</strong>'
if username in usernames:
return '<a href="/%s/">@%s</a>' % (username, username)
else:
return '@' + username
s = username_re.sub(replace_username, s)
s = hashtag_re.sub(
lambda m: '<a href="/search/?q=%s">%s</a>' % (
urllib.quote(m.group(0)),
m.group(0),
),
s
)
return mark_safe(s)
| from django import template
from django.contrib.auth.models import User
from django.utils.safestring import mark_safe
import re
import urllib
register = template.Library()
username_re = re.compile('@[0-9a-zA-Z]+')
hashtag_re = re.compile('#[^\s]+')
@register.filter
def buglise(s):
s = unicode(s)
usernames = set(User.objects.values_list('username', flat=True))
def replace_username(match):
username = match.group(0)[1:]
if username.lower() == 'all':
return '<strong>@all</strong>'
if username in usernames:
return '<a href="/%s/">@%s</a>' % (username, username)
else:
return '@' + username
s = username_re.sub(replace_username, s)
s = hashtag_re.sub(
lambda m: '<a href="/search/?q=%s">%s</a>' % (
urllib.quote(m.group(0)),
m.group(0),
),
s
)
return mark_safe(s)
Fix hash tag linking entitiesfrom django import template
from django.contrib.auth.models import User
from django.utils.safestring import mark_safe
import re
import urllib
register = template.Library()
username_re = re.compile('@[0-9a-zA-Z]+')
hashtag_re = re.compile('\b#[^\s]+')
@register.filter
def buglise(s):
s = unicode(s)
usernames = set(User.objects.values_list('username', flat=True))
def replace_username(match):
username = match.group(0)[1:]
if username.lower() == 'all':
return '<strong>@all</strong>'
if username in usernames:
return '<a href="/%s/">@%s</a>' % (username, username)
else:
return '@' + username
s = username_re.sub(replace_username, s)
s = hashtag_re.sub(
lambda m: '<a href="/search/?q=%s">%s</a>' % (
urllib.quote(m.group(0)),
m.group(0),
),
s
)
return mark_safe(s)
| <commit_before>from django import template
from django.contrib.auth.models import User
from django.utils.safestring import mark_safe
import re
import urllib
register = template.Library()
username_re = re.compile('@[0-9a-zA-Z]+')
hashtag_re = re.compile('#[^\s]+')
@register.filter
def buglise(s):
s = unicode(s)
usernames = set(User.objects.values_list('username', flat=True))
def replace_username(match):
username = match.group(0)[1:]
if username.lower() == 'all':
return '<strong>@all</strong>'
if username in usernames:
return '<a href="/%s/">@%s</a>' % (username, username)
else:
return '@' + username
s = username_re.sub(replace_username, s)
s = hashtag_re.sub(
lambda m: '<a href="/search/?q=%s">%s</a>' % (
urllib.quote(m.group(0)),
m.group(0),
),
s
)
return mark_safe(s)
<commit_msg>Fix hash tag linking entities<commit_after>from django import template
from django.contrib.auth.models import User
from django.utils.safestring import mark_safe
import re
import urllib
register = template.Library()
username_re = re.compile('@[0-9a-zA-Z]+')
hashtag_re = re.compile('\b#[^\s]+')
@register.filter
def buglise(s):
s = unicode(s)
usernames = set(User.objects.values_list('username', flat=True))
def replace_username(match):
username = match.group(0)[1:]
if username.lower() == 'all':
return '<strong>@all</strong>'
if username in usernames:
return '<a href="/%s/">@%s</a>' % (username, username)
else:
return '@' + username
s = username_re.sub(replace_username, s)
s = hashtag_re.sub(
lambda m: '<a href="/search/?q=%s">%s</a>' % (
urllib.quote(m.group(0)),
m.group(0),
),
s
)
return mark_safe(s)
|
5107ca17da0764257bc05488457a7a20f59fe76f | calicoctl/calico_ctl/__init__.py | calicoctl/calico_ctl/__init__.py | __version__ = "0.16.0-dev"
__libnetwork_plugin_version__ = "v0.7.0-dev"
__libcalico_version__ = "v0.11.0-dev"
__felix_version__ = "1.3.0a6-dev"
| __version__ = "0.16.1-dev"
__libnetwork_plugin_version__ = "v0.7.0-dev"
__libcalico_version__ = "v0.11.0-dev"
__felix_version__ = "1.3.0a6-dev"
| Update docs to version v0.16.1 | Update docs to version v0.16.1
| Python | apache-2.0 | Metaswitch/calico-docker,caseydavenport/calico-docker,insequent/calico-docker,TrimBiggs/calico-containers,projectcalico/calico-containers,projectcalico/calico-docker,caseydavenport/calico-containers,caseydavenport/calico-containers,projectcalico/calico-containers,projectcalico/calico-docker,Metaswitch/calico-docker,TrimBiggs/calico-docker,caseydavenport/calico-docker,caseydavenport/calico-containers,TrimBiggs/calico-docker,quater/calico-containers,quater/calico-containers,projectcalico/calico-containers,insequent/calico-docker,TrimBiggs/calico-containers | __version__ = "0.16.0-dev"
__libnetwork_plugin_version__ = "v0.7.0-dev"
__libcalico_version__ = "v0.11.0-dev"
__felix_version__ = "1.3.0a6-dev"
Update docs to version v0.16.1 | __version__ = "0.16.1-dev"
__libnetwork_plugin_version__ = "v0.7.0-dev"
__libcalico_version__ = "v0.11.0-dev"
__felix_version__ = "1.3.0a6-dev"
| <commit_before>__version__ = "0.16.0-dev"
__libnetwork_plugin_version__ = "v0.7.0-dev"
__libcalico_version__ = "v0.11.0-dev"
__felix_version__ = "1.3.0a6-dev"
<commit_msg>Update docs to version v0.16.1<commit_after> | __version__ = "0.16.1-dev"
__libnetwork_plugin_version__ = "v0.7.0-dev"
__libcalico_version__ = "v0.11.0-dev"
__felix_version__ = "1.3.0a6-dev"
| __version__ = "0.16.0-dev"
__libnetwork_plugin_version__ = "v0.7.0-dev"
__libcalico_version__ = "v0.11.0-dev"
__felix_version__ = "1.3.0a6-dev"
Update docs to version v0.16.1__version__ = "0.16.1-dev"
__libnetwork_plugin_version__ = "v0.7.0-dev"
__libcalico_version__ = "v0.11.0-dev"
__felix_version__ = "1.3.0a6-dev"
| <commit_before>__version__ = "0.16.0-dev"
__libnetwork_plugin_version__ = "v0.7.0-dev"
__libcalico_version__ = "v0.11.0-dev"
__felix_version__ = "1.3.0a6-dev"
<commit_msg>Update docs to version v0.16.1<commit_after>__version__ = "0.16.1-dev"
__libnetwork_plugin_version__ = "v0.7.0-dev"
__libcalico_version__ = "v0.11.0-dev"
__felix_version__ = "1.3.0a6-dev"
|
1f11ca9f4e4d2560624fd159913d5b35d6bf12e7 | chainer/ya/utils/range_logger.py | chainer/ya/utils/range_logger.py | import logging
class rangelog:
logger = None
startlog = "--> Start: {name}"
endlog = "<-- End:" # noqa
@classmethod
def set_logger(cls, logger=None):
if logger is None:
cls.logger = logging.getLogger()
cls.logger.setLevel(getattr(logging, 'INFO'))
cls.logger.addHandler(logging.StreamHandler())
elif isinstance(logger, logging.Logger):
cls.logger = logger
def __init__(self, name):
if rangelog.logger is None:
rangelog.set_logger()
self.name = name
def __enter__(self):
rangelog.logger.info(rangelog.startlog.format(name=self.name))
return rangelog.logger
def __exit__(self, *args):
rangelog.logger.info(rangelog.endlog.format(name=self.name))
| import logging
class rangelog:
    """
    Context manager that logs entry to and exit from a named range.

    Usage::

        with rangelog("load data") as logger:
            ...

    Messages are built from the class-level templates ``startmsg`` and
    ``endmsg``, which can be customised with :meth:`set_start_msg` and
    :meth:`set_end_msg` ("{name}" is replaced by the range name).
    """

    # Shared logger; installed lazily on first use (see __init__).
    logger = None
    startmsg = "--> Start: {name}"
    # NOTE(review): no "{name}" placeholder here, so the range name is
    # dropped from the end message — confirm this is intentional.
    endmsg = "<-- End:"  # noqa

    @classmethod
    def set_logger(cls, logger=None):
        """Install *logger*; with no argument, configure the root logger.

        NOTE(review): the default branch adds a fresh StreamHandler each
        time it runs, so calling it repeatedly duplicates output.
        """
        if logger is None:
            cls.logger = logging.getLogger()
            # logging.INFO directly; getattr(logging, 'INFO') was an
            # unnecessary indirection.
            cls.logger.setLevel(logging.INFO)
            cls.logger.addHandler(logging.StreamHandler())
        elif isinstance(logger, logging.Logger):
            # Anything that is not a Logger instance is silently ignored.
            cls.logger = logger

    @classmethod
    def set_start_msg(cls, msg):
        """Set the template logged on range entry."""
        cls.startmsg = msg

    @classmethod
    def set_end_msg(cls, msg):
        """Set the template logged on range exit."""
        cls.endmsg = msg

    def __init__(self, name):
        # Lazily install a default logger on first use.
        if rangelog.logger is None:
            rangelog.set_logger()
        self.name = name

    def __enter__(self):
        rangelog.logger.info(rangelog.startmsg.format(name=self.name))
        return rangelog.logger

    def __exit__(self, *args):
        rangelog.logger.info(rangelog.endmsg.format(name=self.name))
| Add feature to set start/end messages | Add feature to set start/end messages
| Python | mit | yasuyuky/chainer-ya-utils | import logging
class rangelog:
logger = None
startlog = "--> Start: {name}"
endlog = "<-- End:" # noqa
@classmethod
def set_logger(cls, logger=None):
if logger is None:
cls.logger = logging.getLogger()
cls.logger.setLevel(getattr(logging, 'INFO'))
cls.logger.addHandler(logging.StreamHandler())
elif isinstance(logger, logging.Logger):
cls.logger = logger
def __init__(self, name):
if rangelog.logger is None:
rangelog.set_logger()
self.name = name
def __enter__(self):
rangelog.logger.info(rangelog.startlog.format(name=self.name))
return rangelog.logger
def __exit__(self, *args):
rangelog.logger.info(rangelog.endlog.format(name=self.name))
Add feature to set start/end messages | import logging
class rangelog:
logger = None
startmsg = "--> Start: {name}"
endmsg = "<-- End:" # noqa
@classmethod
def set_logger(cls, logger=None):
if logger is None:
cls.logger = logging.getLogger()
cls.logger.setLevel(getattr(logging, 'INFO'))
cls.logger.addHandler(logging.StreamHandler())
elif isinstance(logger, logging.Logger):
cls.logger = logger
@classmethod
def set_start_msg(cls, msg):
cls.startmsg = msg
@classmethod
def set_end_msg(cls, msg):
cls.endmsg = msg
def __init__(self, name):
if rangelog.logger is None:
rangelog.set_logger()
self.name = name
def __enter__(self):
rangelog.logger.info(rangelog.startmsg.format(name=self.name))
return rangelog.logger
def __exit__(self, *args):
rangelog.logger.info(rangelog.endmsg.format(name=self.name))
| <commit_before>import logging
class rangelog:
logger = None
startlog = "--> Start: {name}"
endlog = "<-- End:" # noqa
@classmethod
def set_logger(cls, logger=None):
if logger is None:
cls.logger = logging.getLogger()
cls.logger.setLevel(getattr(logging, 'INFO'))
cls.logger.addHandler(logging.StreamHandler())
elif isinstance(logger, logging.Logger):
cls.logger = logger
def __init__(self, name):
if rangelog.logger is None:
rangelog.set_logger()
self.name = name
def __enter__(self):
rangelog.logger.info(rangelog.startlog.format(name=self.name))
return rangelog.logger
def __exit__(self, *args):
rangelog.logger.info(rangelog.endlog.format(name=self.name))
<commit_msg>Add feature to set start/end messages<commit_after> | import logging
class rangelog:
logger = None
startmsg = "--> Start: {name}"
endmsg = "<-- End:" # noqa
@classmethod
def set_logger(cls, logger=None):
if logger is None:
cls.logger = logging.getLogger()
cls.logger.setLevel(getattr(logging, 'INFO'))
cls.logger.addHandler(logging.StreamHandler())
elif isinstance(logger, logging.Logger):
cls.logger = logger
@classmethod
def set_start_msg(cls, msg):
cls.startmsg = msg
@classmethod
def set_end_msg(cls, msg):
cls.endmsg = msg
def __init__(self, name):
if rangelog.logger is None:
rangelog.set_logger()
self.name = name
def __enter__(self):
rangelog.logger.info(rangelog.startmsg.format(name=self.name))
return rangelog.logger
def __exit__(self, *args):
rangelog.logger.info(rangelog.endmsg.format(name=self.name))
| import logging
class rangelog:
logger = None
startlog = "--> Start: {name}"
endlog = "<-- End:" # noqa
@classmethod
def set_logger(cls, logger=None):
if logger is None:
cls.logger = logging.getLogger()
cls.logger.setLevel(getattr(logging, 'INFO'))
cls.logger.addHandler(logging.StreamHandler())
elif isinstance(logger, logging.Logger):
cls.logger = logger
def __init__(self, name):
if rangelog.logger is None:
rangelog.set_logger()
self.name = name
def __enter__(self):
rangelog.logger.info(rangelog.startlog.format(name=self.name))
return rangelog.logger
def __exit__(self, *args):
rangelog.logger.info(rangelog.endlog.format(name=self.name))
Add feature to set start/end messagesimport logging
class rangelog:
logger = None
startmsg = "--> Start: {name}"
endmsg = "<-- End:" # noqa
@classmethod
def set_logger(cls, logger=None):
if logger is None:
cls.logger = logging.getLogger()
cls.logger.setLevel(getattr(logging, 'INFO'))
cls.logger.addHandler(logging.StreamHandler())
elif isinstance(logger, logging.Logger):
cls.logger = logger
@classmethod
def set_start_msg(cls, msg):
cls.startmsg = msg
@classmethod
def set_end_msg(cls, msg):
cls.endmsg = msg
def __init__(self, name):
if rangelog.logger is None:
rangelog.set_logger()
self.name = name
def __enter__(self):
rangelog.logger.info(rangelog.startmsg.format(name=self.name))
return rangelog.logger
def __exit__(self, *args):
rangelog.logger.info(rangelog.endmsg.format(name=self.name))
| <commit_before>import logging
class rangelog:
logger = None
startlog = "--> Start: {name}"
endlog = "<-- End:" # noqa
@classmethod
def set_logger(cls, logger=None):
if logger is None:
cls.logger = logging.getLogger()
cls.logger.setLevel(getattr(logging, 'INFO'))
cls.logger.addHandler(logging.StreamHandler())
elif isinstance(logger, logging.Logger):
cls.logger = logger
def __init__(self, name):
if rangelog.logger is None:
rangelog.set_logger()
self.name = name
def __enter__(self):
rangelog.logger.info(rangelog.startlog.format(name=self.name))
return rangelog.logger
def __exit__(self, *args):
rangelog.logger.info(rangelog.endlog.format(name=self.name))
<commit_msg>Add feature to set start/end messages<commit_after>import logging
class rangelog:
logger = None
startmsg = "--> Start: {name}"
endmsg = "<-- End:" # noqa
@classmethod
def set_logger(cls, logger=None):
if logger is None:
cls.logger = logging.getLogger()
cls.logger.setLevel(getattr(logging, 'INFO'))
cls.logger.addHandler(logging.StreamHandler())
elif isinstance(logger, logging.Logger):
cls.logger = logger
@classmethod
def set_start_msg(cls, msg):
cls.startmsg = msg
@classmethod
def set_end_msg(cls, msg):
cls.endmsg = msg
def __init__(self, name):
if rangelog.logger is None:
rangelog.set_logger()
self.name = name
def __enter__(self):
rangelog.logger.info(rangelog.startmsg.format(name=self.name))
return rangelog.logger
def __exit__(self, *args):
rangelog.logger.info(rangelog.endmsg.format(name=self.name))
|
9c45b422e3854551fd1ff3ae1d56ee20d3d6457d | mezzanine/settings/admin.py | mezzanine/settings/admin.py |
from django.contrib import admin
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import force_unicode
from mezzanine.settings.models import Setting
from mezzanine.settings.forms import SettingsForm
class SettingsAdmin(admin.ModelAdmin):
"""
Admin class for settings model. Redirect add/change views to the list
view where a single form is rendered for editing all settings.
"""
def changelist_redirect(self):
app = Setting._meta.app_label
name = Setting.__name__.lower()
changelist_url = reverse("admin:%s_%s_changelist" % (app, name))
return HttpResponseRedirect(changelist_url)
def add_view(self, *args, **kwargs):
return self.changelist_redirect()
def change_view(self, *args, **kwargs):
return self.changelist_redirect()
def changelist_view(self, request, extra_context=None):
if extra_context is None:
extra_context = {}
settings_form = SettingsForm(request.POST or None)
if settings_form.is_valid():
settings_form.save()
extra_context["settings_form"] = settings_form
extra_context["title"] = _("Change %s" %
force_unicode(Setting._meta.verbose_name_plural))
return super(SettingsAdmin, self).changelist_view(request, extra_context)
admin.site.register(Setting, SettingsAdmin)
|
from django.contrib import admin
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import force_unicode
from mezzanine.settings.models import Setting
from mezzanine.settings.forms import SettingsForm
class SettingsAdmin(admin.ModelAdmin):
    """
    Admin class for settings model. Redirect add/change views to the list
    view where a single form is rendered for editing all settings.
    """

    def changelist_redirect(self):
        """Redirect to the changelist view for the ``Setting`` model."""
        app = Setting._meta.app_label
        name = Setting.__name__.lower()
        changelist_url = reverse("admin:%s_%s_changelist" % (app, name))
        return HttpResponseRedirect(changelist_url)

    def add_view(self, *args, **kwargs):
        # Individual settings are never added directly.
        return self.changelist_redirect()

    def change_view(self, *args, **kwargs):
        # Individual settings are never edited directly.
        return self.changelist_redirect()

    def changelist_view(self, request, extra_context=None):
        """Render a single form for editing every setting at once."""
        if extra_context is None:
            extra_context = {}
        settings_form = SettingsForm(request.POST or None)
        if settings_form.is_valid():
            settings_form.save()
            # Redirect after a successful POST so a browser refresh
            # does not resubmit the form.
            return self.changelist_redirect()
        extra_context["settings_form"] = settings_form
        # Translate first, then interpolate: "Change %s" is a fixed
        # msgid the translation catalog can contain, whereas the old
        # pre-formatted _( "Change <plural>" ) string could never be
        # looked up.
        extra_context["title"] = _("Change %s") % \
            force_unicode(Setting._meta.verbose_name_plural)
        return super(SettingsAdmin, self).changelist_view(request,
                                                          extra_context)
admin.site.register(Setting, SettingsAdmin)
| Add a redirect on successful update of settings. | Add a redirect on successful update of settings.
| Python | bsd-2-clause | wyzex/mezzanine,ZeroXn/mezzanine,stephenmcd/mezzanine,tuxinhang1989/mezzanine,jerivas/mezzanine,cccs-web/mezzanine,damnfine/mezzanine,guibernardino/mezzanine,emile2016/mezzanine,gradel/mezzanine,biomassives/mezzanine,wrwrwr/mezzanine,geodesign/mezzanine,PegasusWang/mezzanine,theclanks/mezzanine,dsanders11/mezzanine,wbtuomela/mezzanine,jjz/mezzanine,douglaskastle/mezzanine,PegasusWang/mezzanine,joshcartme/mezzanine,webounty/mezzanine,frankchin/mezzanine,mush42/mezzanine,promil23/mezzanine,nikolas/mezzanine,promil23/mezzanine,stbarnabas/mezzanine,biomassives/mezzanine,viaregio/mezzanine,viaregio/mezzanine,agepoly/mezzanine,wyzex/mezzanine,agepoly/mezzanine,webounty/mezzanine,SoLoHiC/mezzanine,scarcry/snm-mezzanine,agepoly/mezzanine,mush42/mezzanine,cccs-web/mezzanine,Cajoline/mezzanine,jjz/mezzanine,dovydas/mezzanine,gbosh/mezzanine,dekomote/mezzanine-modeltranslation-backport,frankchin/mezzanine,dekomote/mezzanine-modeltranslation-backport,frankchin/mezzanine,nikolas/mezzanine,wyzex/mezzanine,spookylukey/mezzanine,orlenko/sfpirg,gbosh/mezzanine,theclanks/mezzanine,dsanders11/mezzanine,gradel/mezzanine,adrian-the-git/mezzanine,sjuxax/mezzanine,ryneeverett/mezzanine,adrian-the-git/mezzanine,industrydive/mezzanine,dovydas/mezzanine,ryneeverett/mezzanine,molokov/mezzanine,jerivas/mezzanine,theclanks/mezzanine,dustinrb/mezzanine,viaregio/mezzanine,scarcry/snm-mezzanine,wrwrwr/mezzanine,molokov/mezzanine,industrydive/mezzanine,guibernardino/mezzanine,batpad/mezzanine,ZeroXn/mezzanine,wbtuomela/mezzanine,geodesign/mezzanine,saintbird/mezzanine,sjuxax/mezzanine,sjdines/mezzanine,stephenmcd/mezzanine,jerivas/mezzanine,vladir/mezzanine,christianwgd/mezzanine,emile2016/mezzanine,Skytorn86/mezzanine,promil23/mezzanine,fusionbox/mezzanine,readevalprint/mezzanine,Cicero-Zhao/mezzanine,vladir/mezzanine,ryneeverett/mezzanine,dsanders11/mezzanine,wbtuomela/mezzanine,AlexHill/mezzanine,damnfine/mezzanine,nikolas/mezzanine,Cajoline/mezzanine,spookylukey/mezzan
ine,orlenko/sfpirg,saintbird/mezzanine,sjuxax/mezzanine,orlenko/plei,dustinrb/mezzanine,dekomote/mezzanine-modeltranslation-backport,jjz/mezzanine,stbarnabas/mezzanine,frankier/mezzanine,readevalprint/mezzanine,Cajoline/mezzanine,eino-makitalo/mezzanine,Kniyl/mezzanine,tuxinhang1989/mezzanine,eino-makitalo/mezzanine,tuxinhang1989/mezzanine,christianwgd/mezzanine,scarcry/snm-mezzanine,frankier/mezzanine,Skytorn86/mezzanine,PegasusWang/mezzanine,frankier/mezzanine,joshcartme/mezzanine,gbosh/mezzanine,emile2016/mezzanine,damnfine/mezzanine,Cicero-Zhao/mezzanine,Skytorn86/mezzanine,sjdines/mezzanine,sjdines/mezzanine,SoLoHiC/mezzanine,fusionbox/mezzanine,stephenmcd/mezzanine,dustinrb/mezzanine,orlenko/plei,eino-makitalo/mezzanine,geodesign/mezzanine,dovydas/mezzanine,vladir/mezzanine,saintbird/mezzanine,douglaskastle/mezzanine,readevalprint/mezzanine,ZeroXn/mezzanine,AlexHill/mezzanine,industrydive/mezzanine,batpad/mezzanine,orlenko/plei,mush42/mezzanine,biomassives/mezzanine,douglaskastle/mezzanine,adrian-the-git/mezzanine,SoLoHiC/mezzanine,christianwgd/mezzanine,gradel/mezzanine,Kniyl/mezzanine,spookylukey/mezzanine,joshcartme/mezzanine,molokov/mezzanine,webounty/mezzanine,orlenko/sfpirg,Kniyl/mezzanine |
from django.contrib import admin
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import force_unicode
from mezzanine.settings.models import Setting
from mezzanine.settings.forms import SettingsForm
class SettingsAdmin(admin.ModelAdmin):
"""
Admin class for settings model. Redirect add/change views to the list
view where a single form is rendered for editing all settings.
"""
def changelist_redirect(self):
app = Setting._meta.app_label
name = Setting.__name__.lower()
changelist_url = reverse("admin:%s_%s_changelist" % (app, name))
return HttpResponseRedirect(changelist_url)
def add_view(self, *args, **kwargs):
return self.changelist_redirect()
def change_view(self, *args, **kwargs):
return self.changelist_redirect()
def changelist_view(self, request, extra_context=None):
if extra_context is None:
extra_context = {}
settings_form = SettingsForm(request.POST or None)
if settings_form.is_valid():
settings_form.save()
extra_context["settings_form"] = settings_form
extra_context["title"] = _("Change %s" %
force_unicode(Setting._meta.verbose_name_plural))
return super(SettingsAdmin, self).changelist_view(request, extra_context)
admin.site.register(Setting, SettingsAdmin)
Add a redirect on successful update of settings. |
from django.contrib import admin
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import force_unicode
from mezzanine.settings.models import Setting
from mezzanine.settings.forms import SettingsForm
class SettingsAdmin(admin.ModelAdmin):
"""
Admin class for settings model. Redirect add/change views to the list
view where a single form is rendered for editing all settings.
"""
def changelist_redirect(self):
app = Setting._meta.app_label
name = Setting.__name__.lower()
changelist_url = reverse("admin:%s_%s_changelist" % (app, name))
return HttpResponseRedirect(changelist_url)
def add_view(self, *args, **kwargs):
return self.changelist_redirect()
def change_view(self, *args, **kwargs):
return self.changelist_redirect()
def changelist_view(self, request, extra_context=None):
if extra_context is None:
extra_context = {}
settings_form = SettingsForm(request.POST or None)
if settings_form.is_valid():
settings_form.save()
return self.changelist_redirect()
extra_context["settings_form"] = settings_form
extra_context["title"] = _("Change %s" %
force_unicode(Setting._meta.verbose_name_plural))
return super(SettingsAdmin, self).changelist_view(request, extra_context)
admin.site.register(Setting, SettingsAdmin)
| <commit_before>
from django.contrib import admin
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import force_unicode
from mezzanine.settings.models import Setting
from mezzanine.settings.forms import SettingsForm
class SettingsAdmin(admin.ModelAdmin):
"""
Admin class for settings model. Redirect add/change views to the list
view where a single form is rendered for editing all settings.
"""
def changelist_redirect(self):
app = Setting._meta.app_label
name = Setting.__name__.lower()
changelist_url = reverse("admin:%s_%s_changelist" % (app, name))
return HttpResponseRedirect(changelist_url)
def add_view(self, *args, **kwargs):
return self.changelist_redirect()
def change_view(self, *args, **kwargs):
return self.changelist_redirect()
def changelist_view(self, request, extra_context=None):
if extra_context is None:
extra_context = {}
settings_form = SettingsForm(request.POST or None)
if settings_form.is_valid():
settings_form.save()
extra_context["settings_form"] = settings_form
extra_context["title"] = _("Change %s" %
force_unicode(Setting._meta.verbose_name_plural))
return super(SettingsAdmin, self).changelist_view(request, extra_context)
admin.site.register(Setting, SettingsAdmin)
<commit_msg>Add a redirect on successful update of settings.<commit_after> |
from django.contrib import admin
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import force_unicode
from mezzanine.settings.models import Setting
from mezzanine.settings.forms import SettingsForm
class SettingsAdmin(admin.ModelAdmin):
"""
Admin class for settings model. Redirect add/change views to the list
view where a single form is rendered for editing all settings.
"""
def changelist_redirect(self):
app = Setting._meta.app_label
name = Setting.__name__.lower()
changelist_url = reverse("admin:%s_%s_changelist" % (app, name))
return HttpResponseRedirect(changelist_url)
def add_view(self, *args, **kwargs):
return self.changelist_redirect()
def change_view(self, *args, **kwargs):
return self.changelist_redirect()
def changelist_view(self, request, extra_context=None):
if extra_context is None:
extra_context = {}
settings_form = SettingsForm(request.POST or None)
if settings_form.is_valid():
settings_form.save()
return self.changelist_redirect()
extra_context["settings_form"] = settings_form
extra_context["title"] = _("Change %s" %
force_unicode(Setting._meta.verbose_name_plural))
return super(SettingsAdmin, self).changelist_view(request, extra_context)
admin.site.register(Setting, SettingsAdmin)
|
from django.contrib import admin
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import force_unicode
from mezzanine.settings.models import Setting
from mezzanine.settings.forms import SettingsForm
class SettingsAdmin(admin.ModelAdmin):
"""
Admin class for settings model. Redirect add/change views to the list
view where a single form is rendered for editing all settings.
"""
def changelist_redirect(self):
app = Setting._meta.app_label
name = Setting.__name__.lower()
changelist_url = reverse("admin:%s_%s_changelist" % (app, name))
return HttpResponseRedirect(changelist_url)
def add_view(self, *args, **kwargs):
return self.changelist_redirect()
def change_view(self, *args, **kwargs):
return self.changelist_redirect()
def changelist_view(self, request, extra_context=None):
if extra_context is None:
extra_context = {}
settings_form = SettingsForm(request.POST or None)
if settings_form.is_valid():
settings_form.save()
extra_context["settings_form"] = settings_form
extra_context["title"] = _("Change %s" %
force_unicode(Setting._meta.verbose_name_plural))
return super(SettingsAdmin, self).changelist_view(request, extra_context)
admin.site.register(Setting, SettingsAdmin)
Add a redirect on successful update of settings.
from django.contrib import admin
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import force_unicode
from mezzanine.settings.models import Setting
from mezzanine.settings.forms import SettingsForm
class SettingsAdmin(admin.ModelAdmin):
"""
Admin class for settings model. Redirect add/change views to the list
view where a single form is rendered for editing all settings.
"""
def changelist_redirect(self):
app = Setting._meta.app_label
name = Setting.__name__.lower()
changelist_url = reverse("admin:%s_%s_changelist" % (app, name))
return HttpResponseRedirect(changelist_url)
def add_view(self, *args, **kwargs):
return self.changelist_redirect()
def change_view(self, *args, **kwargs):
return self.changelist_redirect()
def changelist_view(self, request, extra_context=None):
if extra_context is None:
extra_context = {}
settings_form = SettingsForm(request.POST or None)
if settings_form.is_valid():
settings_form.save()
return self.changelist_redirect()
extra_context["settings_form"] = settings_form
extra_context["title"] = _("Change %s" %
force_unicode(Setting._meta.verbose_name_plural))
return super(SettingsAdmin, self).changelist_view(request, extra_context)
admin.site.register(Setting, SettingsAdmin)
| <commit_before>
from django.contrib import admin
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import force_unicode
from mezzanine.settings.models import Setting
from mezzanine.settings.forms import SettingsForm
class SettingsAdmin(admin.ModelAdmin):
"""
Admin class for settings model. Redirect add/change views to the list
view where a single form is rendered for editing all settings.
"""
def changelist_redirect(self):
app = Setting._meta.app_label
name = Setting.__name__.lower()
changelist_url = reverse("admin:%s_%s_changelist" % (app, name))
return HttpResponseRedirect(changelist_url)
def add_view(self, *args, **kwargs):
return self.changelist_redirect()
def change_view(self, *args, **kwargs):
return self.changelist_redirect()
def changelist_view(self, request, extra_context=None):
if extra_context is None:
extra_context = {}
settings_form = SettingsForm(request.POST or None)
if settings_form.is_valid():
settings_form.save()
extra_context["settings_form"] = settings_form
extra_context["title"] = _("Change %s" %
force_unicode(Setting._meta.verbose_name_plural))
return super(SettingsAdmin, self).changelist_view(request, extra_context)
admin.site.register(Setting, SettingsAdmin)
<commit_msg>Add a redirect on successful update of settings.<commit_after>
from django.contrib import admin
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import force_unicode
from mezzanine.settings.models import Setting
from mezzanine.settings.forms import SettingsForm
class SettingsAdmin(admin.ModelAdmin):
"""
Admin class for settings model. Redirect add/change views to the list
view where a single form is rendered for editing all settings.
"""
def changelist_redirect(self):
app = Setting._meta.app_label
name = Setting.__name__.lower()
changelist_url = reverse("admin:%s_%s_changelist" % (app, name))
return HttpResponseRedirect(changelist_url)
def add_view(self, *args, **kwargs):
return self.changelist_redirect()
def change_view(self, *args, **kwargs):
return self.changelist_redirect()
def changelist_view(self, request, extra_context=None):
if extra_context is None:
extra_context = {}
settings_form = SettingsForm(request.POST or None)
if settings_form.is_valid():
settings_form.save()
return self.changelist_redirect()
extra_context["settings_form"] = settings_form
extra_context["title"] = _("Change %s" %
force_unicode(Setting._meta.verbose_name_plural))
return super(SettingsAdmin, self).changelist_view(request, extra_context)
admin.site.register(Setting, SettingsAdmin)
|
210b24b1e04106745b5680d099f31f3a354446e8 | test/599-whitewater.py | test/599-whitewater.py | # node 2356117215
assert_has_feature(
16, 34003, 23060, 'pois',
{ 'kind': 'put_in' })
# way 308154534
assert_has_feature(
13, 2448, 2992, 'roads',
{ 'kind': 'portage_way' })
| # node 3134398100
assert_has_feature(
16, 19591, 23939, 'pois',
{ 'kind': 'put_in_egress' })
# way 308154534
assert_has_feature(
13, 2448, 2992, 'roads',
{ 'kind': 'portage_way' })
| Use test feature from North America | Use test feature from North America
| Python | mit | mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource | # node 2356117215
assert_has_feature(
16, 34003, 23060, 'pois',
{ 'kind': 'put_in' })
# way 308154534
assert_has_feature(
13, 2448, 2992, 'roads',
{ 'kind': 'portage_way' })
Use test feature from North America | # node 3134398100
assert_has_feature(
16, 19591, 23939, 'pois',
{ 'kind': 'put_in_egress' })
# way 308154534
assert_has_feature(
13, 2448, 2992, 'roads',
{ 'kind': 'portage_way' })
| <commit_before># node 2356117215
assert_has_feature(
16, 34003, 23060, 'pois',
{ 'kind': 'put_in' })
# way 308154534
assert_has_feature(
13, 2448, 2992, 'roads',
{ 'kind': 'portage_way' })
<commit_msg>Use test feature from North America<commit_after> | # node 3134398100
assert_has_feature(
16, 19591, 23939, 'pois',
{ 'kind': 'put_in_egress' })
# way 308154534
assert_has_feature(
13, 2448, 2992, 'roads',
{ 'kind': 'portage_way' })
| # node 2356117215
assert_has_feature(
16, 34003, 23060, 'pois',
{ 'kind': 'put_in' })
# way 308154534
assert_has_feature(
13, 2448, 2992, 'roads',
{ 'kind': 'portage_way' })
Use test feature from North America# node 3134398100
assert_has_feature(
16, 19591, 23939, 'pois',
{ 'kind': 'put_in_egress' })
# way 308154534
assert_has_feature(
13, 2448, 2992, 'roads',
{ 'kind': 'portage_way' })
| <commit_before># node 2356117215
assert_has_feature(
16, 34003, 23060, 'pois',
{ 'kind': 'put_in' })
# way 308154534
assert_has_feature(
13, 2448, 2992, 'roads',
{ 'kind': 'portage_way' })
<commit_msg>Use test feature from North America<commit_after># node 3134398100
assert_has_feature(
16, 19591, 23939, 'pois',
{ 'kind': 'put_in_egress' })
# way 308154534
assert_has_feature(
13, 2448, 2992, 'roads',
{ 'kind': 'portage_way' })
|
3c0bc3c382b3abe693b3871ee0c3d40723cb8f28 | comics/crawler/crawlers/bunny.py | comics/crawler/crawlers/bunny.py | from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Bunny'
language = 'en'
url = 'http://bunny-comic.com/'
start_date = '2004-08-22'
history_capable_days = 14
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = -8
rights = 'H. Davies, CC BY-NC-SA'
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.parse_feed('http://www.bunny-comic.com/rss/bunny.xml')
for entry in self.feed.entries:
title = entry.title
pieces = entry.summary.split('"')
for i, piece in enumerate(pieces):
if piece.count('src='):
url = pieces[i + 1]
break
image_name = url.replace('http://bunny-comic.com/strips/', '')
if self.pub_date == self.string_to_date(image_name[:6], '%d%m%y'):
self.title = title
self.url = url
return
| from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Bunny'
language = 'en'
url = 'http://bunny-comic.com/'
start_date = '2004-08-22'
history_capable_days = 14
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = -8
rights = 'H. Davies, CC BY-NC-SA'
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.parse_feed('http://www.bunny-comic.com/rss/bunny.xml')
for entry in self.feed.entries:
title = entry.title
pieces = entry.summary.split('"')
for i, piece in enumerate(pieces):
if piece.count('src='):
url = pieces[i + 1]
break
image_name = url.replace('http://bunny-comic.com/strips/', '')
if (image_name[:6].isdigit()
and self.pub_date == self.string_to_date(
image_name[:6], '%d%m%y')):
self.title = title
self.url = url
return
| Fix Bunny crawler crash on non-date image names | Fix Bunny crawler crash on non-date image names
| Python | agpl-3.0 | jodal/comics,klette/comics,datagutten/comics,datagutten/comics,klette/comics,klette/comics,jodal/comics,jodal/comics,jodal/comics,datagutten/comics,datagutten/comics | from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Bunny'
language = 'en'
url = 'http://bunny-comic.com/'
start_date = '2004-08-22'
history_capable_days = 14
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = -8
rights = 'H. Davies, CC BY-NC-SA'
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.parse_feed('http://www.bunny-comic.com/rss/bunny.xml')
for entry in self.feed.entries:
title = entry.title
pieces = entry.summary.split('"')
for i, piece in enumerate(pieces):
if piece.count('src='):
url = pieces[i + 1]
break
image_name = url.replace('http://bunny-comic.com/strips/', '')
if self.pub_date == self.string_to_date(image_name[:6], '%d%m%y'):
self.title = title
self.url = url
return
Fix Bunny crawler crash on non-date image names | from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Bunny'
language = 'en'
url = 'http://bunny-comic.com/'
start_date = '2004-08-22'
history_capable_days = 14
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = -8
rights = 'H. Davies, CC BY-NC-SA'
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.parse_feed('http://www.bunny-comic.com/rss/bunny.xml')
for entry in self.feed.entries:
title = entry.title
pieces = entry.summary.split('"')
for i, piece in enumerate(pieces):
if piece.count('src='):
url = pieces[i + 1]
break
image_name = url.replace('http://bunny-comic.com/strips/', '')
if (image_name[:6].isdigit()
and self.pub_date == self.string_to_date(
image_name[:6], '%d%m%y')):
self.title = title
self.url = url
return
| <commit_before>from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Bunny'
language = 'en'
url = 'http://bunny-comic.com/'
start_date = '2004-08-22'
history_capable_days = 14
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = -8
rights = 'H. Davies, CC BY-NC-SA'
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.parse_feed('http://www.bunny-comic.com/rss/bunny.xml')
for entry in self.feed.entries:
title = entry.title
pieces = entry.summary.split('"')
for i, piece in enumerate(pieces):
if piece.count('src='):
url = pieces[i + 1]
break
image_name = url.replace('http://bunny-comic.com/strips/', '')
if self.pub_date == self.string_to_date(image_name[:6], '%d%m%y'):
self.title = title
self.url = url
return
<commit_msg>Fix Bunny crawler crash on non-date image names<commit_after> | from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Bunny'
language = 'en'
url = 'http://bunny-comic.com/'
start_date = '2004-08-22'
history_capable_days = 14
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = -8
rights = 'H. Davies, CC BY-NC-SA'
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.parse_feed('http://www.bunny-comic.com/rss/bunny.xml')
for entry in self.feed.entries:
title = entry.title
pieces = entry.summary.split('"')
for i, piece in enumerate(pieces):
if piece.count('src='):
url = pieces[i + 1]
break
image_name = url.replace('http://bunny-comic.com/strips/', '')
if (image_name[:6].isdigit()
and self.pub_date == self.string_to_date(
image_name[:6], '%d%m%y')):
self.title = title
self.url = url
return
| from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Bunny'
language = 'en'
url = 'http://bunny-comic.com/'
start_date = '2004-08-22'
history_capable_days = 14
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = -8
rights = 'H. Davies, CC BY-NC-SA'
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.parse_feed('http://www.bunny-comic.com/rss/bunny.xml')
for entry in self.feed.entries:
title = entry.title
pieces = entry.summary.split('"')
for i, piece in enumerate(pieces):
if piece.count('src='):
url = pieces[i + 1]
break
image_name = url.replace('http://bunny-comic.com/strips/', '')
if self.pub_date == self.string_to_date(image_name[:6], '%d%m%y'):
self.title = title
self.url = url
return
Fix Bunny crawler crash on non-date image namesfrom comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Bunny'
language = 'en'
url = 'http://bunny-comic.com/'
start_date = '2004-08-22'
history_capable_days = 14
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = -8
rights = 'H. Davies, CC BY-NC-SA'
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.parse_feed('http://www.bunny-comic.com/rss/bunny.xml')
for entry in self.feed.entries:
title = entry.title
pieces = entry.summary.split('"')
for i, piece in enumerate(pieces):
if piece.count('src='):
url = pieces[i + 1]
break
image_name = url.replace('http://bunny-comic.com/strips/', '')
if (image_name[:6].isdigit()
and self.pub_date == self.string_to_date(
image_name[:6], '%d%m%y')):
self.title = title
self.url = url
return
| <commit_before>from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Bunny'
language = 'en'
url = 'http://bunny-comic.com/'
start_date = '2004-08-22'
history_capable_days = 14
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = -8
rights = 'H. Davies, CC BY-NC-SA'
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.parse_feed('http://www.bunny-comic.com/rss/bunny.xml')
for entry in self.feed.entries:
title = entry.title
pieces = entry.summary.split('"')
for i, piece in enumerate(pieces):
if piece.count('src='):
url = pieces[i + 1]
break
image_name = url.replace('http://bunny-comic.com/strips/', '')
if self.pub_date == self.string_to_date(image_name[:6], '%d%m%y'):
self.title = title
self.url = url
return
<commit_msg>Fix Bunny crawler crash on non-date image names<commit_after>from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Bunny'
language = 'en'
url = 'http://bunny-comic.com/'
start_date = '2004-08-22'
history_capable_days = 14
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = -8
rights = 'H. Davies, CC BY-NC-SA'
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.parse_feed('http://www.bunny-comic.com/rss/bunny.xml')
for entry in self.feed.entries:
title = entry.title
pieces = entry.summary.split('"')
for i, piece in enumerate(pieces):
if piece.count('src='):
url = pieces[i + 1]
break
image_name = url.replace('http://bunny-comic.com/strips/', '')
if (image_name[:6].isdigit()
and self.pub_date == self.string_to_date(
image_name[:6], '%d%m%y')):
self.title = title
self.url = url
return
|
11efbbfb310e584ef4af01bf7d662216a3356d22 | wagtail/search/backends/database/sqlite/utils.py | wagtail/search/backends/database/sqlite/utils.py | import sqlite3
from django.db import OperationalError
def fts5_available():
# based on https://stackoverflow.com/a/36656216/1853523
if sqlite3.sqlite_version_info < (3, 19, 0):
# Prior to version 3.19, SQLite doesn't support FTS5 queries with
# column filters ('{column_1 column_2} : query'), which the sqlite
# fulltext backend needs
return False
tmp_db = sqlite3.connect(':memory:')
try:
tmp_db.execute('CREATE VIRTUAL TABLE fts5test USING fts5 (data);')
except OperationalError:
return False
finally:
tmp_db.close()
return True
def fts_table_exists():
from wagtail.search.models import SQLiteFTSIndexEntry
try:
# ignore result of query; we are only interested in the query failing,
# not the presence of index entries
SQLiteFTSIndexEntry.objects.exists()
except OperationalError:
return False
return True
| import sqlite3
from django.db import OperationalError
def fts5_available():
# based on https://stackoverflow.com/a/36656216/1853523
if sqlite3.sqlite_version_info < (3, 19, 0):
# Prior to version 3.19, SQLite doesn't support FTS5 queries with
# column filters ('{column_1 column_2} : query'), which the sqlite
# fulltext backend needs
return False
tmp_db = sqlite3.connect(':memory:')
try:
tmp_db.execute('CREATE VIRTUAL TABLE fts5test USING fts5 (data);')
except sqlite3.OperationalError:
return False
finally:
tmp_db.close()
return True
def fts_table_exists():
from wagtail.search.models import SQLiteFTSIndexEntry
try:
# ignore result of query; we are only interested in the query failing,
# not the presence of index entries
SQLiteFTSIndexEntry.objects.exists()
except OperationalError:
return False
return True
| Fix Sqlite FTS5 compatibility check | Fix Sqlite FTS5 compatibility check
As per https://github.com/wagtail/wagtail/issues/7798#issuecomment-1021544265 - the direct query against the sqlite3 library will fail with sqlite3.OperationalError, not django.db.OperationalError.
| Python | bsd-3-clause | torchbox/wagtail,torchbox/wagtail,torchbox/wagtail,torchbox/wagtail | import sqlite3
from django.db import OperationalError
def fts5_available():
# based on https://stackoverflow.com/a/36656216/1853523
if sqlite3.sqlite_version_info < (3, 19, 0):
# Prior to version 3.19, SQLite doesn't support FTS5 queries with
# column filters ('{column_1 column_2} : query'), which the sqlite
# fulltext backend needs
return False
tmp_db = sqlite3.connect(':memory:')
try:
tmp_db.execute('CREATE VIRTUAL TABLE fts5test USING fts5 (data);')
except OperationalError:
return False
finally:
tmp_db.close()
return True
def fts_table_exists():
from wagtail.search.models import SQLiteFTSIndexEntry
try:
# ignore result of query; we are only interested in the query failing,
# not the presence of index entries
SQLiteFTSIndexEntry.objects.exists()
except OperationalError:
return False
return True
Fix Sqlite FTS5 compatibility check
As per https://github.com/wagtail/wagtail/issues/7798#issuecomment-1021544265 - the direct query against the sqlite3 library will fail with sqlite3.OperationalError, not django.db.OperationalError. | import sqlite3
from django.db import OperationalError
def fts5_available():
# based on https://stackoverflow.com/a/36656216/1853523
if sqlite3.sqlite_version_info < (3, 19, 0):
# Prior to version 3.19, SQLite doesn't support FTS5 queries with
# column filters ('{column_1 column_2} : query'), which the sqlite
# fulltext backend needs
return False
tmp_db = sqlite3.connect(':memory:')
try:
tmp_db.execute('CREATE VIRTUAL TABLE fts5test USING fts5 (data);')
except sqlite3.OperationalError:
return False
finally:
tmp_db.close()
return True
def fts_table_exists():
from wagtail.search.models import SQLiteFTSIndexEntry
try:
# ignore result of query; we are only interested in the query failing,
# not the presence of index entries
SQLiteFTSIndexEntry.objects.exists()
except OperationalError:
return False
return True
| <commit_before>import sqlite3
from django.db import OperationalError
def fts5_available():
# based on https://stackoverflow.com/a/36656216/1853523
if sqlite3.sqlite_version_info < (3, 19, 0):
# Prior to version 3.19, SQLite doesn't support FTS5 queries with
# column filters ('{column_1 column_2} : query'), which the sqlite
# fulltext backend needs
return False
tmp_db = sqlite3.connect(':memory:')
try:
tmp_db.execute('CREATE VIRTUAL TABLE fts5test USING fts5 (data);')
except OperationalError:
return False
finally:
tmp_db.close()
return True
def fts_table_exists():
from wagtail.search.models import SQLiteFTSIndexEntry
try:
# ignore result of query; we are only interested in the query failing,
# not the presence of index entries
SQLiteFTSIndexEntry.objects.exists()
except OperationalError:
return False
return True
<commit_msg>Fix Sqlite FTS5 compatibility check
As per https://github.com/wagtail/wagtail/issues/7798#issuecomment-1021544265 - the direct query against the sqlite3 library will fail with sqlite3.OperationalError, not django.db.OperationalError.<commit_after> | import sqlite3
from django.db import OperationalError
def fts5_available():
# based on https://stackoverflow.com/a/36656216/1853523
if sqlite3.sqlite_version_info < (3, 19, 0):
# Prior to version 3.19, SQLite doesn't support FTS5 queries with
# column filters ('{column_1 column_2} : query'), which the sqlite
# fulltext backend needs
return False
tmp_db = sqlite3.connect(':memory:')
try:
tmp_db.execute('CREATE VIRTUAL TABLE fts5test USING fts5 (data);')
except sqlite3.OperationalError:
return False
finally:
tmp_db.close()
return True
def fts_table_exists():
from wagtail.search.models import SQLiteFTSIndexEntry
try:
# ignore result of query; we are only interested in the query failing,
# not the presence of index entries
SQLiteFTSIndexEntry.objects.exists()
except OperationalError:
return False
return True
| import sqlite3
from django.db import OperationalError
def fts5_available():
# based on https://stackoverflow.com/a/36656216/1853523
if sqlite3.sqlite_version_info < (3, 19, 0):
# Prior to version 3.19, SQLite doesn't support FTS5 queries with
# column filters ('{column_1 column_2} : query'), which the sqlite
# fulltext backend needs
return False
tmp_db = sqlite3.connect(':memory:')
try:
tmp_db.execute('CREATE VIRTUAL TABLE fts5test USING fts5 (data);')
except OperationalError:
return False
finally:
tmp_db.close()
return True
def fts_table_exists():
from wagtail.search.models import SQLiteFTSIndexEntry
try:
# ignore result of query; we are only interested in the query failing,
# not the presence of index entries
SQLiteFTSIndexEntry.objects.exists()
except OperationalError:
return False
return True
Fix Sqlite FTS5 compatibility check
As per https://github.com/wagtail/wagtail/issues/7798#issuecomment-1021544265 - the direct query against the sqlite3 library will fail with sqlite3.OperationalError, not django.db.OperationalError.import sqlite3
from django.db import OperationalError
def fts5_available():
# based on https://stackoverflow.com/a/36656216/1853523
if sqlite3.sqlite_version_info < (3, 19, 0):
# Prior to version 3.19, SQLite doesn't support FTS5 queries with
# column filters ('{column_1 column_2} : query'), which the sqlite
# fulltext backend needs
return False
tmp_db = sqlite3.connect(':memory:')
try:
tmp_db.execute('CREATE VIRTUAL TABLE fts5test USING fts5 (data);')
except sqlite3.OperationalError:
return False
finally:
tmp_db.close()
return True
def fts_table_exists():
from wagtail.search.models import SQLiteFTSIndexEntry
try:
# ignore result of query; we are only interested in the query failing,
# not the presence of index entries
SQLiteFTSIndexEntry.objects.exists()
except OperationalError:
return False
return True
| <commit_before>import sqlite3
from django.db import OperationalError
def fts5_available():
# based on https://stackoverflow.com/a/36656216/1853523
if sqlite3.sqlite_version_info < (3, 19, 0):
# Prior to version 3.19, SQLite doesn't support FTS5 queries with
# column filters ('{column_1 column_2} : query'), which the sqlite
# fulltext backend needs
return False
tmp_db = sqlite3.connect(':memory:')
try:
tmp_db.execute('CREATE VIRTUAL TABLE fts5test USING fts5 (data);')
except OperationalError:
return False
finally:
tmp_db.close()
return True
def fts_table_exists():
from wagtail.search.models import SQLiteFTSIndexEntry
try:
# ignore result of query; we are only interested in the query failing,
# not the presence of index entries
SQLiteFTSIndexEntry.objects.exists()
except OperationalError:
return False
return True
<commit_msg>Fix Sqlite FTS5 compatibility check
As per https://github.com/wagtail/wagtail/issues/7798#issuecomment-1021544265 - the direct query against the sqlite3 library will fail with sqlite3.OperationalError, not django.db.OperationalError.<commit_after>import sqlite3
from django.db import OperationalError
def fts5_available():
# based on https://stackoverflow.com/a/36656216/1853523
if sqlite3.sqlite_version_info < (3, 19, 0):
# Prior to version 3.19, SQLite doesn't support FTS5 queries with
# column filters ('{column_1 column_2} : query'), which the sqlite
# fulltext backend needs
return False
tmp_db = sqlite3.connect(':memory:')
try:
tmp_db.execute('CREATE VIRTUAL TABLE fts5test USING fts5 (data);')
except sqlite3.OperationalError:
return False
finally:
tmp_db.close()
return True
def fts_table_exists():
from wagtail.search.models import SQLiteFTSIndexEntry
try:
# ignore result of query; we are only interested in the query failing,
# not the presence of index entries
SQLiteFTSIndexEntry.objects.exists()
except OperationalError:
return False
return True
|
930ad35cee818e2d0b97f840ff0b3b772bd51af3 | post_office/management/commands/send_queued_mail.py | post_office/management/commands/send_queued_mail.py | import tempfile
import sys
from optparse import make_option
from django.core.management.base import BaseCommand
from ...lockfile import FileLock, FileLocked
from ...mail import send_queued
from ...logutils import setup_loghandlers
logger = setup_loghandlers()
default_lockfile = tempfile.gettempdir() + "/post_office"
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('-p', '--processes', type='int',
help='Number of processes used to send emails', default=1),
make_option('-L', '--lockfile', type='string', default=default_lockfile,
help='Absolute path of lockfile to acquire'),
make_option('-l', '--log-level', type='int',
help='"0" to log nothing, "1" to only log errors'),
)
def handle(self, *args, **options):
logger.info('Acquiring lock for sending queued emails at %s.lock' %
options['lockfile'])
try:
with FileLock(options['lockfile']):
try:
send_queued(options['processes'], options.get('log_level'))
except Exception as e:
logger.error(e, exc_info=sys.exc_info(), extra={'status_code': 500})
raise
except FileLocked:
logger.info('Failed to acquire lock, terminating now.')
| import tempfile
import sys
from django.core.management.base import BaseCommand
from ...lockfile import FileLock, FileLocked
from ...mail import send_queued
from ...logutils import setup_loghandlers
logger = setup_loghandlers()
default_lockfile = tempfile.gettempdir() + "/post_office"
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('-p', '--processes',
type='int',
help='Number of processes used to send emails',
default=1
)
parser.add_argument('-L', '--lockfile',
type='string',
default=default_lockfile,
help='Absolute path of lockfile to acquire'
)
parser.add_argument('-l', '--log-level',
type='int',
help='"0" to log nothing, "1" to only log errors'
)
def handle(self, *args, **options):
logger.info('Acquiring lock for sending queued emails at %s.lock' %
options['lockfile'])
try:
with FileLock(options['lockfile']):
try:
send_queued(options['processes'], options.get('log_level'))
except Exception as e:
logger.error(e, exc_info=sys.exc_info(), extra={'status_code': 500})
raise
except FileLocked:
logger.info('Failed to acquire lock, terminating now.')
| Switch to using the `add_arguments` method. | Switch to using the `add_arguments` method.
This is an alternative to using the `option_list` and
`optparse.make_option`. Django deprecated the use of `optparse` in
management commands in Django 1.8 and removed it in Django 1.10.
| Python | mit | jrief/django-post_office,ui/django-post_office,ui/django-post_office | import tempfile
import sys
from optparse import make_option
from django.core.management.base import BaseCommand
from ...lockfile import FileLock, FileLocked
from ...mail import send_queued
from ...logutils import setup_loghandlers
logger = setup_loghandlers()
default_lockfile = tempfile.gettempdir() + "/post_office"
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('-p', '--processes', type='int',
help='Number of processes used to send emails', default=1),
make_option('-L', '--lockfile', type='string', default=default_lockfile,
help='Absolute path of lockfile to acquire'),
make_option('-l', '--log-level', type='int',
help='"0" to log nothing, "1" to only log errors'),
)
def handle(self, *args, **options):
logger.info('Acquiring lock for sending queued emails at %s.lock' %
options['lockfile'])
try:
with FileLock(options['lockfile']):
try:
send_queued(options['processes'], options.get('log_level'))
except Exception as e:
logger.error(e, exc_info=sys.exc_info(), extra={'status_code': 500})
raise
except FileLocked:
logger.info('Failed to acquire lock, terminating now.')
Switch to using the `add_arguments` method.
This is an alternative to using the `option_list` and
`optparse.make_option`. Django deprecated the use of `optparse` in
management commands in Django 1.8 and removed it in Django 1.10. | import tempfile
import sys
from django.core.management.base import BaseCommand
from ...lockfile import FileLock, FileLocked
from ...mail import send_queued
from ...logutils import setup_loghandlers
logger = setup_loghandlers()
default_lockfile = tempfile.gettempdir() + "/post_office"
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('-p', '--processes',
type='int',
help='Number of processes used to send emails',
default=1
)
parser.add_argument('-L', '--lockfile',
type='string',
default=default_lockfile,
help='Absolute path of lockfile to acquire'
)
parser.add_argument('-l', '--log-level',
type='int',
help='"0" to log nothing, "1" to only log errors'
)
def handle(self, *args, **options):
logger.info('Acquiring lock for sending queued emails at %s.lock' %
options['lockfile'])
try:
with FileLock(options['lockfile']):
try:
send_queued(options['processes'], options.get('log_level'))
except Exception as e:
logger.error(e, exc_info=sys.exc_info(), extra={'status_code': 500})
raise
except FileLocked:
logger.info('Failed to acquire lock, terminating now.')
| <commit_before>import tempfile
import sys
from optparse import make_option
from django.core.management.base import BaseCommand
from ...lockfile import FileLock, FileLocked
from ...mail import send_queued
from ...logutils import setup_loghandlers
logger = setup_loghandlers()
default_lockfile = tempfile.gettempdir() + "/post_office"
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('-p', '--processes', type='int',
help='Number of processes used to send emails', default=1),
make_option('-L', '--lockfile', type='string', default=default_lockfile,
help='Absolute path of lockfile to acquire'),
make_option('-l', '--log-level', type='int',
help='"0" to log nothing, "1" to only log errors'),
)
def handle(self, *args, **options):
logger.info('Acquiring lock for sending queued emails at %s.lock' %
options['lockfile'])
try:
with FileLock(options['lockfile']):
try:
send_queued(options['processes'], options.get('log_level'))
except Exception as e:
logger.error(e, exc_info=sys.exc_info(), extra={'status_code': 500})
raise
except FileLocked:
logger.info('Failed to acquire lock, terminating now.')
<commit_msg>Switch to using the `add_arguments` method.
This is an alternative to using the `option_list` and
`optparse.make_option`. Django deprecated the use of `optparse` in
management commands in Django 1.8 and removed it in Django 1.10.<commit_after> | import tempfile
import sys
from django.core.management.base import BaseCommand
from ...lockfile import FileLock, FileLocked
from ...mail import send_queued
from ...logutils import setup_loghandlers
logger = setup_loghandlers()
default_lockfile = tempfile.gettempdir() + "/post_office"
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('-p', '--processes',
type='int',
help='Number of processes used to send emails',
default=1
)
parser.add_argument('-L', '--lockfile',
type='string',
default=default_lockfile,
help='Absolute path of lockfile to acquire'
)
parser.add_argument('-l', '--log-level',
type='int',
help='"0" to log nothing, "1" to only log errors'
)
def handle(self, *args, **options):
logger.info('Acquiring lock for sending queued emails at %s.lock' %
options['lockfile'])
try:
with FileLock(options['lockfile']):
try:
send_queued(options['processes'], options.get('log_level'))
except Exception as e:
logger.error(e, exc_info=sys.exc_info(), extra={'status_code': 500})
raise
except FileLocked:
logger.info('Failed to acquire lock, terminating now.')
| import tempfile
import sys
from optparse import make_option
from django.core.management.base import BaseCommand
from ...lockfile import FileLock, FileLocked
from ...mail import send_queued
from ...logutils import setup_loghandlers
logger = setup_loghandlers()
default_lockfile = tempfile.gettempdir() + "/post_office"
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('-p', '--processes', type='int',
help='Number of processes used to send emails', default=1),
make_option('-L', '--lockfile', type='string', default=default_lockfile,
help='Absolute path of lockfile to acquire'),
make_option('-l', '--log-level', type='int',
help='"0" to log nothing, "1" to only log errors'),
)
def handle(self, *args, **options):
logger.info('Acquiring lock for sending queued emails at %s.lock' %
options['lockfile'])
try:
with FileLock(options['lockfile']):
try:
send_queued(options['processes'], options.get('log_level'))
except Exception as e:
logger.error(e, exc_info=sys.exc_info(), extra={'status_code': 500})
raise
except FileLocked:
logger.info('Failed to acquire lock, terminating now.')
Switch to using the `add_arguments` method.
This is an alternative to using the `option_list` and
`optparse.make_option`. Django deprecated the use of `optparse` in
management commands in Django 1.8 and removed it in Django 1.10.import tempfile
import sys
from django.core.management.base import BaseCommand
from ...lockfile import FileLock, FileLocked
from ...mail import send_queued
from ...logutils import setup_loghandlers
logger = setup_loghandlers()
default_lockfile = tempfile.gettempdir() + "/post_office"
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('-p', '--processes',
type='int',
help='Number of processes used to send emails',
default=1
)
parser.add_argument('-L', '--lockfile',
type='string',
default=default_lockfile,
help='Absolute path of lockfile to acquire'
)
parser.add_argument('-l', '--log-level',
type='int',
help='"0" to log nothing, "1" to only log errors'
)
def handle(self, *args, **options):
logger.info('Acquiring lock for sending queued emails at %s.lock' %
options['lockfile'])
try:
with FileLock(options['lockfile']):
try:
send_queued(options['processes'], options.get('log_level'))
except Exception as e:
logger.error(e, exc_info=sys.exc_info(), extra={'status_code': 500})
raise
except FileLocked:
logger.info('Failed to acquire lock, terminating now.')
| <commit_before>import tempfile
import sys
from optparse import make_option
from django.core.management.base import BaseCommand
from ...lockfile import FileLock, FileLocked
from ...mail import send_queued
from ...logutils import setup_loghandlers
logger = setup_loghandlers()
default_lockfile = tempfile.gettempdir() + "/post_office"
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('-p', '--processes', type='int',
help='Number of processes used to send emails', default=1),
make_option('-L', '--lockfile', type='string', default=default_lockfile,
help='Absolute path of lockfile to acquire'),
make_option('-l', '--log-level', type='int',
help='"0" to log nothing, "1" to only log errors'),
)
def handle(self, *args, **options):
logger.info('Acquiring lock for sending queued emails at %s.lock' %
options['lockfile'])
try:
with FileLock(options['lockfile']):
try:
send_queued(options['processes'], options.get('log_level'))
except Exception as e:
logger.error(e, exc_info=sys.exc_info(), extra={'status_code': 500})
raise
except FileLocked:
logger.info('Failed to acquire lock, terminating now.')
<commit_msg>Switch to using the `add_arguments` method.
This is an alternative to using the `option_list` and
`optparse.make_option`. Django deprecated the use of `optparse` in
management commands in Django 1.8 and removed it in Django 1.10.<commit_after>import tempfile
import sys
from django.core.management.base import BaseCommand
from ...lockfile import FileLock, FileLocked
from ...mail import send_queued
from ...logutils import setup_loghandlers
logger = setup_loghandlers()
default_lockfile = tempfile.gettempdir() + "/post_office"
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('-p', '--processes',
type='int',
help='Number of processes used to send emails',
default=1
)
parser.add_argument('-L', '--lockfile',
type='string',
default=default_lockfile,
help='Absolute path of lockfile to acquire'
)
parser.add_argument('-l', '--log-level',
type='int',
help='"0" to log nothing, "1" to only log errors'
)
def handle(self, *args, **options):
logger.info('Acquiring lock for sending queued emails at %s.lock' %
options['lockfile'])
try:
with FileLock(options['lockfile']):
try:
send_queued(options['processes'], options.get('log_level'))
except Exception as e:
logger.error(e, exc_info=sys.exc_info(), extra={'status_code': 500})
raise
except FileLocked:
logger.info('Failed to acquire lock, terminating now.')
|
292277abac516c412d58f1454331d9e38ddda2b3 | ca_on_candidates/people.py | ca_on_candidates/people.py | from utils import CSVScraper
from datetime import date
class OntarioCandidatesPersonScraper(CSVScraper):
csv_url = 'https://docs.google.com/spreadsheets/d/e/2PACX-1vQhrWSeOEC9DaNN2iDKcPC9IH701Al0pELevzSO62maI9WXt1TGvFH2fzUkXjUfujc3ontePcroFbT2/pub?output=csv'
encoding = 'utf-8'
updated_at = date(2018, 1, 31)
contact_person = 'andrew@newmode.net'
corrections = {
'district name': {
'Brantford-Brant': 'Brantford\u2014Brant',
}
}
def is_valid_row(self, row):
return any(row.values()) and row['last name'] and row['first name']
| from utils import CSVScraper
from datetime import date
class OntarioCandidatesPersonScraper(CSVScraper):
csv_url = 'https://docs.google.com/spreadsheets/d/e/2PACX-1vQhrWSeOEC9DaNN2iDKcPC9IH701Al0pELevzSO62maI9WXt1TGvFH2fzUkXjUfujc3ontePcroFbT2/pub?gid=881365071&single=true&output=csv'
encoding = 'utf-8'
updated_at = date(2018, 1, 31)
contact_person = 'andrew@newmode.net'
corrections = {
'district name': {
'Brantford-Brant': 'Brantford\u2014Brant',
}
}
def is_valid_row(self, row):
return any(row.values()) and row['last name'] and row['first name']
| Make CSV URL more specific | ca_on_candidates: Make CSV URL more specific
| Python | mit | opencivicdata/scrapers-ca,opencivicdata/scrapers-ca | from utils import CSVScraper
from datetime import date
class OntarioCandidatesPersonScraper(CSVScraper):
csv_url = 'https://docs.google.com/spreadsheets/d/e/2PACX-1vQhrWSeOEC9DaNN2iDKcPC9IH701Al0pELevzSO62maI9WXt1TGvFH2fzUkXjUfujc3ontePcroFbT2/pub?output=csv'
encoding = 'utf-8'
updated_at = date(2018, 1, 31)
contact_person = 'andrew@newmode.net'
corrections = {
'district name': {
'Brantford-Brant': 'Brantford\u2014Brant',
}
}
def is_valid_row(self, row):
return any(row.values()) and row['last name'] and row['first name']
ca_on_candidates: Make CSV URL more specific | from utils import CSVScraper
from datetime import date
class OntarioCandidatesPersonScraper(CSVScraper):
csv_url = 'https://docs.google.com/spreadsheets/d/e/2PACX-1vQhrWSeOEC9DaNN2iDKcPC9IH701Al0pELevzSO62maI9WXt1TGvFH2fzUkXjUfujc3ontePcroFbT2/pub?gid=881365071&single=true&output=csv'
encoding = 'utf-8'
updated_at = date(2018, 1, 31)
contact_person = 'andrew@newmode.net'
corrections = {
'district name': {
'Brantford-Brant': 'Brantford\u2014Brant',
}
}
def is_valid_row(self, row):
return any(row.values()) and row['last name'] and row['first name']
| <commit_before>from utils import CSVScraper
from datetime import date
class OntarioCandidatesPersonScraper(CSVScraper):
csv_url = 'https://docs.google.com/spreadsheets/d/e/2PACX-1vQhrWSeOEC9DaNN2iDKcPC9IH701Al0pELevzSO62maI9WXt1TGvFH2fzUkXjUfujc3ontePcroFbT2/pub?output=csv'
encoding = 'utf-8'
updated_at = date(2018, 1, 31)
contact_person = 'andrew@newmode.net'
corrections = {
'district name': {
'Brantford-Brant': 'Brantford\u2014Brant',
}
}
def is_valid_row(self, row):
return any(row.values()) and row['last name'] and row['first name']
<commit_msg>ca_on_candidates: Make CSV URL more specific<commit_after> | from utils import CSVScraper
from datetime import date
class OntarioCandidatesPersonScraper(CSVScraper):
csv_url = 'https://docs.google.com/spreadsheets/d/e/2PACX-1vQhrWSeOEC9DaNN2iDKcPC9IH701Al0pELevzSO62maI9WXt1TGvFH2fzUkXjUfujc3ontePcroFbT2/pub?gid=881365071&single=true&output=csv'
encoding = 'utf-8'
updated_at = date(2018, 1, 31)
contact_person = 'andrew@newmode.net'
corrections = {
'district name': {
'Brantford-Brant': 'Brantford\u2014Brant',
}
}
def is_valid_row(self, row):
return any(row.values()) and row['last name'] and row['first name']
| from utils import CSVScraper
from datetime import date
class OntarioCandidatesPersonScraper(CSVScraper):
csv_url = 'https://docs.google.com/spreadsheets/d/e/2PACX-1vQhrWSeOEC9DaNN2iDKcPC9IH701Al0pELevzSO62maI9WXt1TGvFH2fzUkXjUfujc3ontePcroFbT2/pub?output=csv'
encoding = 'utf-8'
updated_at = date(2018, 1, 31)
contact_person = 'andrew@newmode.net'
corrections = {
'district name': {
'Brantford-Brant': 'Brantford\u2014Brant',
}
}
def is_valid_row(self, row):
return any(row.values()) and row['last name'] and row['first name']
ca_on_candidates: Make CSV URL more specificfrom utils import CSVScraper
from datetime import date
class OntarioCandidatesPersonScraper(CSVScraper):
csv_url = 'https://docs.google.com/spreadsheets/d/e/2PACX-1vQhrWSeOEC9DaNN2iDKcPC9IH701Al0pELevzSO62maI9WXt1TGvFH2fzUkXjUfujc3ontePcroFbT2/pub?gid=881365071&single=true&output=csv'
encoding = 'utf-8'
updated_at = date(2018, 1, 31)
contact_person = 'andrew@newmode.net'
corrections = {
'district name': {
'Brantford-Brant': 'Brantford\u2014Brant',
}
}
def is_valid_row(self, row):
return any(row.values()) and row['last name'] and row['first name']
| <commit_before>from utils import CSVScraper
from datetime import date
class OntarioCandidatesPersonScraper(CSVScraper):
csv_url = 'https://docs.google.com/spreadsheets/d/e/2PACX-1vQhrWSeOEC9DaNN2iDKcPC9IH701Al0pELevzSO62maI9WXt1TGvFH2fzUkXjUfujc3ontePcroFbT2/pub?output=csv'
encoding = 'utf-8'
updated_at = date(2018, 1, 31)
contact_person = 'andrew@newmode.net'
corrections = {
'district name': {
'Brantford-Brant': 'Brantford\u2014Brant',
}
}
def is_valid_row(self, row):
return any(row.values()) and row['last name'] and row['first name']
<commit_msg>ca_on_candidates: Make CSV URL more specific<commit_after>from utils import CSVScraper
from datetime import date
class OntarioCandidatesPersonScraper(CSVScraper):
csv_url = 'https://docs.google.com/spreadsheets/d/e/2PACX-1vQhrWSeOEC9DaNN2iDKcPC9IH701Al0pELevzSO62maI9WXt1TGvFH2fzUkXjUfujc3ontePcroFbT2/pub?gid=881365071&single=true&output=csv'
encoding = 'utf-8'
updated_at = date(2018, 1, 31)
contact_person = 'andrew@newmode.net'
corrections = {
'district name': {
'Brantford-Brant': 'Brantford\u2014Brant',
}
}
def is_valid_row(self, row):
return any(row.values()) and row['last name'] and row['first name']
|
7111a61a66affcb3c60ea207084e537b2109da61 | mangaki/mangaki/management/commands/top.py | mangaki/mangaki/management/commands/top.py | from django.core.management.base import BaseCommand, CommandError
from django.db.models import Count
from django.db import connection
from mangaki.models import Rating, Anime
from collections import Counter
import json
import sys
class Command(BaseCommand):
args = ''
help = 'Builds top'
def handle(self, *args, **options):
category = sys.argv[2]
c = Counter()
values = {'favorite': 10, 'like': 2, 'neutral': 0.5, 'dislike': -1}
anime_ids = Anime.objects.exclude(**{category: 1}).values_list('id', flat=True)
nb_ratings = Counter()
nb_stars = Counter()
for rating in Rating.objects.filter(work_id__in=anime_ids).select_related('work__anime__' + category):
contestant = getattr(rating.work.anime, category)
nb_ratings[contestant] += 1
if rating.choice == 'favorite':
nb_stars[contestant] += 1
c[contestant] += values.get(rating.choice, 0)
top = []
for i, (artist, score) in enumerate(c.most_common(20)):
top.append(dict(rank=i + 1, name=str(artist), score=score, nb_ratings=nb_ratings[artist], nb_stars=nb_stars[artist]))
print(json.dumps(top))
| from django.core.management.base import BaseCommand, CommandError
from django.db.models import Count
from django.db import connection
from mangaki.models import Rating, Anime
from collections import Counter
import json
import sys
class Command(BaseCommand):
args = ''
help = 'Builds top'
def handle(self, *args, **options):
category = sys.argv[2]
c = Counter()
values = {'favorite': 10, 'like': 2, 'neutral': 0.5, 'dislike': -1}
anime_ids = Anime.objects.exclude(**{category: 1}).values_list('id', flat=True)
nb_ratings = Counter()
nb_stars = Counter()
for rating in Rating.objects.filter(work_id__in=anime_ids).select_related('work__anime__' + category):
contestant = getattr(rating.work.anime, category)
nb_ratings[contestant] += 1
if rating.choice == 'favorite':
nb_stars[contestant] += 1
c[contestant] += values.get(rating.choice, 0)
top = []
for i, (artist, score) in enumerate(c.most_common(20)):
top.append(dict(rank=i + 1, name=str(artist), id=artist.id, score=score, nb_ratings=nb_ratings[artist], nb_stars=nb_stars[artist]))
print(json.dumps(top))
| Add ID to Artist in Top | Add ID to Artist in Top
| Python | agpl-3.0 | Mako-kun/mangaki,Elarnon/mangaki,Elarnon/mangaki,Mako-kun/mangaki,Mako-kun/mangaki,Elarnon/mangaki | from django.core.management.base import BaseCommand, CommandError
from django.db.models import Count
from django.db import connection
from mangaki.models import Rating, Anime
from collections import Counter
import json
import sys
class Command(BaseCommand):
args = ''
help = 'Builds top'
def handle(self, *args, **options):
category = sys.argv[2]
c = Counter()
values = {'favorite': 10, 'like': 2, 'neutral': 0.5, 'dislike': -1}
anime_ids = Anime.objects.exclude(**{category: 1}).values_list('id', flat=True)
nb_ratings = Counter()
nb_stars = Counter()
for rating in Rating.objects.filter(work_id__in=anime_ids).select_related('work__anime__' + category):
contestant = getattr(rating.work.anime, category)
nb_ratings[contestant] += 1
if rating.choice == 'favorite':
nb_stars[contestant] += 1
c[contestant] += values.get(rating.choice, 0)
top = []
for i, (artist, score) in enumerate(c.most_common(20)):
top.append(dict(rank=i + 1, name=str(artist), score=score, nb_ratings=nb_ratings[artist], nb_stars=nb_stars[artist]))
print(json.dumps(top))
Add ID to Artist in Top | from django.core.management.base import BaseCommand, CommandError
from django.db.models import Count
from django.db import connection
from mangaki.models import Rating, Anime
from collections import Counter
import json
import sys
class Command(BaseCommand):
args = ''
help = 'Builds top'
def handle(self, *args, **options):
category = sys.argv[2]
c = Counter()
values = {'favorite': 10, 'like': 2, 'neutral': 0.5, 'dislike': -1}
anime_ids = Anime.objects.exclude(**{category: 1}).values_list('id', flat=True)
nb_ratings = Counter()
nb_stars = Counter()
for rating in Rating.objects.filter(work_id__in=anime_ids).select_related('work__anime__' + category):
contestant = getattr(rating.work.anime, category)
nb_ratings[contestant] += 1
if rating.choice == 'favorite':
nb_stars[contestant] += 1
c[contestant] += values.get(rating.choice, 0)
top = []
for i, (artist, score) in enumerate(c.most_common(20)):
top.append(dict(rank=i + 1, name=str(artist), id=artist.id, score=score, nb_ratings=nb_ratings[artist], nb_stars=nb_stars[artist]))
print(json.dumps(top))
| <commit_before>from django.core.management.base import BaseCommand, CommandError
from django.db.models import Count
from django.db import connection
from mangaki.models import Rating, Anime
from collections import Counter
import json
import sys
class Command(BaseCommand):
args = ''
help = 'Builds top'
def handle(self, *args, **options):
category = sys.argv[2]
c = Counter()
values = {'favorite': 10, 'like': 2, 'neutral': 0.5, 'dislike': -1}
anime_ids = Anime.objects.exclude(**{category: 1}).values_list('id', flat=True)
nb_ratings = Counter()
nb_stars = Counter()
for rating in Rating.objects.filter(work_id__in=anime_ids).select_related('work__anime__' + category):
contestant = getattr(rating.work.anime, category)
nb_ratings[contestant] += 1
if rating.choice == 'favorite':
nb_stars[contestant] += 1
c[contestant] += values.get(rating.choice, 0)
top = []
for i, (artist, score) in enumerate(c.most_common(20)):
top.append(dict(rank=i + 1, name=str(artist), score=score, nb_ratings=nb_ratings[artist], nb_stars=nb_stars[artist]))
print(json.dumps(top))
<commit_msg>Add ID to Artist in Top<commit_after> | from django.core.management.base import BaseCommand, CommandError
from django.db.models import Count
from django.db import connection
from mangaki.models import Rating, Anime
from collections import Counter
import json
import sys
class Command(BaseCommand):
args = ''
help = 'Builds top'
def handle(self, *args, **options):
category = sys.argv[2]
c = Counter()
values = {'favorite': 10, 'like': 2, 'neutral': 0.5, 'dislike': -1}
anime_ids = Anime.objects.exclude(**{category: 1}).values_list('id', flat=True)
nb_ratings = Counter()
nb_stars = Counter()
for rating in Rating.objects.filter(work_id__in=anime_ids).select_related('work__anime__' + category):
contestant = getattr(rating.work.anime, category)
nb_ratings[contestant] += 1
if rating.choice == 'favorite':
nb_stars[contestant] += 1
c[contestant] += values.get(rating.choice, 0)
top = []
for i, (artist, score) in enumerate(c.most_common(20)):
top.append(dict(rank=i + 1, name=str(artist), id=artist.id, score=score, nb_ratings=nb_ratings[artist], nb_stars=nb_stars[artist]))
print(json.dumps(top))
| from django.core.management.base import BaseCommand, CommandError
from django.db.models import Count
from django.db import connection
from mangaki.models import Rating, Anime
from collections import Counter
import json
import sys
class Command(BaseCommand):
args = ''
help = 'Builds top'
def handle(self, *args, **options):
category = sys.argv[2]
c = Counter()
values = {'favorite': 10, 'like': 2, 'neutral': 0.5, 'dislike': -1}
anime_ids = Anime.objects.exclude(**{category: 1}).values_list('id', flat=True)
nb_ratings = Counter()
nb_stars = Counter()
for rating in Rating.objects.filter(work_id__in=anime_ids).select_related('work__anime__' + category):
contestant = getattr(rating.work.anime, category)
nb_ratings[contestant] += 1
if rating.choice == 'favorite':
nb_stars[contestant] += 1
c[contestant] += values.get(rating.choice, 0)
top = []
for i, (artist, score) in enumerate(c.most_common(20)):
top.append(dict(rank=i + 1, name=str(artist), score=score, nb_ratings=nb_ratings[artist], nb_stars=nb_stars[artist]))
print(json.dumps(top))
Add ID to Artist in Topfrom django.core.management.base import BaseCommand, CommandError
from django.db.models import Count
from django.db import connection
from mangaki.models import Rating, Anime
from collections import Counter
import json
import sys
class Command(BaseCommand):
args = ''
help = 'Builds top'
def handle(self, *args, **options):
category = sys.argv[2]
c = Counter()
values = {'favorite': 10, 'like': 2, 'neutral': 0.5, 'dislike': -1}
anime_ids = Anime.objects.exclude(**{category: 1}).values_list('id', flat=True)
nb_ratings = Counter()
nb_stars = Counter()
for rating in Rating.objects.filter(work_id__in=anime_ids).select_related('work__anime__' + category):
contestant = getattr(rating.work.anime, category)
nb_ratings[contestant] += 1
if rating.choice == 'favorite':
nb_stars[contestant] += 1
c[contestant] += values.get(rating.choice, 0)
top = []
for i, (artist, score) in enumerate(c.most_common(20)):
top.append(dict(rank=i + 1, name=str(artist), id=artist.id, score=score, nb_ratings=nb_ratings[artist], nb_stars=nb_stars[artist]))
print(json.dumps(top))
| <commit_before>from django.core.management.base import BaseCommand, CommandError
from django.db.models import Count
from django.db import connection
from mangaki.models import Rating, Anime
from collections import Counter
import json
import sys
class Command(BaseCommand):
args = ''
help = 'Builds top'
def handle(self, *args, **options):
category = sys.argv[2]
c = Counter()
values = {'favorite': 10, 'like': 2, 'neutral': 0.5, 'dislike': -1}
anime_ids = Anime.objects.exclude(**{category: 1}).values_list('id', flat=True)
nb_ratings = Counter()
nb_stars = Counter()
for rating in Rating.objects.filter(work_id__in=anime_ids).select_related('work__anime__' + category):
contestant = getattr(rating.work.anime, category)
nb_ratings[contestant] += 1
if rating.choice == 'favorite':
nb_stars[contestant] += 1
c[contestant] += values.get(rating.choice, 0)
top = []
for i, (artist, score) in enumerate(c.most_common(20)):
top.append(dict(rank=i + 1, name=str(artist), score=score, nb_ratings=nb_ratings[artist], nb_stars=nb_stars[artist]))
print(json.dumps(top))
<commit_msg>Add ID to Artist in Top<commit_after>from django.core.management.base import BaseCommand, CommandError
from django.db.models import Count
from django.db import connection
from mangaki.models import Rating, Anime
from collections import Counter
import json
import sys
class Command(BaseCommand):
args = ''
help = 'Builds top'
def handle(self, *args, **options):
category = sys.argv[2]
c = Counter()
values = {'favorite': 10, 'like': 2, 'neutral': 0.5, 'dislike': -1}
anime_ids = Anime.objects.exclude(**{category: 1}).values_list('id', flat=True)
nb_ratings = Counter()
nb_stars = Counter()
for rating in Rating.objects.filter(work_id__in=anime_ids).select_related('work__anime__' + category):
contestant = getattr(rating.work.anime, category)
nb_ratings[contestant] += 1
if rating.choice == 'favorite':
nb_stars[contestant] += 1
c[contestant] += values.get(rating.choice, 0)
top = []
for i, (artist, score) in enumerate(c.most_common(20)):
top.append(dict(rank=i + 1, name=str(artist), id=artist.id, score=score, nb_ratings=nb_ratings[artist], nb_stars=nb_stars[artist]))
print(json.dumps(top))
|
355e0bab150f2e5c5c52b02714dfaef997dda856 | regparser/tree/xml_parser/flatsubtree_processor.py | regparser/tree/xml_parser/flatsubtree_processor.py | from regparser.tree.depth import markers as mtypes
from regparser.tree.struct import Node
from regparser.tree.xml_parser import (
paragraph_processor, simple_hierarchy_processor, us_code)
class FlatParagraphProcessor(paragraph_processor.ParagraphProcessor):
"""Paragraph Processor which does not try to derive paragraph markers"""
MATCHERS = [paragraph_processor.StarsMatcher(),
paragraph_processor.TableMatcher(),
simple_hierarchy_processor.SimpleHierarchyMatcher(
['NOTE', 'NOTES'], Node.NOTE),
paragraph_processor.HeaderMatcher(),
paragraph_processor.SimpleTagMatcher('P', 'FP'),
us_code.USCodeMatcher(),
paragraph_processor.IgnoreTagMatcher('PRTPAGE')]
class FlatsubtreeMatcher(paragraph_processor.BaseMatcher):
"""
Detects tags passed to it on init and processes them with the
FlatParagraphProcessor. Also optionally sets node_type.
"""
def __init__(self, tags, node_type=Node.REGTEXT):
self.tags = list(tags)
self.node_type = node_type
def matches(self, xml):
return xml.tag in self.tags
def derive_nodes(self, xml, processor=None):
processor = FlatParagraphProcessor()
text = (xml.text or '').strip()
node = Node(text=text, node_type=self.node_type,
label=[mtypes.MARKERLESS])
return [processor.process(xml, node)]
| from regparser.tree.depth import markers as mtypes
from regparser.tree.struct import Node
from regparser.tree.xml_parser import (
paragraph_processor, simple_hierarchy_processor, us_code)
class FlatParagraphProcessor(paragraph_processor.ParagraphProcessor):
"""Paragraph Processor which does not try to derive paragraph markers"""
MATCHERS = [paragraph_processor.StarsMatcher(),
paragraph_processor.TableMatcher(),
simple_hierarchy_processor.SimpleHierarchyMatcher(
['NOTE', 'NOTES'], Node.NOTE),
paragraph_processor.HeaderMatcher(),
paragraph_processor.SimpleTagMatcher('P', 'FP'),
us_code.USCodeMatcher(),
paragraph_processor.GraphicsMatcher(),
paragraph_processor.IgnoreTagMatcher('PRTPAGE')]
class FlatsubtreeMatcher(paragraph_processor.BaseMatcher):
"""
Detects tags passed to it on init and processes them with the
FlatParagraphProcessor. Also optionally sets node_type.
"""
def __init__(self, tags, node_type=Node.REGTEXT):
self.tags = list(tags)
self.node_type = node_type
def matches(self, xml):
return xml.tag in self.tags
def derive_nodes(self, xml, processor=None):
processor = FlatParagraphProcessor()
text = (xml.text or '').strip()
node = Node(text=text, node_type=self.node_type,
label=[mtypes.MARKERLESS])
return [processor.process(xml, node)]
| Allow images in EXTRACTs, etc. | Allow images in EXTRACTs, etc.
| Python | cc0-1.0 | tadhg-ohiggins/regulations-parser,eregs/regulations-parser,eregs/regulations-parser,tadhg-ohiggins/regulations-parser | from regparser.tree.depth import markers as mtypes
from regparser.tree.struct import Node
from regparser.tree.xml_parser import (
paragraph_processor, simple_hierarchy_processor, us_code)
class FlatParagraphProcessor(paragraph_processor.ParagraphProcessor):
"""Paragraph Processor which does not try to derive paragraph markers"""
MATCHERS = [paragraph_processor.StarsMatcher(),
paragraph_processor.TableMatcher(),
simple_hierarchy_processor.SimpleHierarchyMatcher(
['NOTE', 'NOTES'], Node.NOTE),
paragraph_processor.HeaderMatcher(),
paragraph_processor.SimpleTagMatcher('P', 'FP'),
us_code.USCodeMatcher(),
paragraph_processor.IgnoreTagMatcher('PRTPAGE')]
class FlatsubtreeMatcher(paragraph_processor.BaseMatcher):
"""
Detects tags passed to it on init and processes them with the
FlatParagraphProcessor. Also optionally sets node_type.
"""
def __init__(self, tags, node_type=Node.REGTEXT):
self.tags = list(tags)
self.node_type = node_type
def matches(self, xml):
return xml.tag in self.tags
def derive_nodes(self, xml, processor=None):
processor = FlatParagraphProcessor()
text = (xml.text or '').strip()
node = Node(text=text, node_type=self.node_type,
label=[mtypes.MARKERLESS])
return [processor.process(xml, node)]
Allow images in EXTRACTs, etc. | from regparser.tree.depth import markers as mtypes
from regparser.tree.struct import Node
from regparser.tree.xml_parser import (
paragraph_processor, simple_hierarchy_processor, us_code)
class FlatParagraphProcessor(paragraph_processor.ParagraphProcessor):
"""Paragraph Processor which does not try to derive paragraph markers"""
MATCHERS = [paragraph_processor.StarsMatcher(),
paragraph_processor.TableMatcher(),
simple_hierarchy_processor.SimpleHierarchyMatcher(
['NOTE', 'NOTES'], Node.NOTE),
paragraph_processor.HeaderMatcher(),
paragraph_processor.SimpleTagMatcher('P', 'FP'),
us_code.USCodeMatcher(),
paragraph_processor.GraphicsMatcher(),
paragraph_processor.IgnoreTagMatcher('PRTPAGE')]
class FlatsubtreeMatcher(paragraph_processor.BaseMatcher):
"""
Detects tags passed to it on init and processes them with the
FlatParagraphProcessor. Also optionally sets node_type.
"""
def __init__(self, tags, node_type=Node.REGTEXT):
self.tags = list(tags)
self.node_type = node_type
def matches(self, xml):
return xml.tag in self.tags
def derive_nodes(self, xml, processor=None):
processor = FlatParagraphProcessor()
text = (xml.text or '').strip()
node = Node(text=text, node_type=self.node_type,
label=[mtypes.MARKERLESS])
return [processor.process(xml, node)]
| <commit_before>from regparser.tree.depth import markers as mtypes
from regparser.tree.struct import Node
from regparser.tree.xml_parser import (
paragraph_processor, simple_hierarchy_processor, us_code)
class FlatParagraphProcessor(paragraph_processor.ParagraphProcessor):
"""Paragraph Processor which does not try to derive paragraph markers"""
MATCHERS = [paragraph_processor.StarsMatcher(),
paragraph_processor.TableMatcher(),
simple_hierarchy_processor.SimpleHierarchyMatcher(
['NOTE', 'NOTES'], Node.NOTE),
paragraph_processor.HeaderMatcher(),
paragraph_processor.SimpleTagMatcher('P', 'FP'),
us_code.USCodeMatcher(),
paragraph_processor.IgnoreTagMatcher('PRTPAGE')]
class FlatsubtreeMatcher(paragraph_processor.BaseMatcher):
"""
Detects tags passed to it on init and processes them with the
FlatParagraphProcessor. Also optionally sets node_type.
"""
def __init__(self, tags, node_type=Node.REGTEXT):
self.tags = list(tags)
self.node_type = node_type
def matches(self, xml):
return xml.tag in self.tags
def derive_nodes(self, xml, processor=None):
processor = FlatParagraphProcessor()
text = (xml.text or '').strip()
node = Node(text=text, node_type=self.node_type,
label=[mtypes.MARKERLESS])
return [processor.process(xml, node)]
<commit_msg>Allow images in EXTRACTs, etc.<commit_after> | from regparser.tree.depth import markers as mtypes
from regparser.tree.struct import Node
from regparser.tree.xml_parser import (
paragraph_processor, simple_hierarchy_processor, us_code)
class FlatParagraphProcessor(paragraph_processor.ParagraphProcessor):
"""Paragraph Processor which does not try to derive paragraph markers"""
MATCHERS = [paragraph_processor.StarsMatcher(),
paragraph_processor.TableMatcher(),
simple_hierarchy_processor.SimpleHierarchyMatcher(
['NOTE', 'NOTES'], Node.NOTE),
paragraph_processor.HeaderMatcher(),
paragraph_processor.SimpleTagMatcher('P', 'FP'),
us_code.USCodeMatcher(),
paragraph_processor.GraphicsMatcher(),
paragraph_processor.IgnoreTagMatcher('PRTPAGE')]
class FlatsubtreeMatcher(paragraph_processor.BaseMatcher):
"""
Detects tags passed to it on init and processes them with the
FlatParagraphProcessor. Also optionally sets node_type.
"""
def __init__(self, tags, node_type=Node.REGTEXT):
self.tags = list(tags)
self.node_type = node_type
def matches(self, xml):
return xml.tag in self.tags
def derive_nodes(self, xml, processor=None):
processor = FlatParagraphProcessor()
text = (xml.text or '').strip()
node = Node(text=text, node_type=self.node_type,
label=[mtypes.MARKERLESS])
return [processor.process(xml, node)]
| from regparser.tree.depth import markers as mtypes
from regparser.tree.struct import Node
from regparser.tree.xml_parser import (
paragraph_processor, simple_hierarchy_processor, us_code)
class FlatParagraphProcessor(paragraph_processor.ParagraphProcessor):
"""Paragraph Processor which does not try to derive paragraph markers"""
MATCHERS = [paragraph_processor.StarsMatcher(),
paragraph_processor.TableMatcher(),
simple_hierarchy_processor.SimpleHierarchyMatcher(
['NOTE', 'NOTES'], Node.NOTE),
paragraph_processor.HeaderMatcher(),
paragraph_processor.SimpleTagMatcher('P', 'FP'),
us_code.USCodeMatcher(),
paragraph_processor.IgnoreTagMatcher('PRTPAGE')]
class FlatsubtreeMatcher(paragraph_processor.BaseMatcher):
"""
Detects tags passed to it on init and processes them with the
FlatParagraphProcessor. Also optionally sets node_type.
"""
def __init__(self, tags, node_type=Node.REGTEXT):
self.tags = list(tags)
self.node_type = node_type
def matches(self, xml):
return xml.tag in self.tags
def derive_nodes(self, xml, processor=None):
processor = FlatParagraphProcessor()
text = (xml.text or '').strip()
node = Node(text=text, node_type=self.node_type,
label=[mtypes.MARKERLESS])
return [processor.process(xml, node)]
Allow images in EXTRACTs, etc.from regparser.tree.depth import markers as mtypes
from regparser.tree.struct import Node
from regparser.tree.xml_parser import (
paragraph_processor, simple_hierarchy_processor, us_code)
class FlatParagraphProcessor(paragraph_processor.ParagraphProcessor):
"""Paragraph Processor which does not try to derive paragraph markers"""
MATCHERS = [paragraph_processor.StarsMatcher(),
paragraph_processor.TableMatcher(),
simple_hierarchy_processor.SimpleHierarchyMatcher(
['NOTE', 'NOTES'], Node.NOTE),
paragraph_processor.HeaderMatcher(),
paragraph_processor.SimpleTagMatcher('P', 'FP'),
us_code.USCodeMatcher(),
paragraph_processor.GraphicsMatcher(),
paragraph_processor.IgnoreTagMatcher('PRTPAGE')]
class FlatsubtreeMatcher(paragraph_processor.BaseMatcher):
"""
Detects tags passed to it on init and processes them with the
FlatParagraphProcessor. Also optionally sets node_type.
"""
def __init__(self, tags, node_type=Node.REGTEXT):
self.tags = list(tags)
self.node_type = node_type
def matches(self, xml):
return xml.tag in self.tags
def derive_nodes(self, xml, processor=None):
processor = FlatParagraphProcessor()
text = (xml.text or '').strip()
node = Node(text=text, node_type=self.node_type,
label=[mtypes.MARKERLESS])
return [processor.process(xml, node)]
| <commit_before>from regparser.tree.depth import markers as mtypes
from regparser.tree.struct import Node
from regparser.tree.xml_parser import (
paragraph_processor, simple_hierarchy_processor, us_code)
class FlatParagraphProcessor(paragraph_processor.ParagraphProcessor):
"""Paragraph Processor which does not try to derive paragraph markers"""
MATCHERS = [paragraph_processor.StarsMatcher(),
paragraph_processor.TableMatcher(),
simple_hierarchy_processor.SimpleHierarchyMatcher(
['NOTE', 'NOTES'], Node.NOTE),
paragraph_processor.HeaderMatcher(),
paragraph_processor.SimpleTagMatcher('P', 'FP'),
us_code.USCodeMatcher(),
paragraph_processor.IgnoreTagMatcher('PRTPAGE')]
class FlatsubtreeMatcher(paragraph_processor.BaseMatcher):
"""
Detects tags passed to it on init and processes them with the
FlatParagraphProcessor. Also optionally sets node_type.
"""
def __init__(self, tags, node_type=Node.REGTEXT):
self.tags = list(tags)
self.node_type = node_type
def matches(self, xml):
return xml.tag in self.tags
def derive_nodes(self, xml, processor=None):
processor = FlatParagraphProcessor()
text = (xml.text or '').strip()
node = Node(text=text, node_type=self.node_type,
label=[mtypes.MARKERLESS])
return [processor.process(xml, node)]
<commit_msg>Allow images in EXTRACTs, etc.<commit_after>from regparser.tree.depth import markers as mtypes
from regparser.tree.struct import Node
from regparser.tree.xml_parser import (
paragraph_processor, simple_hierarchy_processor, us_code)
class FlatParagraphProcessor(paragraph_processor.ParagraphProcessor):
"""Paragraph Processor which does not try to derive paragraph markers"""
MATCHERS = [paragraph_processor.StarsMatcher(),
paragraph_processor.TableMatcher(),
simple_hierarchy_processor.SimpleHierarchyMatcher(
['NOTE', 'NOTES'], Node.NOTE),
paragraph_processor.HeaderMatcher(),
paragraph_processor.SimpleTagMatcher('P', 'FP'),
us_code.USCodeMatcher(),
paragraph_processor.GraphicsMatcher(),
paragraph_processor.IgnoreTagMatcher('PRTPAGE')]
class FlatsubtreeMatcher(paragraph_processor.BaseMatcher):
"""
Detects tags passed to it on init and processes them with the
FlatParagraphProcessor. Also optionally sets node_type.
"""
def __init__(self, tags, node_type=Node.REGTEXT):
self.tags = list(tags)
self.node_type = node_type
def matches(self, xml):
return xml.tag in self.tags
def derive_nodes(self, xml, processor=None):
processor = FlatParagraphProcessor()
text = (xml.text or '').strip()
node = Node(text=text, node_type=self.node_type,
label=[mtypes.MARKERLESS])
return [processor.process(xml, node)]
|
5e6d62ce7a567282a88530a2db80b775c9c4406e | swf/core.py | swf/core.py | # -*- coding:utf-8 -*-
# Copyright (c) 2013, Theo Crevon
# Copyright (c) 2013, Greg Leclercq
#
# See the file LICENSE for copying permission.
import boto.swf
from . import settings
SETTINGS = settings.get()
class ConnectedSWFObject(object):
    """Authenticated object interface

    Provides the instance attributes:

    - `region`: name of the AWS region
    - `connection`: to the SWF endpoint (`boto.swf.layer1.Layer1` object):

    """
    __slots__ = [
        'region',
        'connection'
    ]

    def __init__(self, *args, **kwargs):
        # Only forward the credential settings to boto: passing arbitrary
        # kwargs through to connect_to_region() raises TypeError on any
        # unexpected keyword. kwargs values act as a fallback when the
        # global SETTINGS do not define the key.
        # (Also replaces the Python-2-only ``SETTINGS.iteritems()``.)
        settings_ = {key: SETTINGS.get(key, kwargs.get(key)) for key in
                     ('aws_access_key_id',
                      'aws_secret_access_key')}
        # Region resolution order: global settings, caller kwargs, boto's
        # default region name.
        self.region = (SETTINGS.get('region') or
                       kwargs.get('region') or
                       boto.swf.layer1.Layer1.DefaultRegionName)
        # Allow a pre-built connection to be injected (eases testing);
        # otherwise open one against the resolved region.
        self.connection = (kwargs.pop('connection', None) or
                           boto.swf.connect_to_region(self.region, **settings_))
        if self.connection is None:
            raise ValueError('invalid region: {}'.format(self.region))
| # -*- coding:utf-8 -*-
# Copyright (c) 2013, Theo Crevon
# Copyright (c) 2013, Greg Leclercq
#
# See the file LICENSE for copying permission.
import boto.swf
from . import settings
SETTINGS = settings.get()
class ConnectedSWFObject(object):
"""Authenticated object interface
Provides the instance attributes:
- `region`: name of the AWS region
- `connection`: to the SWF endpoint (`boto.swf.layer1.Layer1` object):
"""
__slots__ = [
'region',
'connection'
]
def __init__(self, *args, **kwargs):
settings_ = {key: SETTINGS.get(key, kwargs.get(key)) for key in
('aws_access_key_id',
'aws_secret_access_key')}
self.region = (SETTINGS.get('region') or
kwargs.get('region') or
boto.swf.layer1.Layer1.DefaultRegionName)
self.connection = (kwargs.pop('connection', None) or
boto.swf.connect_to_region(self.region, **settings_))
if self.connection is None:
raise ValueError('invalid region: {}'.format(self.region))
| Fix ConnectedSWFObject: restrict attributes set by constructor | Fix ConnectedSWFObject: restrict attributes set by constructor
- credentials: SETTINGS | kwargs
- region: SETTINGS | kwargs | boto.swf.layer1.Layer1.DefaultRegionName
- connection: kwargs
| Python | mit | botify-labs/python-simple-workflow,botify-labs/python-simple-workflow | # -*- coding:utf-8 -*-
# Copyright (c) 2013, Theo Crevon
# Copyright (c) 2013, Greg Leclercq
#
# See the file LICENSE for copying permission.
import boto.swf
from . import settings
SETTINGS = settings.get()
class ConnectedSWFObject(object):
"""Authenticated object interface
Provides the instance attributes:
- `region`: name of the AWS region
- `connection`: to the SWF endpoint (`boto.swf.layer1.Layer1` object):
"""
__slots__ = [
'region',
'connection'
]
def __init__(self, *args, **kwargs):
settings_ = {k: v for k, v in SETTINGS.iteritems()}
settings_.update(kwargs)
self.region = (settings_.pop('region', None) or
boto.swf.layer1.Layer1.DefaultRegionName)
self.connection = boto.swf.connect_to_region(self.region, **settings_)
if self.connection is None:
raise ValueError('invalid region: {}'.format(self.region))
Fix ConnectedSWFObject: restrict attributes set by constructor
- credentials: SETTINGS | kwargs
- region: SETTINGS | kwargs | boto.swf.layer1.Layer1.DefaultRegionName
- connection: kwargs | # -*- coding:utf-8 -*-
# Copyright (c) 2013, Theo Crevon
# Copyright (c) 2013, Greg Leclercq
#
# See the file LICENSE for copying permission.
import boto.swf
from . import settings
SETTINGS = settings.get()
class ConnectedSWFObject(object):
"""Authenticated object interface
Provides the instance attributes:
- `region`: name of the AWS region
- `connection`: to the SWF endpoint (`boto.swf.layer1.Layer1` object):
"""
__slots__ = [
'region',
'connection'
]
def __init__(self, *args, **kwargs):
settings_ = {key: SETTINGS.get(key, kwargs.get(key)) for key in
('aws_access_key_id',
'aws_secret_access_key')}
self.region = (SETTINGS.get('region') or
kwargs.get('region') or
boto.swf.layer1.Layer1.DefaultRegionName)
self.connection = (kwargs.pop('connection', None) or
boto.swf.connect_to_region(self.region, **settings_))
if self.connection is None:
raise ValueError('invalid region: {}'.format(self.region))
| <commit_before># -*- coding:utf-8 -*-
# Copyright (c) 2013, Theo Crevon
# Copyright (c) 2013, Greg Leclercq
#
# See the file LICENSE for copying permission.
import boto.swf
from . import settings
SETTINGS = settings.get()
class ConnectedSWFObject(object):
"""Authenticated object interface
Provides the instance attributes:
- `region`: name of the AWS region
- `connection`: to the SWF endpoint (`boto.swf.layer1.Layer1` object):
"""
__slots__ = [
'region',
'connection'
]
def __init__(self, *args, **kwargs):
settings_ = {k: v for k, v in SETTINGS.iteritems()}
settings_.update(kwargs)
self.region = (settings_.pop('region', None) or
boto.swf.layer1.Layer1.DefaultRegionName)
self.connection = boto.swf.connect_to_region(self.region, **settings_)
if self.connection is None:
raise ValueError('invalid region: {}'.format(self.region))
<commit_msg>Fix ConnectedSWFObject: restrict attributes set by constructor
- credentials: SETTINGS | kwargs
- region: SETTINGS | kwargs | boto.swf.layer1.Layer1.DefaultRegionName
- connection: kwargs<commit_after> | # -*- coding:utf-8 -*-
# Copyright (c) 2013, Theo Crevon
# Copyright (c) 2013, Greg Leclercq
#
# See the file LICENSE for copying permission.
import boto.swf
from . import settings
SETTINGS = settings.get()
class ConnectedSWFObject(object):
"""Authenticated object interface
Provides the instance attributes:
- `region`: name of the AWS region
- `connection`: to the SWF endpoint (`boto.swf.layer1.Layer1` object):
"""
__slots__ = [
'region',
'connection'
]
def __init__(self, *args, **kwargs):
settings_ = {key: SETTINGS.get(key, kwargs.get(key)) for key in
('aws_access_key_id',
'aws_secret_access_key')}
self.region = (SETTINGS.get('region') or
kwargs.get('region') or
boto.swf.layer1.Layer1.DefaultRegionName)
self.connection = (kwargs.pop('connection', None) or
boto.swf.connect_to_region(self.region, **settings_))
if self.connection is None:
raise ValueError('invalid region: {}'.format(self.region))
| # -*- coding:utf-8 -*-
# Copyright (c) 2013, Theo Crevon
# Copyright (c) 2013, Greg Leclercq
#
# See the file LICENSE for copying permission.
import boto.swf
from . import settings
SETTINGS = settings.get()
class ConnectedSWFObject(object):
"""Authenticated object interface
Provides the instance attributes:
- `region`: name of the AWS region
- `connection`: to the SWF endpoint (`boto.swf.layer1.Layer1` object):
"""
__slots__ = [
'region',
'connection'
]
def __init__(self, *args, **kwargs):
settings_ = {k: v for k, v in SETTINGS.iteritems()}
settings_.update(kwargs)
self.region = (settings_.pop('region', None) or
boto.swf.layer1.Layer1.DefaultRegionName)
self.connection = boto.swf.connect_to_region(self.region, **settings_)
if self.connection is None:
raise ValueError('invalid region: {}'.format(self.region))
Fix ConnectedSWFObject: restrict attributes set by constructor
- credentials: SETTINGS | kwargs
- region: SETTINGS | kwargs | boto.swf.layer1.Layer1.DefaultRegionName
- connection: kwargs# -*- coding:utf-8 -*-
# Copyright (c) 2013, Theo Crevon
# Copyright (c) 2013, Greg Leclercq
#
# See the file LICENSE for copying permission.
import boto.swf
from . import settings
SETTINGS = settings.get()
class ConnectedSWFObject(object):
"""Authenticated object interface
Provides the instance attributes:
- `region`: name of the AWS region
- `connection`: to the SWF endpoint (`boto.swf.layer1.Layer1` object):
"""
__slots__ = [
'region',
'connection'
]
def __init__(self, *args, **kwargs):
settings_ = {key: SETTINGS.get(key, kwargs.get(key)) for key in
('aws_access_key_id',
'aws_secret_access_key')}
self.region = (SETTINGS.get('region') or
kwargs.get('region') or
boto.swf.layer1.Layer1.DefaultRegionName)
self.connection = (kwargs.pop('connection', None) or
boto.swf.connect_to_region(self.region, **settings_))
if self.connection is None:
raise ValueError('invalid region: {}'.format(self.region))
| <commit_before># -*- coding:utf-8 -*-
# Copyright (c) 2013, Theo Crevon
# Copyright (c) 2013, Greg Leclercq
#
# See the file LICENSE for copying permission.
import boto.swf
from . import settings
SETTINGS = settings.get()
class ConnectedSWFObject(object):
"""Authenticated object interface
Provides the instance attributes:
- `region`: name of the AWS region
- `connection`: to the SWF endpoint (`boto.swf.layer1.Layer1` object):
"""
__slots__ = [
'region',
'connection'
]
def __init__(self, *args, **kwargs):
settings_ = {k: v for k, v in SETTINGS.iteritems()}
settings_.update(kwargs)
self.region = (settings_.pop('region', None) or
boto.swf.layer1.Layer1.DefaultRegionName)
self.connection = boto.swf.connect_to_region(self.region, **settings_)
if self.connection is None:
raise ValueError('invalid region: {}'.format(self.region))
<commit_msg>Fix ConnectedSWFObject: restrict attributes set by constructor
- credentials: SETTINGS | kwargs
- region: SETTINGS | kwargs | boto.swf.layer1.Layer1.DefaultRegionName
- connection: kwargs<commit_after># -*- coding:utf-8 -*-
# Copyright (c) 2013, Theo Crevon
# Copyright (c) 2013, Greg Leclercq
#
# See the file LICENSE for copying permission.
import boto.swf
from . import settings
SETTINGS = settings.get()
class ConnectedSWFObject(object):
"""Authenticated object interface
Provides the instance attributes:
- `region`: name of the AWS region
- `connection`: to the SWF endpoint (`boto.swf.layer1.Layer1` object):
"""
__slots__ = [
'region',
'connection'
]
def __init__(self, *args, **kwargs):
settings_ = {key: SETTINGS.get(key, kwargs.get(key)) for key in
('aws_access_key_id',
'aws_secret_access_key')}
self.region = (SETTINGS.get('region') or
kwargs.get('region') or
boto.swf.layer1.Layer1.DefaultRegionName)
self.connection = (kwargs.pop('connection', None) or
boto.swf.connect_to_region(self.region, **settings_))
if self.connection is None:
raise ValueError('invalid region: {}'.format(self.region))
|
0dde910ff5f44aaa265e9439d006effde3c68a3f | plugins/brian.py | plugins/brian.py | """Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
def generate_phrase(phrases, cache):
    """Build a Markov-chain sentence from *phrases* using the bigram *cache*.

    A randomly chosen phrase of at least three words seeds the first two
    words; the chain then follows ``cache["w1|w2"]`` transitions until no
    entry exists for the current bigram.
    """
    words = []
    while len(words) < 3:
        words = random.choice(phrases).split()
    first, second = words[0], words[1]
    output = [first, second]
    key = "{}|{}".format(first, second)
    while key in cache:
        nxt = random.choice(cache[key])
        output.append(nxt)
        first, second = second, nxt
        key = "{}|{}".format(first, second)
    return ' '.join(output)
def on_message(bot, channel, user, message):
    """Handle a ``!brian`` command with a generated quote, blockquoted.

    The ``bot``, ``channel``, ``user`` and ``message`` arguments are part
    of the plugin callback signature; none are consulted here.
    """
    return '> {}'.format(generate_phrase(phrases, cache))
| """Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
def generate_phrase(phrases, cache):
seed_phrase = []
while len(seed_phrase) < 3:
seed_phrase = random.choice(phrases).split()
w1, w2 = seed_phrase[:2]
chosen = [w1, w2]
while "{}|{}".format(w1, w2) in cache:
choice = random.choice(cache["{}|{}".format(w1, w2)])
w1, w2 = w2, choice
chosen.append(choice)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
    """Handle a ``!brian`` command: return a generated quote, blockquoted
    and attributed with a trailing ``~brian``.

    The ``bot``, ``channel``, ``user`` and ``message`` arguments are part
    of the plugin callback signature; none are consulted here.
    """
    return '> {} ~brian'.format(generate_phrase(phrases, cache))
| Add attribution to quotes in plugin | Add attribution to quotes in plugin
| Python | mit | kvchen/keffbot-py,kvchen/keffbot | """Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
def generate_phrase(phrases, cache):
seed_phrase = []
while len(seed_phrase) < 3:
seed_phrase = random.choice(phrases).split()
w1, w2 = seed_phrase[:2]
chosen = [w1, w2]
while "{}|{}".format(w1, w2) in cache:
choice = random.choice(cache["{}|{}".format(w1, w2)])
w1, w2 = w2, choice
chosen.append(choice)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return '> {}'.format(generate_phrase(phrases, cache))
Add attribution to quotes in plugin | """Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
def generate_phrase(phrases, cache):
seed_phrase = []
while len(seed_phrase) < 3:
seed_phrase = random.choice(phrases).split()
w1, w2 = seed_phrase[:2]
chosen = [w1, w2]
while "{}|{}".format(w1, w2) in cache:
choice = random.choice(cache["{}|{}".format(w1, w2)])
w1, w2 = w2, choice
chosen.append(choice)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return '> {} ~brian'.format(generate_phrase(phrases, cache))
| <commit_before>"""Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
def generate_phrase(phrases, cache):
seed_phrase = []
while len(seed_phrase) < 3:
seed_phrase = random.choice(phrases).split()
w1, w2 = seed_phrase[:2]
chosen = [w1, w2]
while "{}|{}".format(w1, w2) in cache:
choice = random.choice(cache["{}|{}".format(w1, w2)])
w1, w2 = w2, choice
chosen.append(choice)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return '> {}'.format(generate_phrase(phrases, cache))
<commit_msg>Add attribution to quotes in plugin<commit_after> | """Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
def generate_phrase(phrases, cache):
seed_phrase = []
while len(seed_phrase) < 3:
seed_phrase = random.choice(phrases).split()
w1, w2 = seed_phrase[:2]
chosen = [w1, w2]
while "{}|{}".format(w1, w2) in cache:
choice = random.choice(cache["{}|{}".format(w1, w2)])
w1, w2 = w2, choice
chosen.append(choice)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return '> {} ~brian'.format(generate_phrase(phrases, cache))
| """Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
def generate_phrase(phrases, cache):
seed_phrase = []
while len(seed_phrase) < 3:
seed_phrase = random.choice(phrases).split()
w1, w2 = seed_phrase[:2]
chosen = [w1, w2]
while "{}|{}".format(w1, w2) in cache:
choice = random.choice(cache["{}|{}".format(w1, w2)])
w1, w2 = w2, choice
chosen.append(choice)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return '> {}'.format(generate_phrase(phrases, cache))
Add attribution to quotes in plugin"""Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
def generate_phrase(phrases, cache):
seed_phrase = []
while len(seed_phrase) < 3:
seed_phrase = random.choice(phrases).split()
w1, w2 = seed_phrase[:2]
chosen = [w1, w2]
while "{}|{}".format(w1, w2) in cache:
choice = random.choice(cache["{}|{}".format(w1, w2)])
w1, w2 = w2, choice
chosen.append(choice)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return '> {} ~brian'.format(generate_phrase(phrases, cache))
| <commit_before>"""Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
def generate_phrase(phrases, cache):
seed_phrase = []
while len(seed_phrase) < 3:
seed_phrase = random.choice(phrases).split()
w1, w2 = seed_phrase[:2]
chosen = [w1, w2]
while "{}|{}".format(w1, w2) in cache:
choice = random.choice(cache["{}|{}".format(w1, w2)])
w1, w2 = w2, choice
chosen.append(choice)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return '> {}'.format(generate_phrase(phrases, cache))
<commit_msg>Add attribution to quotes in plugin<commit_after>"""Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
def generate_phrase(phrases, cache):
seed_phrase = []
while len(seed_phrase) < 3:
seed_phrase = random.choice(phrases).split()
w1, w2 = seed_phrase[:2]
chosen = [w1, w2]
while "{}|{}".format(w1, w2) in cache:
choice = random.choice(cache["{}|{}".format(w1, w2)])
w1, w2 = w2, choice
chosen.append(choice)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return '> {} ~brian'.format(generate_phrase(phrases, cache))
|
862c2bdeaab094afdd61db862be54a8c4b7c08f3 | corehq/apps/users/admin.py | corehq/apps/users/admin.py | from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django_digest.models import PartialDigest, UserNonce
from .models import DomainPermissionsMirror
class DDUserNonceAdmin(admin.ModelAdmin):
    """Admin changelist for django-digest ``UserNonce`` records."""
    # Columns shown in the nonce changelist.
    list_display = ('user', 'nonce', 'count', 'last_used_at')
class DDPartialDigestAdmin(admin.ModelAdmin):
    """Admin changelist for django-digest ``PartialDigest`` records."""
    list_display = ('user', 'partial_digest', 'confirmed')
    # NOTE(review): 'login' is not among the displayed columns; presumably
    # a field on PartialDigest -- confirm against django_digest.models.
    search_fields = ('login',)
admin.site.register(UserNonce, DDUserNonceAdmin)
admin.site.register(PartialDigest, DDPartialDigestAdmin)
class CustomUserAdmin(UserAdmin):
def has_add_permission(self, request):
return False
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
class DomainPermissionsMirrorAdmin(admin.ModelAdmin):
list_display = ['source', 'mirror']
list_filter = ['source', 'mirror']
admin.site.register(DomainPermissionsMirror, DomainPermissionsMirrorAdmin)
| from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django_digest.models import PartialDigest, UserNonce
from .models import DomainPermissionsMirror, HQApiKey
class DDUserNonceAdmin(admin.ModelAdmin):
list_display = ('user', 'nonce', 'count', 'last_used_at')
class DDPartialDigestAdmin(admin.ModelAdmin):
list_display = ('user', 'partial_digest', 'confirmed')
search_fields = ('login',)
admin.site.register(UserNonce, DDUserNonceAdmin)
admin.site.register(PartialDigest, DDPartialDigestAdmin)
class ApiKeyInline(admin.TabularInline):
    """Inline editor for a user's API keys on the admin user page."""
    model = HQApiKey
    # Key material and creation time are system-generated; display only.
    readonly_fields = ['key', 'created']
    # Show one blank row for adding another key.
    extra = 1
class CustomUserAdmin(UserAdmin):
    """User admin with inline API-key management and no "Add user" button."""
    inlines = [
        ApiKeyInline,
    ]

    def has_add_permission(self, request):
        # Disable user creation via the Django admin entirely.
        return False
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
class DomainPermissionsMirrorAdmin(admin.ModelAdmin):
list_display = ['source', 'mirror']
list_filter = ['source', 'mirror']
admin.site.register(DomainPermissionsMirror, DomainPermissionsMirrorAdmin)
| Add ApiKey to Users page in Django Admin | Add ApiKey to Users page in Django Admin
| Python | bsd-3-clause | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django_digest.models import PartialDigest, UserNonce
from .models import DomainPermissionsMirror
class DDUserNonceAdmin(admin.ModelAdmin):
list_display = ('user', 'nonce', 'count', 'last_used_at')
class DDPartialDigestAdmin(admin.ModelAdmin):
list_display = ('user', 'partial_digest', 'confirmed')
search_fields = ('login',)
admin.site.register(UserNonce, DDUserNonceAdmin)
admin.site.register(PartialDigest, DDPartialDigestAdmin)
class CustomUserAdmin(UserAdmin):
def has_add_permission(self, request):
return False
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
class DomainPermissionsMirrorAdmin(admin.ModelAdmin):
list_display = ['source', 'mirror']
list_filter = ['source', 'mirror']
admin.site.register(DomainPermissionsMirror, DomainPermissionsMirrorAdmin)
Add ApiKey to Users page in Django Admin | from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django_digest.models import PartialDigest, UserNonce
from .models import DomainPermissionsMirror, HQApiKey
class DDUserNonceAdmin(admin.ModelAdmin):
list_display = ('user', 'nonce', 'count', 'last_used_at')
class DDPartialDigestAdmin(admin.ModelAdmin):
list_display = ('user', 'partial_digest', 'confirmed')
search_fields = ('login',)
admin.site.register(UserNonce, DDUserNonceAdmin)
admin.site.register(PartialDigest, DDPartialDigestAdmin)
class ApiKeyInline(admin.TabularInline):
model = HQApiKey
readonly_fields = ['key', 'created']
extra = 1
class CustomUserAdmin(UserAdmin):
inlines = [
ApiKeyInline,
]
def has_add_permission(self, request):
return False
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
class DomainPermissionsMirrorAdmin(admin.ModelAdmin):
list_display = ['source', 'mirror']
list_filter = ['source', 'mirror']
admin.site.register(DomainPermissionsMirror, DomainPermissionsMirrorAdmin)
| <commit_before>from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django_digest.models import PartialDigest, UserNonce
from .models import DomainPermissionsMirror
class DDUserNonceAdmin(admin.ModelAdmin):
list_display = ('user', 'nonce', 'count', 'last_used_at')
class DDPartialDigestAdmin(admin.ModelAdmin):
list_display = ('user', 'partial_digest', 'confirmed')
search_fields = ('login',)
admin.site.register(UserNonce, DDUserNonceAdmin)
admin.site.register(PartialDigest, DDPartialDigestAdmin)
class CustomUserAdmin(UserAdmin):
def has_add_permission(self, request):
return False
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
class DomainPermissionsMirrorAdmin(admin.ModelAdmin):
list_display = ['source', 'mirror']
list_filter = ['source', 'mirror']
admin.site.register(DomainPermissionsMirror, DomainPermissionsMirrorAdmin)
<commit_msg>Add ApiKey to Users page in Django Admin<commit_after> | from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django_digest.models import PartialDigest, UserNonce
from .models import DomainPermissionsMirror, HQApiKey
class DDUserNonceAdmin(admin.ModelAdmin):
list_display = ('user', 'nonce', 'count', 'last_used_at')
class DDPartialDigestAdmin(admin.ModelAdmin):
list_display = ('user', 'partial_digest', 'confirmed')
search_fields = ('login',)
admin.site.register(UserNonce, DDUserNonceAdmin)
admin.site.register(PartialDigest, DDPartialDigestAdmin)
class ApiKeyInline(admin.TabularInline):
model = HQApiKey
readonly_fields = ['key', 'created']
extra = 1
class CustomUserAdmin(UserAdmin):
inlines = [
ApiKeyInline,
]
def has_add_permission(self, request):
return False
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
class DomainPermissionsMirrorAdmin(admin.ModelAdmin):
list_display = ['source', 'mirror']
list_filter = ['source', 'mirror']
admin.site.register(DomainPermissionsMirror, DomainPermissionsMirrorAdmin)
| from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django_digest.models import PartialDigest, UserNonce
from .models import DomainPermissionsMirror
class DDUserNonceAdmin(admin.ModelAdmin):
list_display = ('user', 'nonce', 'count', 'last_used_at')
class DDPartialDigestAdmin(admin.ModelAdmin):
list_display = ('user', 'partial_digest', 'confirmed')
search_fields = ('login',)
admin.site.register(UserNonce, DDUserNonceAdmin)
admin.site.register(PartialDigest, DDPartialDigestAdmin)
class CustomUserAdmin(UserAdmin):
def has_add_permission(self, request):
return False
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
class DomainPermissionsMirrorAdmin(admin.ModelAdmin):
list_display = ['source', 'mirror']
list_filter = ['source', 'mirror']
admin.site.register(DomainPermissionsMirror, DomainPermissionsMirrorAdmin)
Add ApiKey to Users page in Django Adminfrom django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django_digest.models import PartialDigest, UserNonce
from .models import DomainPermissionsMirror, HQApiKey
class DDUserNonceAdmin(admin.ModelAdmin):
list_display = ('user', 'nonce', 'count', 'last_used_at')
class DDPartialDigestAdmin(admin.ModelAdmin):
list_display = ('user', 'partial_digest', 'confirmed')
search_fields = ('login',)
admin.site.register(UserNonce, DDUserNonceAdmin)
admin.site.register(PartialDigest, DDPartialDigestAdmin)
class ApiKeyInline(admin.TabularInline):
model = HQApiKey
readonly_fields = ['key', 'created']
extra = 1
class CustomUserAdmin(UserAdmin):
inlines = [
ApiKeyInline,
]
def has_add_permission(self, request):
return False
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
class DomainPermissionsMirrorAdmin(admin.ModelAdmin):
list_display = ['source', 'mirror']
list_filter = ['source', 'mirror']
admin.site.register(DomainPermissionsMirror, DomainPermissionsMirrorAdmin)
| <commit_before>from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django_digest.models import PartialDigest, UserNonce
from .models import DomainPermissionsMirror
class DDUserNonceAdmin(admin.ModelAdmin):
list_display = ('user', 'nonce', 'count', 'last_used_at')
class DDPartialDigestAdmin(admin.ModelAdmin):
list_display = ('user', 'partial_digest', 'confirmed')
search_fields = ('login',)
admin.site.register(UserNonce, DDUserNonceAdmin)
admin.site.register(PartialDigest, DDPartialDigestAdmin)
class CustomUserAdmin(UserAdmin):
def has_add_permission(self, request):
return False
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
class DomainPermissionsMirrorAdmin(admin.ModelAdmin):
list_display = ['source', 'mirror']
list_filter = ['source', 'mirror']
admin.site.register(DomainPermissionsMirror, DomainPermissionsMirrorAdmin)
<commit_msg>Add ApiKey to Users page in Django Admin<commit_after>from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django_digest.models import PartialDigest, UserNonce
from .models import DomainPermissionsMirror, HQApiKey
class DDUserNonceAdmin(admin.ModelAdmin):
list_display = ('user', 'nonce', 'count', 'last_used_at')
class DDPartialDigestAdmin(admin.ModelAdmin):
list_display = ('user', 'partial_digest', 'confirmed')
search_fields = ('login',)
admin.site.register(UserNonce, DDUserNonceAdmin)
admin.site.register(PartialDigest, DDPartialDigestAdmin)
class ApiKeyInline(admin.TabularInline):
model = HQApiKey
readonly_fields = ['key', 'created']
extra = 1
class CustomUserAdmin(UserAdmin):
inlines = [
ApiKeyInline,
]
def has_add_permission(self, request):
return False
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
class DomainPermissionsMirrorAdmin(admin.ModelAdmin):
list_display = ['source', 'mirror']
list_filter = ['source', 'mirror']
admin.site.register(DomainPermissionsMirror, DomainPermissionsMirrorAdmin)
|
0470d1243ad2d7e7fd086c2b2f695dc431eaf2ea | pycroscopy/io/translators/df_utils/beps_gen_utils.py | pycroscopy/io/translators/df_utils/beps_gen_utils.py |
import numpy as np
def combine_in_out_field_loops(in_vec, out_vec):
    """Stack the in-field and out-of-field responses row-wise.

    Parameters
    ----------
    in_vec : array-like
        Response for the in-field portion of the loop.
    out_vec : array-like
        Response for the out-of-field portion of the loop.

    Returns
    -------
    numpy.ndarray
        ``in_vec`` stacked on top of ``out_vec`` along the first axis.
    """
    stacked = np.vstack([in_vec, out_vec])
    return stacked
def build_loop_from_mat(loop_mat, num_steps):
    """Splice the two branch blocks in ``loop_mat`` into one full loop.

    The first quarter (plus one step) of branch 0 is followed by all of
    branch 1, then the remainder of branch 0 up to the half-way point,
    stacked along the first axis.

    Parameters
    ----------
    loop_mat : numpy.ndarray
        Array whose first axis indexes the two loop branches.
    num_steps : int
        Total number of voltage steps in the full loop.

    Returns
    -------
    numpy.ndarray
        The reassembled loop.
    """
    quarter = int(num_steps / 4)
    half = int(num_steps / 2)
    opening_segment = loop_mat[0, :quarter + 1]
    middle_segment = loop_mat[1]
    closing_segment = loop_mat[0, quarter + 1:half]
    return np.vstack((opening_segment, middle_segment, closing_segment))
def get_noise_vec(num_pts, noise_coeff):
    """Return a length-``num_pts`` vector of multiplicative noise factors.

    Values are drawn uniformly from roughly
    ``[1 - noise_coeff / 2, 1 + noise_coeff / 2]``, so a coefficient of 0
    yields a vector of ones.

    Parameters
    ----------
    num_pts : int
        Number of noise samples to generate.
    noise_coeff : float
        Total width of the noise band around 1.

    Returns
    -------
    numpy.ndarray
        The noise vector.
    """
    upper_bound = 1 + 0.5 * noise_coeff
    return upper_bound * np.ones(num_pts) - noise_coeff * np.random.random(num_pts)
|
import numpy as np
import os
# Absolute path to the bundled images used by the fake BEPS data generator;
# resolved relative to this module so it works from any working directory.
beps_image_folder = os.path.abspath(os.path.join(os.path.realpath(__file__), '../beps_data_gen_images'))
def combine_in_out_field_loops(in_vec, out_vec):
"""
Parameters
----------
in_vec
out_vec
Returns
-------
"""
return np.vstack((in_vec, out_vec))
def build_loop_from_mat(loop_mat, num_steps):
"""
Parameters
----------
loop_mat
num_steps
Returns
-------
"""
return np.vstack((loop_mat[0, :int(num_steps / 4) + 1],
loop_mat[1],
loop_mat[0, int(num_steps / 4) + 1: int(num_steps / 2)]))
def get_noise_vec(num_pts, noise_coeff):
"""
Parameters
----------
num_pts
noise_coeff
Returns
-------
"""
return np.ones(num_pts) * (1 + 0.5 * noise_coeff) - np.random.random(num_pts) * noise_coeff
| Define a path to the image folder for fake beps generator | Define a path to the image folder for fake beps generator
Path is defined in beps_gen_utils
Users can still provide their own images if they want. | Python | mit | anugrah-saxena/pycroscopy,pycroscopy/pycroscopy |
import numpy as np
def combine_in_out_field_loops(in_vec, out_vec):
"""
Parameters
----------
in_vec
out_vec
Returns
-------
"""
return np.vstack((in_vec, out_vec))
def build_loop_from_mat(loop_mat, num_steps):
"""
Parameters
----------
loop_mat
num_steps
Returns
-------
"""
return np.vstack((loop_mat[0, :int(num_steps / 4) + 1],
loop_mat[1],
loop_mat[0, int(num_steps / 4) + 1: int(num_steps / 2)]))
def get_noise_vec(num_pts, noise_coeff):
"""
Parameters
----------
num_pts
noise_coeff
Returns
-------
"""
return np.ones(num_pts) * (1 + 0.5 * noise_coeff) - np.random.random(num_pts) * noise_coeff
Define a path to the image folder for fake beps generator
Path is defined in beps_gen_utils
Users can still provide their own images if they want. |
import numpy as np
import os
beps_image_folder = os.path.abspath(os.path.join(os.path.realpath(__file__), '../beps_data_gen_images'))
def combine_in_out_field_loops(in_vec, out_vec):
"""
Parameters
----------
in_vec
out_vec
Returns
-------
"""
return np.vstack((in_vec, out_vec))
def build_loop_from_mat(loop_mat, num_steps):
"""
Parameters
----------
loop_mat
num_steps
Returns
-------
"""
return np.vstack((loop_mat[0, :int(num_steps / 4) + 1],
loop_mat[1],
loop_mat[0, int(num_steps / 4) + 1: int(num_steps / 2)]))
def get_noise_vec(num_pts, noise_coeff):
"""
Parameters
----------
num_pts
noise_coeff
Returns
-------
"""
return np.ones(num_pts) * (1 + 0.5 * noise_coeff) - np.random.random(num_pts) * noise_coeff
| <commit_before>
import numpy as np
def combine_in_out_field_loops(in_vec, out_vec):
"""
Parameters
----------
in_vec
out_vec
Returns
-------
"""
return np.vstack((in_vec, out_vec))
def build_loop_from_mat(loop_mat, num_steps):
"""
Parameters
----------
loop_mat
num_steps
Returns
-------
"""
return np.vstack((loop_mat[0, :int(num_steps / 4) + 1],
loop_mat[1],
loop_mat[0, int(num_steps / 4) + 1: int(num_steps / 2)]))
def get_noise_vec(num_pts, noise_coeff):
"""
Parameters
----------
num_pts
noise_coeff
Returns
-------
"""
return np.ones(num_pts) * (1 + 0.5 * noise_coeff) - np.random.random(num_pts) * noise_coeff
<commit_msg>Define a path to the image folder for fake beps generator
Path is defined in beps_gen_utils
Users can still provide their own images if they want.<commit_after> |
import numpy as np
import os
beps_image_folder = os.path.abspath(os.path.join(os.path.realpath(__file__), '../beps_data_gen_images'))
def combine_in_out_field_loops(in_vec, out_vec):
"""
Parameters
----------
in_vec
out_vec
Returns
-------
"""
return np.vstack((in_vec, out_vec))
def build_loop_from_mat(loop_mat, num_steps):
"""
Parameters
----------
loop_mat
num_steps
Returns
-------
"""
return np.vstack((loop_mat[0, :int(num_steps / 4) + 1],
loop_mat[1],
loop_mat[0, int(num_steps / 4) + 1: int(num_steps / 2)]))
def get_noise_vec(num_pts, noise_coeff):
"""
Parameters
----------
num_pts
noise_coeff
Returns
-------
"""
return np.ones(num_pts) * (1 + 0.5 * noise_coeff) - np.random.random(num_pts) * noise_coeff
|
import numpy as np
def combine_in_out_field_loops(in_vec, out_vec):
"""
Parameters
----------
in_vec
out_vec
Returns
-------
"""
return np.vstack((in_vec, out_vec))
def build_loop_from_mat(loop_mat, num_steps):
"""
Parameters
----------
loop_mat
num_steps
Returns
-------
"""
return np.vstack((loop_mat[0, :int(num_steps / 4) + 1],
loop_mat[1],
loop_mat[0, int(num_steps / 4) + 1: int(num_steps / 2)]))
def get_noise_vec(num_pts, noise_coeff):
"""
Parameters
----------
num_pts
noise_coeff
Returns
-------
"""
return np.ones(num_pts) * (1 + 0.5 * noise_coeff) - np.random.random(num_pts) * noise_coeff
Define a path to the image folder for fake beps generator
Path is defined in beps_gen_utils
Users can still provide their own images if they want.
import numpy as np
import os
beps_image_folder = os.path.abspath(os.path.join(os.path.realpath(__file__), '../beps_data_gen_images'))
def combine_in_out_field_loops(in_vec, out_vec):
"""
Parameters
----------
in_vec
out_vec
Returns
-------
"""
return np.vstack((in_vec, out_vec))
def build_loop_from_mat(loop_mat, num_steps):
"""
Parameters
----------
loop_mat
num_steps
Returns
-------
"""
return np.vstack((loop_mat[0, :int(num_steps / 4) + 1],
loop_mat[1],
loop_mat[0, int(num_steps / 4) + 1: int(num_steps / 2)]))
def get_noise_vec(num_pts, noise_coeff):
"""
Parameters
----------
num_pts
noise_coeff
Returns
-------
"""
return np.ones(num_pts) * (1 + 0.5 * noise_coeff) - np.random.random(num_pts) * noise_coeff
| <commit_before>
import numpy as np
def combine_in_out_field_loops(in_vec, out_vec):
"""
Parameters
----------
in_vec
out_vec
Returns
-------
"""
return np.vstack((in_vec, out_vec))
def build_loop_from_mat(loop_mat, num_steps):
"""
Parameters
----------
loop_mat
num_steps
Returns
-------
"""
return np.vstack((loop_mat[0, :int(num_steps / 4) + 1],
loop_mat[1],
loop_mat[0, int(num_steps / 4) + 1: int(num_steps / 2)]))
def get_noise_vec(num_pts, noise_coeff):
"""
Parameters
----------
num_pts
noise_coeff
Returns
-------
"""
return np.ones(num_pts) * (1 + 0.5 * noise_coeff) - np.random.random(num_pts) * noise_coeff
<commit_msg>Define a path to the image folder for fake beps generator
Path is defined in beps_gen_utils
Users can still provide their own images if they want.<commit_after>
import numpy as np
import os
beps_image_folder = os.path.abspath(os.path.join(os.path.realpath(__file__), '../beps_data_gen_images'))
def combine_in_out_field_loops(in_vec, out_vec):
"""
Parameters
----------
in_vec
out_vec
Returns
-------
"""
return np.vstack((in_vec, out_vec))
def build_loop_from_mat(loop_mat, num_steps):
"""
Parameters
----------
loop_mat
num_steps
Returns
-------
"""
return np.vstack((loop_mat[0, :int(num_steps / 4) + 1],
loop_mat[1],
loop_mat[0, int(num_steps / 4) + 1: int(num_steps / 2)]))
def get_noise_vec(num_pts, noise_coeff):
"""
Parameters
----------
num_pts
noise_coeff
Returns
-------
"""
return np.ones(num_pts) * (1 + 0.5 * noise_coeff) - np.random.random(num_pts) * noise_coeff
|
8eaf39db81deeeddd9b9035caa1c249f68d1d96f | ioant/ioant/utils/utils.py | ioant/ioant/utils/utils.py | import os
import sys
import json
def open_file_as_string(filepath):
with open(filepath, 'r') as ftemp:
templateString = ftemp.read()
return templateString
def return_absolut_path(script_path, relative_path):
return os.path.realpath(os.path.join(script_path, relative_path))
def fetch_json_file_as_dict(path_to_json):
#db_schema_path = return_absolut_path(script_path, relative_path)
json_str = open_file_as_string(path_to_json)
json_dict = json.loads(json_str)
return json_dict
if __name__ == '__main__':
unittest.main()
| import os
import sys
import json
def open_file_as_string(filepath):
with open(filepath, 'r') as ftemp:
templateString = ftemp.read()
return templateString
def return_absolut_path(script_path, relative_path):
return os.path.realpath(os.path.join(script_path, relative_path))
def fetch_json_file_as_dict(path_to_json):
#db_schema_path = return_absolut_path(script_path, relative_path)
json_str = open_file_as_string(path_to_json)
json_dict = json.loads(json_str)
return json_dict
def topic_to_dict(topic):
sub_topics_list = topic.split('/')
if len(sub_topics_list) is not 6:
return None
else:
topic_dict = {}
topic_dict['top'] = sub_topics_list[0]
topic_dict['global'] = sub_topics_list[1]
topic_dict['local'] = sub_topics_list[2]
topic_dict['client_id'] = sub_topics_list[3]
topic_dict['message_type'] = int(sub_topics_list[4])
topic_dict['stream_index'] = int(sub_topics_list[5])
return topic_dict
if __name__ == '__main__':
unittest.main()
| Add function for backward compability | Add function for backward compability
| Python | mit | ioants/pypi-packages,ioants/pypi-packages,ioants/pypi-packages | import os
import sys
import json
def open_file_as_string(filepath):
with open(filepath, 'r') as ftemp:
templateString = ftemp.read()
return templateString
def return_absolut_path(script_path, relative_path):
return os.path.realpath(os.path.join(script_path, relative_path))
def fetch_json_file_as_dict(path_to_json):
#db_schema_path = return_absolut_path(script_path, relative_path)
json_str = open_file_as_string(path_to_json)
json_dict = json.loads(json_str)
return json_dict
if __name__ == '__main__':
unittest.main()
Add function for backward compability | import os
import sys
import json
def open_file_as_string(filepath):
with open(filepath, 'r') as ftemp:
templateString = ftemp.read()
return templateString
def return_absolut_path(script_path, relative_path):
return os.path.realpath(os.path.join(script_path, relative_path))
def fetch_json_file_as_dict(path_to_json):
#db_schema_path = return_absolut_path(script_path, relative_path)
json_str = open_file_as_string(path_to_json)
json_dict = json.loads(json_str)
return json_dict
def topic_to_dict(topic):
sub_topics_list = topic.split('/')
if len(sub_topics_list) is not 6:
return None
else:
topic_dict = {}
topic_dict['top'] = sub_topics_list[0]
topic_dict['global'] = sub_topics_list[1]
topic_dict['local'] = sub_topics_list[2]
topic_dict['client_id'] = sub_topics_list[3]
topic_dict['message_type'] = int(sub_topics_list[4])
topic_dict['stream_index'] = int(sub_topics_list[5])
return topic_dict
if __name__ == '__main__':
unittest.main()
| <commit_before>import os
import sys
import json
def open_file_as_string(filepath):
with open(filepath, 'r') as ftemp:
templateString = ftemp.read()
return templateString
def return_absolut_path(script_path, relative_path):
return os.path.realpath(os.path.join(script_path, relative_path))
def fetch_json_file_as_dict(path_to_json):
#db_schema_path = return_absolut_path(script_path, relative_path)
json_str = open_file_as_string(path_to_json)
json_dict = json.loads(json_str)
return json_dict
if __name__ == '__main__':
unittest.main()
<commit_msg>Add function for backward compability<commit_after> | import os
import sys
import json
def open_file_as_string(filepath):
with open(filepath, 'r') as ftemp:
templateString = ftemp.read()
return templateString
def return_absolut_path(script_path, relative_path):
return os.path.realpath(os.path.join(script_path, relative_path))
def fetch_json_file_as_dict(path_to_json):
#db_schema_path = return_absolut_path(script_path, relative_path)
json_str = open_file_as_string(path_to_json)
json_dict = json.loads(json_str)
return json_dict
def topic_to_dict(topic):
sub_topics_list = topic.split('/')
if len(sub_topics_list) is not 6:
return None
else:
topic_dict = {}
topic_dict['top'] = sub_topics_list[0]
topic_dict['global'] = sub_topics_list[1]
topic_dict['local'] = sub_topics_list[2]
topic_dict['client_id'] = sub_topics_list[3]
topic_dict['message_type'] = int(sub_topics_list[4])
topic_dict['stream_index'] = int(sub_topics_list[5])
return topic_dict
if __name__ == '__main__':
unittest.main()
| import os
import sys
import json
def open_file_as_string(filepath):
with open(filepath, 'r') as ftemp:
templateString = ftemp.read()
return templateString
def return_absolut_path(script_path, relative_path):
return os.path.realpath(os.path.join(script_path, relative_path))
def fetch_json_file_as_dict(path_to_json):
#db_schema_path = return_absolut_path(script_path, relative_path)
json_str = open_file_as_string(path_to_json)
json_dict = json.loads(json_str)
return json_dict
if __name__ == '__main__':
unittest.main()
Add function for backward compabilityimport os
import sys
import json
def open_file_as_string(filepath):
with open(filepath, 'r') as ftemp:
templateString = ftemp.read()
return templateString
def return_absolut_path(script_path, relative_path):
return os.path.realpath(os.path.join(script_path, relative_path))
def fetch_json_file_as_dict(path_to_json):
#db_schema_path = return_absolut_path(script_path, relative_path)
json_str = open_file_as_string(path_to_json)
json_dict = json.loads(json_str)
return json_dict
def topic_to_dict(topic):
sub_topics_list = topic.split('/')
if len(sub_topics_list) is not 6:
return None
else:
topic_dict = {}
topic_dict['top'] = sub_topics_list[0]
topic_dict['global'] = sub_topics_list[1]
topic_dict['local'] = sub_topics_list[2]
topic_dict['client_id'] = sub_topics_list[3]
topic_dict['message_type'] = int(sub_topics_list[4])
topic_dict['stream_index'] = int(sub_topics_list[5])
return topic_dict
if __name__ == '__main__':
unittest.main()
| <commit_before>import os
import sys
import json
def open_file_as_string(filepath):
with open(filepath, 'r') as ftemp:
templateString = ftemp.read()
return templateString
def return_absolut_path(script_path, relative_path):
return os.path.realpath(os.path.join(script_path, relative_path))
def fetch_json_file_as_dict(path_to_json):
#db_schema_path = return_absolut_path(script_path, relative_path)
json_str = open_file_as_string(path_to_json)
json_dict = json.loads(json_str)
return json_dict
if __name__ == '__main__':
unittest.main()
<commit_msg>Add function for backward compability<commit_after>import os
import sys
import json
def open_file_as_string(filepath):
with open(filepath, 'r') as ftemp:
templateString = ftemp.read()
return templateString
def return_absolut_path(script_path, relative_path):
return os.path.realpath(os.path.join(script_path, relative_path))
def fetch_json_file_as_dict(path_to_json):
#db_schema_path = return_absolut_path(script_path, relative_path)
json_str = open_file_as_string(path_to_json)
json_dict = json.loads(json_str)
return json_dict
def topic_to_dict(topic):
sub_topics_list = topic.split('/')
if len(sub_topics_list) is not 6:
return None
else:
topic_dict = {}
topic_dict['top'] = sub_topics_list[0]
topic_dict['global'] = sub_topics_list[1]
topic_dict['local'] = sub_topics_list[2]
topic_dict['client_id'] = sub_topics_list[3]
topic_dict['message_type'] = int(sub_topics_list[4])
topic_dict['stream_index'] = int(sub_topics_list[5])
return topic_dict
if __name__ == '__main__':
unittest.main()
|
33bd09ba4c8658064f5b9134e6ffb1b4dfad7de0 | fridge/test/test_cas.py | fridge/test/test_cas.py | from fridge.cas import ContentAddressableStorage
from fridge.memoryfs import MemoryFS
class TestContentAddressableStorage(object):
def create_cas(self, fs=None, path='cas'):
if fs is None:
fs = MemoryFS()
return ContentAddressableStorage(path, fs)
def has_root_property(self):
cas = self.create_cas(path='cas_root')
assert cas.root == 'cas_root'
def test_allows_to_store_and_retrieve_files(self):
fs = MemoryFS()
cas = self.create_cas(fs)
with fs.open('testfile', 'w') as f:
f.write(u'dummy content')
key = cas.store('testfile')
# Close and reopen
del cas
cas = self.create_cas(fs)
with fs.open(cas.get_path(key), 'r') as f:
content = f.read()
assert content == u'dummy content'
# TODO test write protection
# TODO do symlinking (test whether file can still be accessed)
| from fridge.cas import ContentAddressableStorage
from fridge.memoryfs import MemoryFS
class TestContentAddressableStorage(object):
def create_cas(self, fs=None, path='cas'):
if fs is None:
fs = MemoryFS()
return ContentAddressableStorage(path, fs)
def has_root_property(self):
cas = self.create_cas(path='cas_root')
assert cas.root == 'cas_root'
def test_allows_to_store_and_retrieve_files(self):
fs = MemoryFS()
cas = self.create_cas(fs)
with fs.open('testfile', 'w') as f:
f.write(u'dummy content')
key = cas.store('testfile')
# Close and reopen
del cas
cas = self.create_cas(fs)
with fs.open(cas.get_path(key), 'r') as f:
content = f.read()
assert content == u'dummy content'
def test_file_can_still_be_accessed_after_store(self):
fs = MemoryFS()
cas = self.create_cas(fs)
with fs.open('testfile', 'w') as f:
f.write(u'dummy content')
cas.store('testfile')
with fs.open('testfile', 'r') as f:
assert f.read() == u'dummy content'
# TODO test write protection
| Test files stored in CAS are still readable. | Test files stored in CAS are still readable.
| Python | mit | jgosmann/fridge,jgosmann/fridge | from fridge.cas import ContentAddressableStorage
from fridge.memoryfs import MemoryFS
class TestContentAddressableStorage(object):
def create_cas(self, fs=None, path='cas'):
if fs is None:
fs = MemoryFS()
return ContentAddressableStorage(path, fs)
def has_root_property(self):
cas = self.create_cas(path='cas_root')
assert cas.root == 'cas_root'
def test_allows_to_store_and_retrieve_files(self):
fs = MemoryFS()
cas = self.create_cas(fs)
with fs.open('testfile', 'w') as f:
f.write(u'dummy content')
key = cas.store('testfile')
# Close and reopen
del cas
cas = self.create_cas(fs)
with fs.open(cas.get_path(key), 'r') as f:
content = f.read()
assert content == u'dummy content'
# TODO test write protection
# TODO do symlinking (test whether file can still be accessed)
Test files stored in CAS are still readable. | from fridge.cas import ContentAddressableStorage
from fridge.memoryfs import MemoryFS
class TestContentAddressableStorage(object):
def create_cas(self, fs=None, path='cas'):
if fs is None:
fs = MemoryFS()
return ContentAddressableStorage(path, fs)
def has_root_property(self):
cas = self.create_cas(path='cas_root')
assert cas.root == 'cas_root'
def test_allows_to_store_and_retrieve_files(self):
fs = MemoryFS()
cas = self.create_cas(fs)
with fs.open('testfile', 'w') as f:
f.write(u'dummy content')
key = cas.store('testfile')
# Close and reopen
del cas
cas = self.create_cas(fs)
with fs.open(cas.get_path(key), 'r') as f:
content = f.read()
assert content == u'dummy content'
def test_file_can_still_be_accessed_after_store(self):
fs = MemoryFS()
cas = self.create_cas(fs)
with fs.open('testfile', 'w') as f:
f.write(u'dummy content')
cas.store('testfile')
with fs.open('testfile', 'r') as f:
assert f.read() == u'dummy content'
# TODO test write protection
| <commit_before>from fridge.cas import ContentAddressableStorage
from fridge.memoryfs import MemoryFS
class TestContentAddressableStorage(object):
def create_cas(self, fs=None, path='cas'):
if fs is None:
fs = MemoryFS()
return ContentAddressableStorage(path, fs)
def has_root_property(self):
cas = self.create_cas(path='cas_root')
assert cas.root == 'cas_root'
def test_allows_to_store_and_retrieve_files(self):
fs = MemoryFS()
cas = self.create_cas(fs)
with fs.open('testfile', 'w') as f:
f.write(u'dummy content')
key = cas.store('testfile')
# Close and reopen
del cas
cas = self.create_cas(fs)
with fs.open(cas.get_path(key), 'r') as f:
content = f.read()
assert content == u'dummy content'
# TODO test write protection
# TODO do symlinking (test whether file can still be accessed)
<commit_msg>Test files stored in CAS are still readable.<commit_after> | from fridge.cas import ContentAddressableStorage
from fridge.memoryfs import MemoryFS
class TestContentAddressableStorage(object):
def create_cas(self, fs=None, path='cas'):
if fs is None:
fs = MemoryFS()
return ContentAddressableStorage(path, fs)
def has_root_property(self):
cas = self.create_cas(path='cas_root')
assert cas.root == 'cas_root'
def test_allows_to_store_and_retrieve_files(self):
fs = MemoryFS()
cas = self.create_cas(fs)
with fs.open('testfile', 'w') as f:
f.write(u'dummy content')
key = cas.store('testfile')
# Close and reopen
del cas
cas = self.create_cas(fs)
with fs.open(cas.get_path(key), 'r') as f:
content = f.read()
assert content == u'dummy content'
def test_file_can_still_be_accessed_after_store(self):
fs = MemoryFS()
cas = self.create_cas(fs)
with fs.open('testfile', 'w') as f:
f.write(u'dummy content')
cas.store('testfile')
with fs.open('testfile', 'r') as f:
assert f.read() == u'dummy content'
# TODO test write protection
| from fridge.cas import ContentAddressableStorage
from fridge.memoryfs import MemoryFS
class TestContentAddressableStorage(object):
def create_cas(self, fs=None, path='cas'):
if fs is None:
fs = MemoryFS()
return ContentAddressableStorage(path, fs)
def has_root_property(self):
cas = self.create_cas(path='cas_root')
assert cas.root == 'cas_root'
def test_allows_to_store_and_retrieve_files(self):
fs = MemoryFS()
cas = self.create_cas(fs)
with fs.open('testfile', 'w') as f:
f.write(u'dummy content')
key = cas.store('testfile')
# Close and reopen
del cas
cas = self.create_cas(fs)
with fs.open(cas.get_path(key), 'r') as f:
content = f.read()
assert content == u'dummy content'
# TODO test write protection
# TODO do symlinking (test whether file can still be accessed)
Test files stored in CAS are still readable.from fridge.cas import ContentAddressableStorage
from fridge.memoryfs import MemoryFS
class TestContentAddressableStorage(object):
def create_cas(self, fs=None, path='cas'):
if fs is None:
fs = MemoryFS()
return ContentAddressableStorage(path, fs)
def has_root_property(self):
cas = self.create_cas(path='cas_root')
assert cas.root == 'cas_root'
def test_allows_to_store_and_retrieve_files(self):
fs = MemoryFS()
cas = self.create_cas(fs)
with fs.open('testfile', 'w') as f:
f.write(u'dummy content')
key = cas.store('testfile')
# Close and reopen
del cas
cas = self.create_cas(fs)
with fs.open(cas.get_path(key), 'r') as f:
content = f.read()
assert content == u'dummy content'
def test_file_can_still_be_accessed_after_store(self):
fs = MemoryFS()
cas = self.create_cas(fs)
with fs.open('testfile', 'w') as f:
f.write(u'dummy content')
cas.store('testfile')
with fs.open('testfile', 'r') as f:
assert f.read() == u'dummy content'
# TODO test write protection
| <commit_before>from fridge.cas import ContentAddressableStorage
from fridge.memoryfs import MemoryFS
class TestContentAddressableStorage(object):
def create_cas(self, fs=None, path='cas'):
if fs is None:
fs = MemoryFS()
return ContentAddressableStorage(path, fs)
def has_root_property(self):
cas = self.create_cas(path='cas_root')
assert cas.root == 'cas_root'
def test_allows_to_store_and_retrieve_files(self):
fs = MemoryFS()
cas = self.create_cas(fs)
with fs.open('testfile', 'w') as f:
f.write(u'dummy content')
key = cas.store('testfile')
# Close and reopen
del cas
cas = self.create_cas(fs)
with fs.open(cas.get_path(key), 'r') as f:
content = f.read()
assert content == u'dummy content'
# TODO test write protection
# TODO do symlinking (test whether file can still be accessed)
<commit_msg>Test files stored in CAS are still readable.<commit_after>from fridge.cas import ContentAddressableStorage
from fridge.memoryfs import MemoryFS
class TestContentAddressableStorage(object):
def create_cas(self, fs=None, path='cas'):
if fs is None:
fs = MemoryFS()
return ContentAddressableStorage(path, fs)
def has_root_property(self):
cas = self.create_cas(path='cas_root')
assert cas.root == 'cas_root'
def test_allows_to_store_and_retrieve_files(self):
fs = MemoryFS()
cas = self.create_cas(fs)
with fs.open('testfile', 'w') as f:
f.write(u'dummy content')
key = cas.store('testfile')
# Close and reopen
del cas
cas = self.create_cas(fs)
with fs.open(cas.get_path(key), 'r') as f:
content = f.read()
assert content == u'dummy content'
def test_file_can_still_be_accessed_after_store(self):
fs = MemoryFS()
cas = self.create_cas(fs)
with fs.open('testfile', 'w') as f:
f.write(u'dummy content')
cas.store('testfile')
with fs.open('testfile', 'r') as f:
assert f.read() == u'dummy content'
# TODO test write protection
|
a7fbed238216bd7a9110829e893c318099cc1e14 | anthemav/__main__.py | anthemav/__main__.py | import argparse
import asyncio
import logging
from .protocol import create_anthemav_reader
def console():
parser = argparse.ArgumentParser(description=console.__doc__)
parser.add_argument('--host', default='127.0.0.1', help='IP or FQDN of AVR')
parser.add_argument('--port', default='14999', help='Port of AVR')
parser.add_argument('--verbose', '-v', action='count')
args = parser.parse_args()
if args.verbose:
level = logging.DEBUG
else:
level = logging.ERROR
logging.basicConfig(level=level)
loop = asyncio.get_event_loop()
def print_callback(telegram):
"""Callback that prints telegram values."""
for obiref, obj in telegram.items():
if obj:
print(obj.value, obj.unit)
print()
conn = create_anthemav_reader(args.host,args.port,print_callback,loop=loop)
loop.create_task(conn)
loop.run_forever()
| import argparse
import asyncio
import logging
from .protocol import create_anthemav_reader
def console():
parser = argparse.ArgumentParser(description=console.__doc__)
parser.add_argument('--host', default='127.0.0.1', help='IP or FQDN of AVR')
parser.add_argument('--port', default='14999', help='Port of AVR')
parser.add_argument('--verbose', '-v', action='count')
args = parser.parse_args()
if args.verbose:
level = logging.DEBUG
else:
level = logging.ERROR
logging.basicConfig(level=level)
loop = asyncio.get_event_loop()
def print_callback(callerobj,message):
print(message)
conn = create_anthemav_reader(args.host,args.port,print_callback,loop=loop)
loop.create_task(conn)
loop.run_forever()
| Update callback to support two parameters | Update callback to support two parameters
| Python | mit | nugget/python-anthemav | import argparse
import asyncio
import logging
from .protocol import create_anthemav_reader
def console():
parser = argparse.ArgumentParser(description=console.__doc__)
parser.add_argument('--host', default='127.0.0.1', help='IP or FQDN of AVR')
parser.add_argument('--port', default='14999', help='Port of AVR')
parser.add_argument('--verbose', '-v', action='count')
args = parser.parse_args()
if args.verbose:
level = logging.DEBUG
else:
level = logging.ERROR
logging.basicConfig(level=level)
loop = asyncio.get_event_loop()
def print_callback(telegram):
"""Callback that prints telegram values."""
for obiref, obj in telegram.items():
if obj:
print(obj.value, obj.unit)
print()
conn = create_anthemav_reader(args.host,args.port,print_callback,loop=loop)
loop.create_task(conn)
loop.run_forever()
Update callback to support two parameters | import argparse
import asyncio
import logging
from .protocol import create_anthemav_reader
def console():
parser = argparse.ArgumentParser(description=console.__doc__)
parser.add_argument('--host', default='127.0.0.1', help='IP or FQDN of AVR')
parser.add_argument('--port', default='14999', help='Port of AVR')
parser.add_argument('--verbose', '-v', action='count')
args = parser.parse_args()
if args.verbose:
level = logging.DEBUG
else:
level = logging.ERROR
logging.basicConfig(level=level)
loop = asyncio.get_event_loop()
def print_callback(callerobj,message):
print(message)
conn = create_anthemav_reader(args.host,args.port,print_callback,loop=loop)
loop.create_task(conn)
loop.run_forever()
| <commit_before>import argparse
import asyncio
import logging
from .protocol import create_anthemav_reader
def console():
parser = argparse.ArgumentParser(description=console.__doc__)
parser.add_argument('--host', default='127.0.0.1', help='IP or FQDN of AVR')
parser.add_argument('--port', default='14999', help='Port of AVR')
parser.add_argument('--verbose', '-v', action='count')
args = parser.parse_args()
if args.verbose:
level = logging.DEBUG
else:
level = logging.ERROR
logging.basicConfig(level=level)
loop = asyncio.get_event_loop()
def print_callback(telegram):
"""Callback that prints telegram values."""
for obiref, obj in telegram.items():
if obj:
print(obj.value, obj.unit)
print()
conn = create_anthemav_reader(args.host,args.port,print_callback,loop=loop)
loop.create_task(conn)
loop.run_forever()
<commit_msg>Update callback to support two parameters<commit_after> | import argparse
import asyncio
import logging
from .protocol import create_anthemav_reader
def console():
parser = argparse.ArgumentParser(description=console.__doc__)
parser.add_argument('--host', default='127.0.0.1', help='IP or FQDN of AVR')
parser.add_argument('--port', default='14999', help='Port of AVR')
parser.add_argument('--verbose', '-v', action='count')
args = parser.parse_args()
if args.verbose:
level = logging.DEBUG
else:
level = logging.ERROR
logging.basicConfig(level=level)
loop = asyncio.get_event_loop()
def print_callback(callerobj,message):
print(message)
conn = create_anthemav_reader(args.host,args.port,print_callback,loop=loop)
loop.create_task(conn)
loop.run_forever()
| import argparse
import asyncio
import logging
from .protocol import create_anthemav_reader
def console():
parser = argparse.ArgumentParser(description=console.__doc__)
parser.add_argument('--host', default='127.0.0.1', help='IP or FQDN of AVR')
parser.add_argument('--port', default='14999', help='Port of AVR')
parser.add_argument('--verbose', '-v', action='count')
args = parser.parse_args()
if args.verbose:
level = logging.DEBUG
else:
level = logging.ERROR
logging.basicConfig(level=level)
loop = asyncio.get_event_loop()
def print_callback(telegram):
"""Callback that prints telegram values."""
for obiref, obj in telegram.items():
if obj:
print(obj.value, obj.unit)
print()
conn = create_anthemav_reader(args.host,args.port,print_callback,loop=loop)
loop.create_task(conn)
loop.run_forever()
Update callback to support two parametersimport argparse
import asyncio
import logging
from .protocol import create_anthemav_reader
def console():
parser = argparse.ArgumentParser(description=console.__doc__)
parser.add_argument('--host', default='127.0.0.1', help='IP or FQDN of AVR')
parser.add_argument('--port', default='14999', help='Port of AVR')
parser.add_argument('--verbose', '-v', action='count')
args = parser.parse_args()
if args.verbose:
level = logging.DEBUG
else:
level = logging.ERROR
logging.basicConfig(level=level)
loop = asyncio.get_event_loop()
def print_callback(callerobj,message):
print(message)
conn = create_anthemav_reader(args.host,args.port,print_callback,loop=loop)
loop.create_task(conn)
loop.run_forever()
| <commit_before>import argparse
import asyncio
import logging
from .protocol import create_anthemav_reader
def console():
parser = argparse.ArgumentParser(description=console.__doc__)
parser.add_argument('--host', default='127.0.0.1', help='IP or FQDN of AVR')
parser.add_argument('--port', default='14999', help='Port of AVR')
parser.add_argument('--verbose', '-v', action='count')
args = parser.parse_args()
if args.verbose:
level = logging.DEBUG
else:
level = logging.ERROR
logging.basicConfig(level=level)
loop = asyncio.get_event_loop()
def print_callback(telegram):
"""Callback that prints telegram values."""
for obiref, obj in telegram.items():
if obj:
print(obj.value, obj.unit)
print()
conn = create_anthemav_reader(args.host,args.port,print_callback,loop=loop)
loop.create_task(conn)
loop.run_forever()
<commit_msg>Update callback to support two parameters<commit_after>import argparse
import asyncio
import logging
from .protocol import create_anthemav_reader
def console():
parser = argparse.ArgumentParser(description=console.__doc__)
parser.add_argument('--host', default='127.0.0.1', help='IP or FQDN of AVR')
parser.add_argument('--port', default='14999', help='Port of AVR')
parser.add_argument('--verbose', '-v', action='count')
args = parser.parse_args()
if args.verbose:
level = logging.DEBUG
else:
level = logging.ERROR
logging.basicConfig(level=level)
loop = asyncio.get_event_loop()
def print_callback(callerobj,message):
print(message)
conn = create_anthemav_reader(args.host,args.port,print_callback,loop=loop)
loop.create_task(conn)
loop.run_forever()
|
ee0a0b492b5536e0cc8c8e561875254698416eb4 | lib/ansible/utils/string_functions.py | lib/ansible/utils/string_functions.py | def isprintable(instring):
#http://stackoverflow.com/a/3637294
import string
printset = set(string.printable)
isprintable = set(instring).issubset(printset)
return isprintable
def count_newlines_from_end(str):
i = len(str)
while i > 0:
if str[i-1] != '\n':
break
i -= 1
return len(str) - i
| def isprintable(instring):
if isinstance(instring, str):
#http://stackoverflow.com/a/3637294
import string
printset = set(string.printable)
isprintable = set(instring).issubset(printset)
return isprintable
else:
return True
def count_newlines_from_end(str):
i = len(str)
while i > 0:
if str[i-1] != '\n':
break
i -= 1
return len(str) - i
| Allow isprintable() util function to work with unicode | Allow isprintable() util function to work with unicode
Fixes #6842
| Python | mit | thaim/ansible,thaim/ansible | def isprintable(instring):
#http://stackoverflow.com/a/3637294
import string
printset = set(string.printable)
isprintable = set(instring).issubset(printset)
return isprintable
def count_newlines_from_end(str):
i = len(str)
while i > 0:
if str[i-1] != '\n':
break
i -= 1
return len(str) - i
Allow isprintable() util function to work with unicode
Fixes #6842 | def isprintable(instring):
if isinstance(instring, str):
#http://stackoverflow.com/a/3637294
import string
printset = set(string.printable)
isprintable = set(instring).issubset(printset)
return isprintable
else:
return True
def count_newlines_from_end(str):
i = len(str)
while i > 0:
if str[i-1] != '\n':
break
i -= 1
return len(str) - i
| <commit_before>def isprintable(instring):
#http://stackoverflow.com/a/3637294
import string
printset = set(string.printable)
isprintable = set(instring).issubset(printset)
return isprintable
def count_newlines_from_end(str):
i = len(str)
while i > 0:
if str[i-1] != '\n':
break
i -= 1
return len(str) - i
<commit_msg>Allow isprintable() util function to work with unicode
Fixes #6842<commit_after> | def isprintable(instring):
if isinstance(instring, str):
#http://stackoverflow.com/a/3637294
import string
printset = set(string.printable)
isprintable = set(instring).issubset(printset)
return isprintable
else:
return True
def count_newlines_from_end(str):
i = len(str)
while i > 0:
if str[i-1] != '\n':
break
i -= 1
return len(str) - i
| def isprintable(instring):
#http://stackoverflow.com/a/3637294
import string
printset = set(string.printable)
isprintable = set(instring).issubset(printset)
return isprintable
def count_newlines_from_end(str):
i = len(str)
while i > 0:
if str[i-1] != '\n':
break
i -= 1
return len(str) - i
Allow isprintable() util function to work with unicode
Fixes #6842def isprintable(instring):
if isinstance(instring, str):
#http://stackoverflow.com/a/3637294
import string
printset = set(string.printable)
isprintable = set(instring).issubset(printset)
return isprintable
else:
return True
def count_newlines_from_end(str):
i = len(str)
while i > 0:
if str[i-1] != '\n':
break
i -= 1
return len(str) - i
| <commit_before>def isprintable(instring):
#http://stackoverflow.com/a/3637294
import string
printset = set(string.printable)
isprintable = set(instring).issubset(printset)
return isprintable
def count_newlines_from_end(str):
i = len(str)
while i > 0:
if str[i-1] != '\n':
break
i -= 1
return len(str) - i
<commit_msg>Allow isprintable() util function to work with unicode
Fixes #6842<commit_after>def isprintable(instring):
if isinstance(instring, str):
#http://stackoverflow.com/a/3637294
import string
printset = set(string.printable)
isprintable = set(instring).issubset(printset)
return isprintable
else:
return True
def count_newlines_from_end(str):
i = len(str)
while i > 0:
if str[i-1] != '\n':
break
i -= 1
return len(str) - i
|
e284968b7e234b30a8a593b298a96b78bb151c03 | pyscf/pbc/tdscf/rhf_slow.py | pyscf/pbc/tdscf/rhf_slow.py | """
This module is an alias for `pyscf.tdscf.rhf_slow`. It works with single k-point HF objects.
"""
from pyscf.tdscf.rhf_slow import * | """
This and other `_slow` modules implement the time-dependent Hartree-Fock procedure. The primary performance drawback is
that, unlike other 'fast' routines with an implicit construction of the eigenvalue problem, these modules construct
TDHF matrices explicitly via an AO-MO transformation, i.e. with a O(N^5) complexity scaling. As a result, regular
`numpy.linalg.eig` can be used to retrieve TDHF roots in a reliable fashion without any issues related to the Davidson
procedure. Several variants of TDHF are available:
* `pyscf.tdscf.rhf.slow`: the molecular implementation;
* (this module) `pyscf.pbc.tdscf.rhf_slow`: PBC (periodic boundary condition) implementation for RHF objects of
`pyscf.pbc.scf` modules;
* `pyscf.pbc.tdscf.krhf_slow_supercell`: PBC implementation for KRHF objects of `pyscf.pbc.scf` modules. Works with
an arbitrary number of k-points but has a overhead due to an effective construction of a supercell.
* `pyscf.pbc.tdscf.krhf_slow`: PBC implementation for KRHF objects of `pyscf.pbc.scf` modules. Works with an arbitrary
number of k-points and employs k-point conservation (diagonalizes matrix blocks separately).
"""
# This module is simply an alias of the molecular code
from pyscf.tdscf.rhf_slow import * | Extend a docstring in PBC-Gamma TDHF | Extend a docstring in PBC-Gamma TDHF
| Python | apache-2.0 | gkc1000/pyscf,gkc1000/pyscf,sunqm/pyscf,sunqm/pyscf,sunqm/pyscf,gkc1000/pyscf,gkc1000/pyscf,sunqm/pyscf,gkc1000/pyscf | """
This module is an alias for `pyscf.tdscf.rhf_slow`. It works with single k-point HF objects.
"""
from pyscf.tdscf.rhf_slow import *Extend a docstring in PBC-Gamma TDHF | """
This and other `_slow` modules implement the time-dependent Hartree-Fock procedure. The primary performance drawback is
that, unlike other 'fast' routines with an implicit construction of the eigenvalue problem, these modules construct
TDHF matrices explicitly via an AO-MO transformation, i.e. with a O(N^5) complexity scaling. As a result, regular
`numpy.linalg.eig` can be used to retrieve TDHF roots in a reliable fashion without any issues related to the Davidson
procedure. Several variants of TDHF are available:
* `pyscf.tdscf.rhf.slow`: the molecular implementation;
* (this module) `pyscf.pbc.tdscf.rhf_slow`: PBC (periodic boundary condition) implementation for RHF objects of
`pyscf.pbc.scf` modules;
* `pyscf.pbc.tdscf.krhf_slow_supercell`: PBC implementation for KRHF objects of `pyscf.pbc.scf` modules. Works with
an arbitrary number of k-points but has a overhead due to an effective construction of a supercell.
* `pyscf.pbc.tdscf.krhf_slow`: PBC implementation for KRHF objects of `pyscf.pbc.scf` modules. Works with an arbitrary
number of k-points and employs k-point conservation (diagonalizes matrix blocks separately).
"""
# This module is simply an alias of the molecular code
from pyscf.tdscf.rhf_slow import * | <commit_before>"""
This module is an alias for `pyscf.tdscf.rhf_slow`. It works with single k-point HF objects.
"""
from pyscf.tdscf.rhf_slow import *<commit_msg>Extend a docstring in PBC-Gamma TDHF<commit_after> | """
This and other `_slow` modules implement the time-dependent Hartree-Fock procedure. The primary performance drawback is
that, unlike other 'fast' routines with an implicit construction of the eigenvalue problem, these modules construct
TDHF matrices explicitly via an AO-MO transformation, i.e. with a O(N^5) complexity scaling. As a result, regular
`numpy.linalg.eig` can be used to retrieve TDHF roots in a reliable fashion without any issues related to the Davidson
procedure. Several variants of TDHF are available:
* `pyscf.tdscf.rhf.slow`: the molecular implementation;
* (this module) `pyscf.pbc.tdscf.rhf_slow`: PBC (periodic boundary condition) implementation for RHF objects of
`pyscf.pbc.scf` modules;
* `pyscf.pbc.tdscf.krhf_slow_supercell`: PBC implementation for KRHF objects of `pyscf.pbc.scf` modules. Works with
an arbitrary number of k-points but has a overhead due to an effective construction of a supercell.
* `pyscf.pbc.tdscf.krhf_slow`: PBC implementation for KRHF objects of `pyscf.pbc.scf` modules. Works with an arbitrary
number of k-points and employs k-point conservation (diagonalizes matrix blocks separately).
"""
# This module is simply an alias of the molecular code
from pyscf.tdscf.rhf_slow import * | """
This module is an alias for `pyscf.tdscf.rhf_slow`. It works with single k-point HF objects.
"""
from pyscf.tdscf.rhf_slow import *Extend a docstring in PBC-Gamma TDHF"""
This and other `_slow` modules implement the time-dependent Hartree-Fock procedure. The primary performance drawback is
that, unlike other 'fast' routines with an implicit construction of the eigenvalue problem, these modules construct
TDHF matrices explicitly via an AO-MO transformation, i.e. with a O(N^5) complexity scaling. As a result, regular
`numpy.linalg.eig` can be used to retrieve TDHF roots in a reliable fashion without any issues related to the Davidson
procedure. Several variants of TDHF are available:
* `pyscf.tdscf.rhf.slow`: the molecular implementation;
* (this module) `pyscf.pbc.tdscf.rhf_slow`: PBC (periodic boundary condition) implementation for RHF objects of
`pyscf.pbc.scf` modules;
* `pyscf.pbc.tdscf.krhf_slow_supercell`: PBC implementation for KRHF objects of `pyscf.pbc.scf` modules. Works with
an arbitrary number of k-points but has a overhead due to an effective construction of a supercell.
* `pyscf.pbc.tdscf.krhf_slow`: PBC implementation for KRHF objects of `pyscf.pbc.scf` modules. Works with an arbitrary
number of k-points and employs k-point conservation (diagonalizes matrix blocks separately).
"""
# This module is simply an alias of the molecular code
from pyscf.tdscf.rhf_slow import * | <commit_before>"""
This module is an alias for `pyscf.tdscf.rhf_slow`. It works with single k-point HF objects.
"""
from pyscf.tdscf.rhf_slow import *<commit_msg>Extend a docstring in PBC-Gamma TDHF<commit_after>"""
This and other `_slow` modules implement the time-dependent Hartree-Fock procedure. The primary performance drawback is
that, unlike other 'fast' routines with an implicit construction of the eigenvalue problem, these modules construct
TDHF matrices explicitly via an AO-MO transformation, i.e. with a O(N^5) complexity scaling. As a result, regular
`numpy.linalg.eig` can be used to retrieve TDHF roots in a reliable fashion without any issues related to the Davidson
procedure. Several variants of TDHF are available:
* `pyscf.tdscf.rhf.slow`: the molecular implementation;
* (this module) `pyscf.pbc.tdscf.rhf_slow`: PBC (periodic boundary condition) implementation for RHF objects of
`pyscf.pbc.scf` modules;
* `pyscf.pbc.tdscf.krhf_slow_supercell`: PBC implementation for KRHF objects of `pyscf.pbc.scf` modules. Works with
an arbitrary number of k-points but has a overhead due to an effective construction of a supercell.
* `pyscf.pbc.tdscf.krhf_slow`: PBC implementation for KRHF objects of `pyscf.pbc.scf` modules. Works with an arbitrary
number of k-points and employs k-point conservation (diagonalizes matrix blocks separately).
"""
# This module is simply an alias of the molecular code
from pyscf.tdscf.rhf_slow import * |
01fa3a2ce4181629db2027fd9797e5592bdadada | python/balcaza/t2wrapper.py | python/balcaza/t2wrapper.py | from t2activity import NestedWorkflow
from t2types import ListType, String
from t2flow import Workflow
class WrapperWorkflow(Workflow):
def __init__(self, flow):
self.flow = flow
Workflow.__init__(self, flow.title, flow.author, flow.description)
setattr(self.task, flow.name, NestedWorkflow(flow))
nested = getattr(self.task, flow.name)
for port in flow.input:
# Set type to same depth, but basetype of String
depth = port.type.getDepth()
if depth == 0:
type = String
else:
type = ListType(String, depth)
# Copy any annotations
type.dict = port.type.dict
setattr(self.input, port.name, type)
getattr(self.input, port.name) >> getattr(nested.input, port.name)
for port in flow.output:
# Set type to same depth, but basetype of String
depth = port.type.getDepth()
if depth == 0:
type = String
else:
type = ListType(String, depth)
# Copy any annotations
type.dict = port.type.dict
setattr(self.output, port.name, type)
getattr(nested.output, port.name) >> getattr(self.output, port.name)
| from t2activity import NestedWorkflow
from t2types import ListType, String
from t2flow import Workflow
class WrapperWorkflow(Workflow):
def __init__(self, flow):
self.flow = flow
Workflow.__init__(self, flow.title, flow.author, flow.description)
setattr(self.task, flow.name, NestedWorkflow(flow))
nested = getattr(self.task, flow.name)
for port in flow.input:
# Set type to same depth, but basetype of String
depth = port.type.getDepth()
if depth == 0:
type = String
else:
type = ListType(String, depth)
# Copy any annotations
type.dict = port.type.dict
self.input[port.name] = type
self.input[port.name] >> nested.input[port.name]
for port in flow.output:
# Set type to same depth, but basetype of String
depth = port.type.getDepth()
if depth == 0:
type = String
else:
type = ListType(String, depth)
# Copy any annotations
type.dict = port.type.dict
self.output[port.name] = type
nested.output[port.name] >> self.output[port.name]
| Change wrapper code to use [] notation for attribute access | Change wrapper code to use [] notation for attribute access
| Python | lgpl-2.1 | jongiddy/balcazapy,jongiddy/balcazapy,jongiddy/balcazapy | from t2activity import NestedWorkflow
from t2types import ListType, String
from t2flow import Workflow
class WrapperWorkflow(Workflow):
def __init__(self, flow):
self.flow = flow
Workflow.__init__(self, flow.title, flow.author, flow.description)
setattr(self.task, flow.name, NestedWorkflow(flow))
nested = getattr(self.task, flow.name)
for port in flow.input:
# Set type to same depth, but basetype of String
depth = port.type.getDepth()
if depth == 0:
type = String
else:
type = ListType(String, depth)
# Copy any annotations
type.dict = port.type.dict
setattr(self.input, port.name, type)
getattr(self.input, port.name) >> getattr(nested.input, port.name)
for port in flow.output:
# Set type to same depth, but basetype of String
depth = port.type.getDepth()
if depth == 0:
type = String
else:
type = ListType(String, depth)
# Copy any annotations
type.dict = port.type.dict
setattr(self.output, port.name, type)
getattr(nested.output, port.name) >> getattr(self.output, port.name)
Change wrapper code to use [] notation for attribute access | from t2activity import NestedWorkflow
from t2types import ListType, String
from t2flow import Workflow
class WrapperWorkflow(Workflow):
def __init__(self, flow):
self.flow = flow
Workflow.__init__(self, flow.title, flow.author, flow.description)
setattr(self.task, flow.name, NestedWorkflow(flow))
nested = getattr(self.task, flow.name)
for port in flow.input:
# Set type to same depth, but basetype of String
depth = port.type.getDepth()
if depth == 0:
type = String
else:
type = ListType(String, depth)
# Copy any annotations
type.dict = port.type.dict
self.input[port.name] = type
self.input[port.name] >> nested.input[port.name]
for port in flow.output:
# Set type to same depth, but basetype of String
depth = port.type.getDepth()
if depth == 0:
type = String
else:
type = ListType(String, depth)
# Copy any annotations
type.dict = port.type.dict
self.output[port.name] = type
nested.output[port.name] >> self.output[port.name]
| <commit_before>from t2activity import NestedWorkflow
from t2types import ListType, String
from t2flow import Workflow
class WrapperWorkflow(Workflow):
def __init__(self, flow):
self.flow = flow
Workflow.__init__(self, flow.title, flow.author, flow.description)
setattr(self.task, flow.name, NestedWorkflow(flow))
nested = getattr(self.task, flow.name)
for port in flow.input:
# Set type to same depth, but basetype of String
depth = port.type.getDepth()
if depth == 0:
type = String
else:
type = ListType(String, depth)
# Copy any annotations
type.dict = port.type.dict
setattr(self.input, port.name, type)
getattr(self.input, port.name) >> getattr(nested.input, port.name)
for port in flow.output:
# Set type to same depth, but basetype of String
depth = port.type.getDepth()
if depth == 0:
type = String
else:
type = ListType(String, depth)
# Copy any annotations
type.dict = port.type.dict
setattr(self.output, port.name, type)
getattr(nested.output, port.name) >> getattr(self.output, port.name)
<commit_msg>Change wrapper code to use [] notation for attribute access<commit_after> | from t2activity import NestedWorkflow
from t2types import ListType, String
from t2flow import Workflow
class WrapperWorkflow(Workflow):
def __init__(self, flow):
self.flow = flow
Workflow.__init__(self, flow.title, flow.author, flow.description)
setattr(self.task, flow.name, NestedWorkflow(flow))
nested = getattr(self.task, flow.name)
for port in flow.input:
# Set type to same depth, but basetype of String
depth = port.type.getDepth()
if depth == 0:
type = String
else:
type = ListType(String, depth)
# Copy any annotations
type.dict = port.type.dict
self.input[port.name] = type
self.input[port.name] >> nested.input[port.name]
for port in flow.output:
# Set type to same depth, but basetype of String
depth = port.type.getDepth()
if depth == 0:
type = String
else:
type = ListType(String, depth)
# Copy any annotations
type.dict = port.type.dict
self.output[port.name] = type
nested.output[port.name] >> self.output[port.name]
| from t2activity import NestedWorkflow
from t2types import ListType, String
from t2flow import Workflow
class WrapperWorkflow(Workflow):
def __init__(self, flow):
self.flow = flow
Workflow.__init__(self, flow.title, flow.author, flow.description)
setattr(self.task, flow.name, NestedWorkflow(flow))
nested = getattr(self.task, flow.name)
for port in flow.input:
# Set type to same depth, but basetype of String
depth = port.type.getDepth()
if depth == 0:
type = String
else:
type = ListType(String, depth)
# Copy any annotations
type.dict = port.type.dict
setattr(self.input, port.name, type)
getattr(self.input, port.name) >> getattr(nested.input, port.name)
for port in flow.output:
# Set type to same depth, but basetype of String
depth = port.type.getDepth()
if depth == 0:
type = String
else:
type = ListType(String, depth)
# Copy any annotations
type.dict = port.type.dict
setattr(self.output, port.name, type)
getattr(nested.output, port.name) >> getattr(self.output, port.name)
Change wrapper code to use [] notation for attribute accessfrom t2activity import NestedWorkflow
from t2types import ListType, String
from t2flow import Workflow
class WrapperWorkflow(Workflow):
def __init__(self, flow):
self.flow = flow
Workflow.__init__(self, flow.title, flow.author, flow.description)
setattr(self.task, flow.name, NestedWorkflow(flow))
nested = getattr(self.task, flow.name)
for port in flow.input:
# Set type to same depth, but basetype of String
depth = port.type.getDepth()
if depth == 0:
type = String
else:
type = ListType(String, depth)
# Copy any annotations
type.dict = port.type.dict
self.input[port.name] = type
self.input[port.name] >> nested.input[port.name]
for port in flow.output:
# Set type to same depth, but basetype of String
depth = port.type.getDepth()
if depth == 0:
type = String
else:
type = ListType(String, depth)
# Copy any annotations
type.dict = port.type.dict
self.output[port.name] = type
nested.output[port.name] >> self.output[port.name]
| <commit_before>from t2activity import NestedWorkflow
from t2types import ListType, String
from t2flow import Workflow
class WrapperWorkflow(Workflow):
def __init__(self, flow):
self.flow = flow
Workflow.__init__(self, flow.title, flow.author, flow.description)
setattr(self.task, flow.name, NestedWorkflow(flow))
nested = getattr(self.task, flow.name)
for port in flow.input:
# Set type to same depth, but basetype of String
depth = port.type.getDepth()
if depth == 0:
type = String
else:
type = ListType(String, depth)
# Copy any annotations
type.dict = port.type.dict
setattr(self.input, port.name, type)
getattr(self.input, port.name) >> getattr(nested.input, port.name)
for port in flow.output:
# Set type to same depth, but basetype of String
depth = port.type.getDepth()
if depth == 0:
type = String
else:
type = ListType(String, depth)
# Copy any annotations
type.dict = port.type.dict
setattr(self.output, port.name, type)
getattr(nested.output, port.name) >> getattr(self.output, port.name)
<commit_msg>Change wrapper code to use [] notation for attribute access<commit_after>from t2activity import NestedWorkflow
from t2types import ListType, String
from t2flow import Workflow
class WrapperWorkflow(Workflow):
def __init__(self, flow):
self.flow = flow
Workflow.__init__(self, flow.title, flow.author, flow.description)
setattr(self.task, flow.name, NestedWorkflow(flow))
nested = getattr(self.task, flow.name)
for port in flow.input:
# Set type to same depth, but basetype of String
depth = port.type.getDepth()
if depth == 0:
type = String
else:
type = ListType(String, depth)
# Copy any annotations
type.dict = port.type.dict
self.input[port.name] = type
self.input[port.name] >> nested.input[port.name]
for port in flow.output:
# Set type to same depth, but basetype of String
depth = port.type.getDepth()
if depth == 0:
type = String
else:
type = ListType(String, depth)
# Copy any annotations
type.dict = port.type.dict
self.output[port.name] = type
nested.output[port.name] >> self.output[port.name]
|
ff4b241b33a5e2896110f4575e9aff41a3e04e72 | dimod/compatibility23.py | dimod/compatibility23.py | import sys
import itertools
import inspect
from collections import namedtuple
_PY2 = sys.version_info.major == 2
if _PY2:
range_ = xrange
zip_ = itertools.izip
def iteritems(d):
return d.iteritems()
def itervalues(d):
return d.itervalues()
def iterkeys(d):
return d.iterkeys()
zip_longest = itertools.izip_longest
def getargspec(f):
return inspect.getargspec(f)
else:
range_ = range
zip_ = zip
def iteritems(d):
return iter(d.items())
def itervalues(d):
return iter(d.values())
def iterkeys(d):
return iter(d.keys())
zip_longest = itertools.zip_longest
def getargspec(f):
ArgSpec = namedtuple('ArgSpec', ('args', 'varargs', 'keywords', 'defaults'))
# FullArgSpec(args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations)
argspec = inspect.getfullargspec(f)
return ArgSpec(argspec.args, argspec.varargs, argspec.varkw, argspec.defaults)
| import sys
import itertools
import inspect
from collections import namedtuple
_PY2 = sys.version_info.major == 2
if _PY2:
range_ = xrange
zip_ = itertools.izip
def iteritems(d):
return d.iteritems()
def itervalues(d):
return d.itervalues()
def iterkeys(d):
return d.iterkeys()
zip_longest = itertools.izip_longest
def getargspec(f):
return inspect.getargspec(f)
else:
range_ = range
zip_ = zip
def iteritems(d):
return iter(d.items())
def itervalues(d):
return iter(d.values())
def iterkeys(d):
return iter(d.keys())
zip_longest = itertools.zip_longest
_ArgSpec = namedtuple('ArgSpec', ('args', 'varargs', 'keywords', 'defaults'))
def getargspec(f):
argspec = inspect.getfullargspec(f)
# FullArgSpec(args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations)
return _ArgSpec(argspec.args, argspec.varargs, argspec.varkw, argspec.defaults)
| Move namedtuple definition outside of argspec function | Move namedtuple definition outside of argspec function
| Python | apache-2.0 | dwavesystems/dimod,dwavesystems/dimod | import sys
import itertools
import inspect
from collections import namedtuple
_PY2 = sys.version_info.major == 2
if _PY2:
range_ = xrange
zip_ = itertools.izip
def iteritems(d):
return d.iteritems()
def itervalues(d):
return d.itervalues()
def iterkeys(d):
return d.iterkeys()
zip_longest = itertools.izip_longest
def getargspec(f):
return inspect.getargspec(f)
else:
range_ = range
zip_ = zip
def iteritems(d):
return iter(d.items())
def itervalues(d):
return iter(d.values())
def iterkeys(d):
return iter(d.keys())
zip_longest = itertools.zip_longest
def getargspec(f):
ArgSpec = namedtuple('ArgSpec', ('args', 'varargs', 'keywords', 'defaults'))
# FullArgSpec(args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations)
argspec = inspect.getfullargspec(f)
return ArgSpec(argspec.args, argspec.varargs, argspec.varkw, argspec.defaults)
Move namedtuple definition outside of argspec function | import sys
import itertools
import inspect
from collections import namedtuple
_PY2 = sys.version_info.major == 2
if _PY2:
range_ = xrange
zip_ = itertools.izip
def iteritems(d):
return d.iteritems()
def itervalues(d):
return d.itervalues()
def iterkeys(d):
return d.iterkeys()
zip_longest = itertools.izip_longest
def getargspec(f):
return inspect.getargspec(f)
else:
range_ = range
zip_ = zip
def iteritems(d):
return iter(d.items())
def itervalues(d):
return iter(d.values())
def iterkeys(d):
return iter(d.keys())
zip_longest = itertools.zip_longest
_ArgSpec = namedtuple('ArgSpec', ('args', 'varargs', 'keywords', 'defaults'))
def getargspec(f):
argspec = inspect.getfullargspec(f)
# FullArgSpec(args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations)
return _ArgSpec(argspec.args, argspec.varargs, argspec.varkw, argspec.defaults)
| <commit_before>import sys
import itertools
import inspect
from collections import namedtuple
_PY2 = sys.version_info.major == 2
if _PY2:
range_ = xrange
zip_ = itertools.izip
def iteritems(d):
return d.iteritems()
def itervalues(d):
return d.itervalues()
def iterkeys(d):
return d.iterkeys()
zip_longest = itertools.izip_longest
def getargspec(f):
return inspect.getargspec(f)
else:
range_ = range
zip_ = zip
def iteritems(d):
return iter(d.items())
def itervalues(d):
return iter(d.values())
def iterkeys(d):
return iter(d.keys())
zip_longest = itertools.zip_longest
def getargspec(f):
ArgSpec = namedtuple('ArgSpec', ('args', 'varargs', 'keywords', 'defaults'))
# FullArgSpec(args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations)
argspec = inspect.getfullargspec(f)
return ArgSpec(argspec.args, argspec.varargs, argspec.varkw, argspec.defaults)
<commit_msg>Move namedtuple definition outside of argspec function<commit_after> | import sys
import itertools
import inspect
from collections import namedtuple
_PY2 = sys.version_info.major == 2
if _PY2:
range_ = xrange
zip_ = itertools.izip
def iteritems(d):
return d.iteritems()
def itervalues(d):
return d.itervalues()
def iterkeys(d):
return d.iterkeys()
zip_longest = itertools.izip_longest
def getargspec(f):
return inspect.getargspec(f)
else:
range_ = range
zip_ = zip
def iteritems(d):
return iter(d.items())
def itervalues(d):
return iter(d.values())
def iterkeys(d):
return iter(d.keys())
zip_longest = itertools.zip_longest
_ArgSpec = namedtuple('ArgSpec', ('args', 'varargs', 'keywords', 'defaults'))
def getargspec(f):
argspec = inspect.getfullargspec(f)
# FullArgSpec(args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations)
return _ArgSpec(argspec.args, argspec.varargs, argspec.varkw, argspec.defaults)
| import sys
import itertools
import inspect
from collections import namedtuple
_PY2 = sys.version_info.major == 2
if _PY2:
range_ = xrange
zip_ = itertools.izip
def iteritems(d):
return d.iteritems()
def itervalues(d):
return d.itervalues()
def iterkeys(d):
return d.iterkeys()
zip_longest = itertools.izip_longest
def getargspec(f):
return inspect.getargspec(f)
else:
range_ = range
zip_ = zip
def iteritems(d):
return iter(d.items())
def itervalues(d):
return iter(d.values())
def iterkeys(d):
return iter(d.keys())
zip_longest = itertools.zip_longest
def getargspec(f):
ArgSpec = namedtuple('ArgSpec', ('args', 'varargs', 'keywords', 'defaults'))
# FullArgSpec(args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations)
argspec = inspect.getfullargspec(f)
return ArgSpec(argspec.args, argspec.varargs, argspec.varkw, argspec.defaults)
Move namedtuple definition outside of argspec functionimport sys
import itertools
import inspect
from collections import namedtuple
_PY2 = sys.version_info.major == 2
if _PY2:
range_ = xrange
zip_ = itertools.izip
def iteritems(d):
return d.iteritems()
def itervalues(d):
return d.itervalues()
def iterkeys(d):
return d.iterkeys()
zip_longest = itertools.izip_longest
def getargspec(f):
return inspect.getargspec(f)
else:
range_ = range
zip_ = zip
def iteritems(d):
return iter(d.items())
def itervalues(d):
return iter(d.values())
def iterkeys(d):
return iter(d.keys())
zip_longest = itertools.zip_longest
_ArgSpec = namedtuple('ArgSpec', ('args', 'varargs', 'keywords', 'defaults'))
def getargspec(f):
argspec = inspect.getfullargspec(f)
# FullArgSpec(args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations)
return _ArgSpec(argspec.args, argspec.varargs, argspec.varkw, argspec.defaults)
| <commit_before>import sys
import itertools
import inspect
from collections import namedtuple
_PY2 = sys.version_info.major == 2
if _PY2:
range_ = xrange
zip_ = itertools.izip
def iteritems(d):
return d.iteritems()
def itervalues(d):
return d.itervalues()
def iterkeys(d):
return d.iterkeys()
zip_longest = itertools.izip_longest
def getargspec(f):
return inspect.getargspec(f)
else:
range_ = range
zip_ = zip
def iteritems(d):
return iter(d.items())
def itervalues(d):
return iter(d.values())
def iterkeys(d):
return iter(d.keys())
zip_longest = itertools.zip_longest
def getargspec(f):
ArgSpec = namedtuple('ArgSpec', ('args', 'varargs', 'keywords', 'defaults'))
# FullArgSpec(args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations)
argspec = inspect.getfullargspec(f)
return ArgSpec(argspec.args, argspec.varargs, argspec.varkw, argspec.defaults)
<commit_msg>Move namedtuple definition outside of argspec function<commit_after>import sys
import itertools
import inspect
from collections import namedtuple
_PY2 = sys.version_info.major == 2
if _PY2:
range_ = xrange
zip_ = itertools.izip
def iteritems(d):
return d.iteritems()
def itervalues(d):
return d.itervalues()
def iterkeys(d):
return d.iterkeys()
zip_longest = itertools.izip_longest
def getargspec(f):
return inspect.getargspec(f)
else:
range_ = range
zip_ = zip
def iteritems(d):
return iter(d.items())
def itervalues(d):
return iter(d.values())
def iterkeys(d):
return iter(d.keys())
zip_longest = itertools.zip_longest
_ArgSpec = namedtuple('ArgSpec', ('args', 'varargs', 'keywords', 'defaults'))
def getargspec(f):
argspec = inspect.getfullargspec(f)
# FullArgSpec(args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations)
return _ArgSpec(argspec.args, argspec.varargs, argspec.varkw, argspec.defaults)
|
fce5851733205a2b15e53971af13f56c42063eb3 | qual/calendar.py | qual/calendar.py | from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
def __str__(self):
return "%s (%s)" % (self.date, self.calendar.__name__)
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
@staticmethod
def is_julian_leap_year(y):
return (y % 4) == 0
def date(self, year, month, day):
if day == 29 and month == 2 and self.is_julian_leap_year(year):
d = date(year, month, 28)
d = d + timedelta(days=11)
else:
d = date(year, month, day)
d = d + timedelta(days=10)
return self.from_date(d)
| from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
def __str__(self):
return "%s (%s)" % (self.date, self.calendar.__name__)
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
@staticmethod
def is_julian_leap_year(y):
return (y % 4) == 0
def date(self, year, month, day):
if day == 29 and month == 2 and self.is_julian_leap_year(year):
d = date(year, month, 28)
else:
d = date(year, month, day)
d = d + timedelta(days=10)
return self.from_date(d)
| Add 10 days only in the leap day case too. | Add 10 days only in the leap day case too.
| Python | apache-2.0 | jwg4/qual,jwg4/calexicon | from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
def __str__(self):
return "%s (%s)" % (self.date, self.calendar.__name__)
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
@staticmethod
def is_julian_leap_year(y):
return (y % 4) == 0
def date(self, year, month, day):
if day == 29 and month == 2 and self.is_julian_leap_year(year):
d = date(year, month, 28)
d = d + timedelta(days=11)
else:
d = date(year, month, day)
d = d + timedelta(days=10)
return self.from_date(d)
Add 10 days only in the leap day case too. | from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
def __str__(self):
return "%s (%s)" % (self.date, self.calendar.__name__)
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
@staticmethod
def is_julian_leap_year(y):
return (y % 4) == 0
def date(self, year, month, day):
if day == 29 and month == 2 and self.is_julian_leap_year(year):
d = date(year, month, 28)
else:
d = date(year, month, day)
d = d + timedelta(days=10)
return self.from_date(d)
| <commit_before>from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
def __str__(self):
return "%s (%s)" % (self.date, self.calendar.__name__)
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
@staticmethod
def is_julian_leap_year(y):
return (y % 4) == 0
def date(self, year, month, day):
if day == 29 and month == 2 and self.is_julian_leap_year(year):
d = date(year, month, 28)
d = d + timedelta(days=11)
else:
d = date(year, month, day)
d = d + timedelta(days=10)
return self.from_date(d)
<commit_msg>Add 10 days only in the leap day case too.<commit_after> | from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
def __str__(self):
return "%s (%s)" % (self.date, self.calendar.__name__)
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
@staticmethod
def is_julian_leap_year(y):
return (y % 4) == 0
def date(self, year, month, day):
if day == 29 and month == 2 and self.is_julian_leap_year(year):
d = date(year, month, 28)
else:
d = date(year, month, day)
d = d + timedelta(days=10)
return self.from_date(d)
| from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
def __str__(self):
return "%s (%s)" % (self.date, self.calendar.__name__)
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
@staticmethod
def is_julian_leap_year(y):
return (y % 4) == 0
def date(self, year, month, day):
if day == 29 and month == 2 and self.is_julian_leap_year(year):
d = date(year, month, 28)
d = d + timedelta(days=11)
else:
d = date(year, month, day)
d = d + timedelta(days=10)
return self.from_date(d)
Add 10 days only in the leap day case too.from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
def __str__(self):
return "%s (%s)" % (self.date, self.calendar.__name__)
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
@staticmethod
def is_julian_leap_year(y):
return (y % 4) == 0
def date(self, year, month, day):
if day == 29 and month == 2 and self.is_julian_leap_year(year):
d = date(year, month, 28)
else:
d = date(year, month, day)
d = d + timedelta(days=10)
return self.from_date(d)
| <commit_before>from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
def __str__(self):
return "%s (%s)" % (self.date, self.calendar.__name__)
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
@staticmethod
def is_julian_leap_year(y):
return (y % 4) == 0
def date(self, year, month, day):
if day == 29 and month == 2 and self.is_julian_leap_year(year):
d = date(year, month, 28)
d = d + timedelta(days=11)
else:
d = date(year, month, day)
d = d + timedelta(days=10)
return self.from_date(d)
<commit_msg>Add 10 days only in the leap day case too.<commit_after>from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
def __str__(self):
return "%s (%s)" % (self.date, self.calendar.__name__)
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
@staticmethod
def is_julian_leap_year(y):
return (y % 4) == 0
def date(self, year, month, day):
if day == 29 and month == 2 and self.is_julian_leap_year(year):
d = date(year, month, 28)
else:
d = date(year, month, day)
d = d + timedelta(days=10)
return self.from_date(d)
|
5fe53a31bd7f37f8d9bd4fbe3796c8a0fa85019a | storm/db.py | storm/db.py | import motor
import error
from tornado import gen
class Connection(object):
def __init__(self, host='localhost', port=None, db=None):
self.host = host
self.port = port
self.db = db
class Database(object):
def __init__(self, connection):
if not isinstance(connection, Connection):
raise error.StormError('connection must be instance of storm.db.Connection')
self.connection = connection
self.is_connected = False
class MongoDb(Database):
def connect(self):
if self.is_connected:
return
self.motor_client = motor.MotorClient(
self.connection.host,
self.connection.port
).open_sync()
self.db = self.motor_client[self.connection.db]
self.is_connected = True
@gen.coroutine
def select_one(self, table, **args):
self.connect()
result = yield motor.Op(getattr(self.db, table).find_one, args)
if result is None:
raise error.StormNotFoundError("Object of type: %s not found with args: %s" % (table, args))
callback = args.get('callback')
if callback is None:
raise gen.Return(result)
callback(result)
@gen.coroutine
def insert(self, table, data, callback=None):
self.connect()
result = yield motor.Op(self.db[table].insert, data)
if callback is None:
raise gen.Return(result)
callback(result)
| import motor
import error
from tornado import gen
from bson.objectid import ObjectId
class Connection(object):
def __init__(self, host='localhost', port=None, db=None):
self.host = host
self.port = port
self.db = db
class Database(object):
def __init__(self, connection):
if not isinstance(connection, Connection):
raise error.StormError('connection must be instance of storm.db.Connection')
self.connection = connection
self.is_connected = False
class MongoDb(Database):
def connect(self):
if self.is_connected:
return
self.motor_client = motor.MotorClient(
self.connection.host,
self.connection.port
).open_sync()
self.db = self.motor_client[self.connection.db]
self.is_connected = True
@gen.coroutine
def select_one(self, table, **kwargs):
self.connect()
if '_id' in kwargs:
kwargs['_id'] = ObjectId(kwargs['_id'])
result = yield motor.Op(getattr(self.db, table).find_one, **kwargs)
if result is None:
raise error.StormNotFoundError("Object of type: %s not found with args: %s" % (table, kwargs))
callback = kwargs.get('callback')
if callback is None:
raise gen.Return(result)
callback(result)
@gen.coroutine
def insert(self, table, data, callback=None):
self.connect()
result = yield motor.Op(self.db[table].insert, data)
if callback is None:
raise gen.Return(result)
callback(result)
| Make sure looking up by id works correctly | Make sure looking up by id works correctly
| Python | mit | ccampbell/storm,liujiantong/storm | import motor
import error
from tornado import gen
class Connection(object):
def __init__(self, host='localhost', port=None, db=None):
self.host = host
self.port = port
self.db = db
class Database(object):
def __init__(self, connection):
if not isinstance(connection, Connection):
raise error.StormError('connection must be instance of storm.db.Connection')
self.connection = connection
self.is_connected = False
class MongoDb(Database):
def connect(self):
if self.is_connected:
return
self.motor_client = motor.MotorClient(
self.connection.host,
self.connection.port
).open_sync()
self.db = self.motor_client[self.connection.db]
self.is_connected = True
@gen.coroutine
def select_one(self, table, **args):
self.connect()
result = yield motor.Op(getattr(self.db, table).find_one, args)
if result is None:
raise error.StormNotFoundError("Object of type: %s not found with args: %s" % (table, args))
callback = args.get('callback')
if callback is None:
raise gen.Return(result)
callback(result)
@gen.coroutine
def insert(self, table, data, callback=None):
self.connect()
result = yield motor.Op(self.db[table].insert, data)
if callback is None:
raise gen.Return(result)
callback(result)
Make sure looking up by id works correctly | import motor
import error
from tornado import gen
from bson.objectid import ObjectId
class Connection(object):
def __init__(self, host='localhost', port=None, db=None):
self.host = host
self.port = port
self.db = db
class Database(object):
def __init__(self, connection):
if not isinstance(connection, Connection):
raise error.StormError('connection must be instance of storm.db.Connection')
self.connection = connection
self.is_connected = False
class MongoDb(Database):
def connect(self):
if self.is_connected:
return
self.motor_client = motor.MotorClient(
self.connection.host,
self.connection.port
).open_sync()
self.db = self.motor_client[self.connection.db]
self.is_connected = True
@gen.coroutine
def select_one(self, table, **kwargs):
self.connect()
if '_id' in kwargs:
kwargs['_id'] = ObjectId(kwargs['_id'])
result = yield motor.Op(getattr(self.db, table).find_one, **kwargs)
if result is None:
raise error.StormNotFoundError("Object of type: %s not found with args: %s" % (table, kwargs))
callback = kwargs.get('callback')
if callback is None:
raise gen.Return(result)
callback(result)
@gen.coroutine
def insert(self, table, data, callback=None):
self.connect()
result = yield motor.Op(self.db[table].insert, data)
if callback is None:
raise gen.Return(result)
callback(result)
| <commit_before>import motor
import error
from tornado import gen
class Connection(object):
def __init__(self, host='localhost', port=None, db=None):
self.host = host
self.port = port
self.db = db
class Database(object):
def __init__(self, connection):
if not isinstance(connection, Connection):
raise error.StormError('connection must be instance of storm.db.Connection')
self.connection = connection
self.is_connected = False
class MongoDb(Database):
def connect(self):
if self.is_connected:
return
self.motor_client = motor.MotorClient(
self.connection.host,
self.connection.port
).open_sync()
self.db = self.motor_client[self.connection.db]
self.is_connected = True
@gen.coroutine
def select_one(self, table, **args):
self.connect()
result = yield motor.Op(getattr(self.db, table).find_one, args)
if result is None:
raise error.StormNotFoundError("Object of type: %s not found with args: %s" % (table, args))
callback = args.get('callback')
if callback is None:
raise gen.Return(result)
callback(result)
@gen.coroutine
def insert(self, table, data, callback=None):
self.connect()
result = yield motor.Op(self.db[table].insert, data)
if callback is None:
raise gen.Return(result)
callback(result)
<commit_msg>Make sure looking up by id works correctly<commit_after> | import motor
import error
from tornado import gen
from bson.objectid import ObjectId
class Connection(object):
def __init__(self, host='localhost', port=None, db=None):
self.host = host
self.port = port
self.db = db
class Database(object):
def __init__(self, connection):
if not isinstance(connection, Connection):
raise error.StormError('connection must be instance of storm.db.Connection')
self.connection = connection
self.is_connected = False
class MongoDb(Database):
def connect(self):
if self.is_connected:
return
self.motor_client = motor.MotorClient(
self.connection.host,
self.connection.port
).open_sync()
self.db = self.motor_client[self.connection.db]
self.is_connected = True
@gen.coroutine
def select_one(self, table, **kwargs):
self.connect()
if '_id' in kwargs:
kwargs['_id'] = ObjectId(kwargs['_id'])
result = yield motor.Op(getattr(self.db, table).find_one, **kwargs)
if result is None:
raise error.StormNotFoundError("Object of type: %s not found with args: %s" % (table, kwargs))
callback = kwargs.get('callback')
if callback is None:
raise gen.Return(result)
callback(result)
@gen.coroutine
def insert(self, table, data, callback=None):
self.connect()
result = yield motor.Op(self.db[table].insert, data)
if callback is None:
raise gen.Return(result)
callback(result)
| import motor
import error
from tornado import gen
class Connection(object):
def __init__(self, host='localhost', port=None, db=None):
self.host = host
self.port = port
self.db = db
class Database(object):
def __init__(self, connection):
if not isinstance(connection, Connection):
raise error.StormError('connection must be instance of storm.db.Connection')
self.connection = connection
self.is_connected = False
class MongoDb(Database):
def connect(self):
if self.is_connected:
return
self.motor_client = motor.MotorClient(
self.connection.host,
self.connection.port
).open_sync()
self.db = self.motor_client[self.connection.db]
self.is_connected = True
@gen.coroutine
def select_one(self, table, **args):
self.connect()
result = yield motor.Op(getattr(self.db, table).find_one, args)
if result is None:
raise error.StormNotFoundError("Object of type: %s not found with args: %s" % (table, args))
callback = args.get('callback')
if callback is None:
raise gen.Return(result)
callback(result)
@gen.coroutine
def insert(self, table, data, callback=None):
self.connect()
result = yield motor.Op(self.db[table].insert, data)
if callback is None:
raise gen.Return(result)
callback(result)
Make sure looking up by id works correctlyimport motor
import error
from tornado import gen
from bson.objectid import ObjectId
class Connection(object):
def __init__(self, host='localhost', port=None, db=None):
self.host = host
self.port = port
self.db = db
class Database(object):
def __init__(self, connection):
if not isinstance(connection, Connection):
raise error.StormError('connection must be instance of storm.db.Connection')
self.connection = connection
self.is_connected = False
class MongoDb(Database):
def connect(self):
if self.is_connected:
return
self.motor_client = motor.MotorClient(
self.connection.host,
self.connection.port
).open_sync()
self.db = self.motor_client[self.connection.db]
self.is_connected = True
@gen.coroutine
def select_one(self, table, **kwargs):
self.connect()
if '_id' in kwargs:
kwargs['_id'] = ObjectId(kwargs['_id'])
result = yield motor.Op(getattr(self.db, table).find_one, **kwargs)
if result is None:
raise error.StormNotFoundError("Object of type: %s not found with args: %s" % (table, kwargs))
callback = kwargs.get('callback')
if callback is None:
raise gen.Return(result)
callback(result)
@gen.coroutine
def insert(self, table, data, callback=None):
self.connect()
result = yield motor.Op(self.db[table].insert, data)
if callback is None:
raise gen.Return(result)
callback(result)
| <commit_before>import motor
import error
from tornado import gen
class Connection(object):
def __init__(self, host='localhost', port=None, db=None):
self.host = host
self.port = port
self.db = db
class Database(object):
def __init__(self, connection):
if not isinstance(connection, Connection):
raise error.StormError('connection must be instance of storm.db.Connection')
self.connection = connection
self.is_connected = False
class MongoDb(Database):
def connect(self):
if self.is_connected:
return
self.motor_client = motor.MotorClient(
self.connection.host,
self.connection.port
).open_sync()
self.db = self.motor_client[self.connection.db]
self.is_connected = True
@gen.coroutine
def select_one(self, table, **args):
self.connect()
result = yield motor.Op(getattr(self.db, table).find_one, args)
if result is None:
raise error.StormNotFoundError("Object of type: %s not found with args: %s" % (table, args))
callback = args.get('callback')
if callback is None:
raise gen.Return(result)
callback(result)
@gen.coroutine
def insert(self, table, data, callback=None):
self.connect()
result = yield motor.Op(self.db[table].insert, data)
if callback is None:
raise gen.Return(result)
callback(result)
<commit_msg>Make sure looking up by id works correctly<commit_after>import motor
import error
from tornado import gen
from bson.objectid import ObjectId
class Connection(object):
def __init__(self, host='localhost', port=None, db=None):
self.host = host
self.port = port
self.db = db
class Database(object):
def __init__(self, connection):
if not isinstance(connection, Connection):
raise error.StormError('connection must be instance of storm.db.Connection')
self.connection = connection
self.is_connected = False
class MongoDb(Database):
def connect(self):
if self.is_connected:
return
self.motor_client = motor.MotorClient(
self.connection.host,
self.connection.port
).open_sync()
self.db = self.motor_client[self.connection.db]
self.is_connected = True
@gen.coroutine
def select_one(self, table, **kwargs):
self.connect()
if '_id' in kwargs:
kwargs['_id'] = ObjectId(kwargs['_id'])
result = yield motor.Op(getattr(self.db, table).find_one, **kwargs)
if result is None:
raise error.StormNotFoundError("Object of type: %s not found with args: %s" % (table, kwargs))
callback = kwargs.get('callback')
if callback is None:
raise gen.Return(result)
callback(result)
@gen.coroutine
def insert(self, table, data, callback=None):
self.connect()
result = yield motor.Op(self.db[table].insert, data)
if callback is None:
raise gen.Return(result)
callback(result)
|
c7364a95e06e1743fdd0fef73367f458ec3630d7 | xqueue/aws_settings.py | xqueue/aws_settings.py | from settings import *
import json
from logsettings import get_logger_config
with open(ENV_ROOT / "env.json") as env_file:
ENV_TOKENS = json.load(env_file)
XQUEUES = ENV_TOKENS['XQUEUES']
XQUEUE_WORKERS_PER_QUEUE = ENV_TOKENS['XQUEUE_WORKERS_PER_QUEUE']
LOG_DIR = ENV_TOKENS['LOG_DIR']
local_loglevel = ENV_TOKENS.get('LOCAL_LOGLEVEL','INFO')
LOGGING = get_logger_config(LOG_DIR,
logging_env=ENV_TOKENS['LOGGING_ENV'],
syslog_addr=(ENV_TOKENS['SYSLOG_SERVER'], 514),
local_loglevel=local_loglevel,
debug=False)
with open(ENV_ROOT / "auth.json") as auth_file:
AUTH_TOKENS = json.load(auth_file)
DATABASES = AUTH_TOKENS['DATABASES']
AWS_ACCESS_KEY_ID = AUTH_TOKENS["AWS_ACCESS_KEY_ID"]
AWS_SECRET_ACCESS_KEY = AUTH_TOKENS["AWS_SECRET_ACCESS_KEY"]
REQUESTS_BASIC_AUTH = AUTH_TOKENS["REQUESTS_BASIC_AUTH"]
| from settings import *
import json
from logsettings import get_logger_config
with open(ENV_ROOT / "env.json") as env_file:
ENV_TOKENS = json.load(env_file)
XQUEUES = ENV_TOKENS['XQUEUES']
XQUEUE_WORKERS_PER_QUEUE = ENV_TOKENS['XQUEUE_WORKERS_PER_QUEUE']
LOG_DIR = ENV_TOKENS['LOG_DIR']
local_loglevel = ENV_TOKENS.get('LOCAL_LOGLEVEL', 'INFO')
LOGGING = get_logger_config(LOG_DIR,
logging_env=ENV_TOKENS['LOGGING_ENV'],
syslog_addr=(ENV_TOKENS['SYSLOG_SERVER'], 514),
local_loglevel=local_loglevel,
debug=False)
RABBIT_HOST = ENV_TOKENS.get('RABBIT_HOST', RABBIT_HOST)
with open(ENV_ROOT / "auth.json") as auth_file:
AUTH_TOKENS = json.load(auth_file)
DATABASES = AUTH_TOKENS['DATABASES']
AWS_ACCESS_KEY_ID = AUTH_TOKENS["AWS_ACCESS_KEY_ID"]
AWS_SECRET_ACCESS_KEY = AUTH_TOKENS["AWS_SECRET_ACCESS_KEY"]
REQUESTS_BASIC_AUTH = AUTH_TOKENS["REQUESTS_BASIC_AUTH"]
| Use RABBIT_HOST in env.json if it exists | Use RABBIT_HOST in env.json if it exists
| Python | agpl-3.0 | edx/xqueue,EDUlib/xqueue,Marx86/xqueue,knehez/xqueue,EDUlib/xqueue,EDUlib/xqueue,Marx86/xqueue,nttks/xqueue,knehez/xqueue,knehez/xqueue,nttks/xqueue,edx/xqueue,edx/xqueue | from settings import *
import json
from logsettings import get_logger_config
with open(ENV_ROOT / "env.json") as env_file:
ENV_TOKENS = json.load(env_file)
XQUEUES = ENV_TOKENS['XQUEUES']
XQUEUE_WORKERS_PER_QUEUE = ENV_TOKENS['XQUEUE_WORKERS_PER_QUEUE']
LOG_DIR = ENV_TOKENS['LOG_DIR']
local_loglevel = ENV_TOKENS.get('LOCAL_LOGLEVEL','INFO')
LOGGING = get_logger_config(LOG_DIR,
logging_env=ENV_TOKENS['LOGGING_ENV'],
syslog_addr=(ENV_TOKENS['SYSLOG_SERVER'], 514),
local_loglevel=local_loglevel,
debug=False)
with open(ENV_ROOT / "auth.json") as auth_file:
AUTH_TOKENS = json.load(auth_file)
DATABASES = AUTH_TOKENS['DATABASES']
AWS_ACCESS_KEY_ID = AUTH_TOKENS["AWS_ACCESS_KEY_ID"]
AWS_SECRET_ACCESS_KEY = AUTH_TOKENS["AWS_SECRET_ACCESS_KEY"]
REQUESTS_BASIC_AUTH = AUTH_TOKENS["REQUESTS_BASIC_AUTH"]
Use RABBIT_HOST in env.json if it exists | from settings import *
import json
from logsettings import get_logger_config
with open(ENV_ROOT / "env.json") as env_file:
ENV_TOKENS = json.load(env_file)
XQUEUES = ENV_TOKENS['XQUEUES']
XQUEUE_WORKERS_PER_QUEUE = ENV_TOKENS['XQUEUE_WORKERS_PER_QUEUE']
LOG_DIR = ENV_TOKENS['LOG_DIR']
local_loglevel = ENV_TOKENS.get('LOCAL_LOGLEVEL', 'INFO')
LOGGING = get_logger_config(LOG_DIR,
logging_env=ENV_TOKENS['LOGGING_ENV'],
syslog_addr=(ENV_TOKENS['SYSLOG_SERVER'], 514),
local_loglevel=local_loglevel,
debug=False)
RABBIT_HOST = ENV_TOKENS.get('RABBIT_HOST', RABBIT_HOST)
with open(ENV_ROOT / "auth.json") as auth_file:
AUTH_TOKENS = json.load(auth_file)
DATABASES = AUTH_TOKENS['DATABASES']
AWS_ACCESS_KEY_ID = AUTH_TOKENS["AWS_ACCESS_KEY_ID"]
AWS_SECRET_ACCESS_KEY = AUTH_TOKENS["AWS_SECRET_ACCESS_KEY"]
REQUESTS_BASIC_AUTH = AUTH_TOKENS["REQUESTS_BASIC_AUTH"]
| <commit_before>from settings import *
import json
from logsettings import get_logger_config
with open(ENV_ROOT / "env.json") as env_file:
ENV_TOKENS = json.load(env_file)
XQUEUES = ENV_TOKENS['XQUEUES']
XQUEUE_WORKERS_PER_QUEUE = ENV_TOKENS['XQUEUE_WORKERS_PER_QUEUE']
LOG_DIR = ENV_TOKENS['LOG_DIR']
local_loglevel = ENV_TOKENS.get('LOCAL_LOGLEVEL','INFO')
LOGGING = get_logger_config(LOG_DIR,
logging_env=ENV_TOKENS['LOGGING_ENV'],
syslog_addr=(ENV_TOKENS['SYSLOG_SERVER'], 514),
local_loglevel=local_loglevel,
debug=False)
with open(ENV_ROOT / "auth.json") as auth_file:
AUTH_TOKENS = json.load(auth_file)
DATABASES = AUTH_TOKENS['DATABASES']
AWS_ACCESS_KEY_ID = AUTH_TOKENS["AWS_ACCESS_KEY_ID"]
AWS_SECRET_ACCESS_KEY = AUTH_TOKENS["AWS_SECRET_ACCESS_KEY"]
REQUESTS_BASIC_AUTH = AUTH_TOKENS["REQUESTS_BASIC_AUTH"]
<commit_msg>Use RABBIT_HOST in env.json if it exists<commit_after> | from settings import *
import json
from logsettings import get_logger_config
with open(ENV_ROOT / "env.json") as env_file:
ENV_TOKENS = json.load(env_file)
XQUEUES = ENV_TOKENS['XQUEUES']
XQUEUE_WORKERS_PER_QUEUE = ENV_TOKENS['XQUEUE_WORKERS_PER_QUEUE']
LOG_DIR = ENV_TOKENS['LOG_DIR']
local_loglevel = ENV_TOKENS.get('LOCAL_LOGLEVEL', 'INFO')
LOGGING = get_logger_config(LOG_DIR,
logging_env=ENV_TOKENS['LOGGING_ENV'],
syslog_addr=(ENV_TOKENS['SYSLOG_SERVER'], 514),
local_loglevel=local_loglevel,
debug=False)
RABBIT_HOST = ENV_TOKENS.get('RABBIT_HOST', RABBIT_HOST)
with open(ENV_ROOT / "auth.json") as auth_file:
AUTH_TOKENS = json.load(auth_file)
DATABASES = AUTH_TOKENS['DATABASES']
AWS_ACCESS_KEY_ID = AUTH_TOKENS["AWS_ACCESS_KEY_ID"]
AWS_SECRET_ACCESS_KEY = AUTH_TOKENS["AWS_SECRET_ACCESS_KEY"]
REQUESTS_BASIC_AUTH = AUTH_TOKENS["REQUESTS_BASIC_AUTH"]
| from settings import *
import json
from logsettings import get_logger_config
with open(ENV_ROOT / "env.json") as env_file:
ENV_TOKENS = json.load(env_file)
XQUEUES = ENV_TOKENS['XQUEUES']
XQUEUE_WORKERS_PER_QUEUE = ENV_TOKENS['XQUEUE_WORKERS_PER_QUEUE']
LOG_DIR = ENV_TOKENS['LOG_DIR']
local_loglevel = ENV_TOKENS.get('LOCAL_LOGLEVEL','INFO')
LOGGING = get_logger_config(LOG_DIR,
logging_env=ENV_TOKENS['LOGGING_ENV'],
syslog_addr=(ENV_TOKENS['SYSLOG_SERVER'], 514),
local_loglevel=local_loglevel,
debug=False)
with open(ENV_ROOT / "auth.json") as auth_file:
AUTH_TOKENS = json.load(auth_file)
DATABASES = AUTH_TOKENS['DATABASES']
AWS_ACCESS_KEY_ID = AUTH_TOKENS["AWS_ACCESS_KEY_ID"]
AWS_SECRET_ACCESS_KEY = AUTH_TOKENS["AWS_SECRET_ACCESS_KEY"]
REQUESTS_BASIC_AUTH = AUTH_TOKENS["REQUESTS_BASIC_AUTH"]
Use RABBIT_HOST in env.json if it existsfrom settings import *
import json
from logsettings import get_logger_config
with open(ENV_ROOT / "env.json") as env_file:
ENV_TOKENS = json.load(env_file)
XQUEUES = ENV_TOKENS['XQUEUES']
XQUEUE_WORKERS_PER_QUEUE = ENV_TOKENS['XQUEUE_WORKERS_PER_QUEUE']
LOG_DIR = ENV_TOKENS['LOG_DIR']
local_loglevel = ENV_TOKENS.get('LOCAL_LOGLEVEL', 'INFO')
LOGGING = get_logger_config(LOG_DIR,
logging_env=ENV_TOKENS['LOGGING_ENV'],
syslog_addr=(ENV_TOKENS['SYSLOG_SERVER'], 514),
local_loglevel=local_loglevel,
debug=False)
RABBIT_HOST = ENV_TOKENS.get('RABBIT_HOST', RABBIT_HOST)
with open(ENV_ROOT / "auth.json") as auth_file:
AUTH_TOKENS = json.load(auth_file)
DATABASES = AUTH_TOKENS['DATABASES']
AWS_ACCESS_KEY_ID = AUTH_TOKENS["AWS_ACCESS_KEY_ID"]
AWS_SECRET_ACCESS_KEY = AUTH_TOKENS["AWS_SECRET_ACCESS_KEY"]
REQUESTS_BASIC_AUTH = AUTH_TOKENS["REQUESTS_BASIC_AUTH"]
| <commit_before>from settings import *
import json
from logsettings import get_logger_config
with open(ENV_ROOT / "env.json") as env_file:
ENV_TOKENS = json.load(env_file)
XQUEUES = ENV_TOKENS['XQUEUES']
XQUEUE_WORKERS_PER_QUEUE = ENV_TOKENS['XQUEUE_WORKERS_PER_QUEUE']
LOG_DIR = ENV_TOKENS['LOG_DIR']
local_loglevel = ENV_TOKENS.get('LOCAL_LOGLEVEL','INFO')
LOGGING = get_logger_config(LOG_DIR,
logging_env=ENV_TOKENS['LOGGING_ENV'],
syslog_addr=(ENV_TOKENS['SYSLOG_SERVER'], 514),
local_loglevel=local_loglevel,
debug=False)
with open(ENV_ROOT / "auth.json") as auth_file:
AUTH_TOKENS = json.load(auth_file)
DATABASES = AUTH_TOKENS['DATABASES']
AWS_ACCESS_KEY_ID = AUTH_TOKENS["AWS_ACCESS_KEY_ID"]
AWS_SECRET_ACCESS_KEY = AUTH_TOKENS["AWS_SECRET_ACCESS_KEY"]
REQUESTS_BASIC_AUTH = AUTH_TOKENS["REQUESTS_BASIC_AUTH"]
<commit_msg>Use RABBIT_HOST in env.json if it exists<commit_after>from settings import *
import json
from logsettings import get_logger_config
with open(ENV_ROOT / "env.json") as env_file:
ENV_TOKENS = json.load(env_file)
XQUEUES = ENV_TOKENS['XQUEUES']
XQUEUE_WORKERS_PER_QUEUE = ENV_TOKENS['XQUEUE_WORKERS_PER_QUEUE']
LOG_DIR = ENV_TOKENS['LOG_DIR']
local_loglevel = ENV_TOKENS.get('LOCAL_LOGLEVEL', 'INFO')
LOGGING = get_logger_config(LOG_DIR,
logging_env=ENV_TOKENS['LOGGING_ENV'],
syslog_addr=(ENV_TOKENS['SYSLOG_SERVER'], 514),
local_loglevel=local_loglevel,
debug=False)
RABBIT_HOST = ENV_TOKENS.get('RABBIT_HOST', RABBIT_HOST)
with open(ENV_ROOT / "auth.json") as auth_file:
AUTH_TOKENS = json.load(auth_file)
DATABASES = AUTH_TOKENS['DATABASES']
AWS_ACCESS_KEY_ID = AUTH_TOKENS["AWS_ACCESS_KEY_ID"]
AWS_SECRET_ACCESS_KEY = AUTH_TOKENS["AWS_SECRET_ACCESS_KEY"]
REQUESTS_BASIC_AUTH = AUTH_TOKENS["REQUESTS_BASIC_AUTH"]
|
cfb42cd7ddbba5e197e0c2454e1a92ce913b6913 | calaccess_campaign_browser/management/commands/buildcalaccesscampaignbrowser.py | calaccess_campaign_browser/management/commands/buildcalaccesscampaignbrowser.py | from django.core.management import call_command
from calaccess_campaign_browser.management.commands import CalAccessCommand
class Command(CalAccessCommand):
help = 'Transforms and loads refined data from raw CAL-ACCESS source files'
def handle(self, *args, **options):
call_command("flushcalaccesscampaignbrowser")
call_command("loadcalaccesscampaignfilers")
call_command("loadcalaccesscampaignfilings")
call_command("loadcalaccesscampaignsummaries")
call_command("loadcalaccesscampaigncontributions")
call_command("loadcalaccesscampaignexpenditures")
self.success("Done!")
| from django.core.management import call_command
from calaccess_campaign_browser.management.commands import CalAccessCommand
class Command(CalAccessCommand):
help = 'Transforms and loads refined data from raw CAL-ACCESS source files'
def handle(self, *args, **options):
call_command("flushcalaccesscampaignbrowser")
call_command("loadcalaccesscampaignfilers")
call_command("loadcalaccesscampaignfilings")
call_command("loadcalaccesscampaignsummaries")
call_command("loadcalaccesscampaigncontributions")
#call_command("loadcalaccesscampaignexpenditures")
self.success("Done!")
| Comment out expends until we get them cleaned up | Comment out expends until we get them cleaned up
| Python | mit | myersjustinc/django-calaccess-campaign-browser,dwillis/django-calaccess-campaign-browser,california-civic-data-coalition/django-calaccess-campaign-browser,dwillis/django-calaccess-campaign-browser,california-civic-data-coalition/django-calaccess-campaign-browser,myersjustinc/django-calaccess-campaign-browser | from django.core.management import call_command
from calaccess_campaign_browser.management.commands import CalAccessCommand
class Command(CalAccessCommand):
help = 'Transforms and loads refined data from raw CAL-ACCESS source files'
def handle(self, *args, **options):
call_command("flushcalaccesscampaignbrowser")
call_command("loadcalaccesscampaignfilers")
call_command("loadcalaccesscampaignfilings")
call_command("loadcalaccesscampaignsummaries")
call_command("loadcalaccesscampaigncontributions")
call_command("loadcalaccesscampaignexpenditures")
self.success("Done!")
Comment out expends until we get them cleaned up | from django.core.management import call_command
from calaccess_campaign_browser.management.commands import CalAccessCommand
class Command(CalAccessCommand):
help = 'Transforms and loads refined data from raw CAL-ACCESS source files'
def handle(self, *args, **options):
call_command("flushcalaccesscampaignbrowser")
call_command("loadcalaccesscampaignfilers")
call_command("loadcalaccesscampaignfilings")
call_command("loadcalaccesscampaignsummaries")
call_command("loadcalaccesscampaigncontributions")
#call_command("loadcalaccesscampaignexpenditures")
self.success("Done!")
| <commit_before>from django.core.management import call_command
from calaccess_campaign_browser.management.commands import CalAccessCommand
class Command(CalAccessCommand):
help = 'Transforms and loads refined data from raw CAL-ACCESS source files'
def handle(self, *args, **options):
call_command("flushcalaccesscampaignbrowser")
call_command("loadcalaccesscampaignfilers")
call_command("loadcalaccesscampaignfilings")
call_command("loadcalaccesscampaignsummaries")
call_command("loadcalaccesscampaigncontributions")
call_command("loadcalaccesscampaignexpenditures")
self.success("Done!")
<commit_msg>Comment out expends until we get them cleaned up<commit_after> | from django.core.management import call_command
from calaccess_campaign_browser.management.commands import CalAccessCommand
class Command(CalAccessCommand):
help = 'Transforms and loads refined data from raw CAL-ACCESS source files'
def handle(self, *args, **options):
call_command("flushcalaccesscampaignbrowser")
call_command("loadcalaccesscampaignfilers")
call_command("loadcalaccesscampaignfilings")
call_command("loadcalaccesscampaignsummaries")
call_command("loadcalaccesscampaigncontributions")
#call_command("loadcalaccesscampaignexpenditures")
self.success("Done!")
| from django.core.management import call_command
from calaccess_campaign_browser.management.commands import CalAccessCommand
class Command(CalAccessCommand):
help = 'Transforms and loads refined data from raw CAL-ACCESS source files'
def handle(self, *args, **options):
call_command("flushcalaccesscampaignbrowser")
call_command("loadcalaccesscampaignfilers")
call_command("loadcalaccesscampaignfilings")
call_command("loadcalaccesscampaignsummaries")
call_command("loadcalaccesscampaigncontributions")
call_command("loadcalaccesscampaignexpenditures")
self.success("Done!")
Comment out expends until we get them cleaned upfrom django.core.management import call_command
from calaccess_campaign_browser.management.commands import CalAccessCommand
class Command(CalAccessCommand):
help = 'Transforms and loads refined data from raw CAL-ACCESS source files'
def handle(self, *args, **options):
call_command("flushcalaccesscampaignbrowser")
call_command("loadcalaccesscampaignfilers")
call_command("loadcalaccesscampaignfilings")
call_command("loadcalaccesscampaignsummaries")
call_command("loadcalaccesscampaigncontributions")
#call_command("loadcalaccesscampaignexpenditures")
self.success("Done!")
| <commit_before>from django.core.management import call_command
from calaccess_campaign_browser.management.commands import CalAccessCommand
class Command(CalAccessCommand):
help = 'Transforms and loads refined data from raw CAL-ACCESS source files'
def handle(self, *args, **options):
call_command("flushcalaccesscampaignbrowser")
call_command("loadcalaccesscampaignfilers")
call_command("loadcalaccesscampaignfilings")
call_command("loadcalaccesscampaignsummaries")
call_command("loadcalaccesscampaigncontributions")
call_command("loadcalaccesscampaignexpenditures")
self.success("Done!")
<commit_msg>Comment out expends until we get them cleaned up<commit_after>from django.core.management import call_command
from calaccess_campaign_browser.management.commands import CalAccessCommand
class Command(CalAccessCommand):
help = 'Transforms and loads refined data from raw CAL-ACCESS source files'
def handle(self, *args, **options):
call_command("flushcalaccesscampaignbrowser")
call_command("loadcalaccesscampaignfilers")
call_command("loadcalaccesscampaignfilings")
call_command("loadcalaccesscampaignsummaries")
call_command("loadcalaccesscampaigncontributions")
#call_command("loadcalaccesscampaignexpenditures")
self.success("Done!")
|
b79f5173629cbe5dfe15633084e203d4a52c0b1b | pyxray/parser/test_wikipedia.py | pyxray/parser/test_wikipedia.py | #!/usr/bin/env python
""" """
# Standard library modules.
import unittest
import logging
# Third party modules.
# Local modules.
from pyxray.parser.wikipedia import WikipediaElementNameParser
# Globals and constants variables.
class TestWikipediaElementNameParser(unittest.TestCase):
def setUp(self):
unittest.TestCase.setUp(self)
self.parser = WikipediaElementNameParser()
def tearDown(self):
unittest.TestCase.tearDown(self)
def test__iter__(self):
props = list(self.parser)
self.assertEqual(2819, len(props))
if __name__ == '__main__': #pragma: no cover
logging.getLogger().setLevel(logging.DEBUG)
unittest.main()
| #!/usr/bin/env python
""" """
# Standard library modules.
import unittest
import logging
# Third party modules.
# Local modules.
from pyxray.parser.wikipedia import WikipediaElementNameParser
# Globals and constants variables.
class TestWikipediaElementNameParser(unittest.TestCase):
def setUp(self):
unittest.TestCase.setUp(self)
self.parser = WikipediaElementNameParser()
def tearDown(self):
unittest.TestCase.tearDown(self)
def test__iter__(self):
props = list(self.parser)
self.assertEqual(2820, len(props))
if __name__ == '__main__': #pragma: no cover
logging.getLogger().setLevel(logging.DEBUG)
unittest.main()
| Fix test (wikipedia entries seem to change) | Fix test (wikipedia entries seem to change) | Python | mit | openmicroanalysis/pyxray,ppinard/pyxray,openmicroanalysis/pyxray | #!/usr/bin/env python
""" """
# Standard library modules.
import unittest
import logging
# Third party modules.
# Local modules.
from pyxray.parser.wikipedia import WikipediaElementNameParser
# Globals and constants variables.
class TestWikipediaElementNameParser(unittest.TestCase):
def setUp(self):
unittest.TestCase.setUp(self)
self.parser = WikipediaElementNameParser()
def tearDown(self):
unittest.TestCase.tearDown(self)
def test__iter__(self):
props = list(self.parser)
self.assertEqual(2819, len(props))
if __name__ == '__main__': #pragma: no cover
logging.getLogger().setLevel(logging.DEBUG)
unittest.main()
Fix test (wikipedia entries seem to change) | #!/usr/bin/env python
""" """
# Standard library modules.
import unittest
import logging
# Third party modules.
# Local modules.
from pyxray.parser.wikipedia import WikipediaElementNameParser
# Globals and constants variables.
class TestWikipediaElementNameParser(unittest.TestCase):
def setUp(self):
unittest.TestCase.setUp(self)
self.parser = WikipediaElementNameParser()
def tearDown(self):
unittest.TestCase.tearDown(self)
def test__iter__(self):
props = list(self.parser)
self.assertEqual(2820, len(props))
if __name__ == '__main__': #pragma: no cover
logging.getLogger().setLevel(logging.DEBUG)
unittest.main()
| <commit_before>#!/usr/bin/env python
""" """
# Standard library modules.
import unittest
import logging
# Third party modules.
# Local modules.
from pyxray.parser.wikipedia import WikipediaElementNameParser
# Globals and constants variables.
class TestWikipediaElementNameParser(unittest.TestCase):
def setUp(self):
unittest.TestCase.setUp(self)
self.parser = WikipediaElementNameParser()
def tearDown(self):
unittest.TestCase.tearDown(self)
def test__iter__(self):
props = list(self.parser)
self.assertEqual(2819, len(props))
if __name__ == '__main__': #pragma: no cover
logging.getLogger().setLevel(logging.DEBUG)
unittest.main()
<commit_msg>Fix test (wikipedia entries seem to change)<commit_after> | #!/usr/bin/env python
""" """
# Standard library modules.
import unittest
import logging
# Third party modules.
# Local modules.
from pyxray.parser.wikipedia import WikipediaElementNameParser
# Globals and constants variables.
class TestWikipediaElementNameParser(unittest.TestCase):
def setUp(self):
unittest.TestCase.setUp(self)
self.parser = WikipediaElementNameParser()
def tearDown(self):
unittest.TestCase.tearDown(self)
def test__iter__(self):
props = list(self.parser)
self.assertEqual(2820, len(props))
if __name__ == '__main__': #pragma: no cover
logging.getLogger().setLevel(logging.DEBUG)
unittest.main()
| #!/usr/bin/env python
""" """
# Standard library modules.
import unittest
import logging
# Third party modules.
# Local modules.
from pyxray.parser.wikipedia import WikipediaElementNameParser
# Globals and constants variables.
class TestWikipediaElementNameParser(unittest.TestCase):
def setUp(self):
unittest.TestCase.setUp(self)
self.parser = WikipediaElementNameParser()
def tearDown(self):
unittest.TestCase.tearDown(self)
def test__iter__(self):
props = list(self.parser)
self.assertEqual(2819, len(props))
if __name__ == '__main__': #pragma: no cover
logging.getLogger().setLevel(logging.DEBUG)
unittest.main()
Fix test (wikipedia entries seem to change)#!/usr/bin/env python
""" """
# Standard library modules.
import unittest
import logging
# Third party modules.
# Local modules.
from pyxray.parser.wikipedia import WikipediaElementNameParser
# Globals and constants variables.
class TestWikipediaElementNameParser(unittest.TestCase):
def setUp(self):
unittest.TestCase.setUp(self)
self.parser = WikipediaElementNameParser()
def tearDown(self):
unittest.TestCase.tearDown(self)
def test__iter__(self):
props = list(self.parser)
self.assertEqual(2820, len(props))
if __name__ == '__main__': #pragma: no cover
logging.getLogger().setLevel(logging.DEBUG)
unittest.main()
| <commit_before>#!/usr/bin/env python
""" """
# Standard library modules.
import unittest
import logging
# Third party modules.
# Local modules.
from pyxray.parser.wikipedia import WikipediaElementNameParser
# Globals and constants variables.
class TestWikipediaElementNameParser(unittest.TestCase):
def setUp(self):
unittest.TestCase.setUp(self)
self.parser = WikipediaElementNameParser()
def tearDown(self):
unittest.TestCase.tearDown(self)
def test__iter__(self):
props = list(self.parser)
self.assertEqual(2819, len(props))
if __name__ == '__main__': #pragma: no cover
logging.getLogger().setLevel(logging.DEBUG)
unittest.main()
<commit_msg>Fix test (wikipedia entries seem to change)<commit_after>#!/usr/bin/env python
""" """
# Standard library modules.
import unittest
import logging
# Third party modules.
# Local modules.
from pyxray.parser.wikipedia import WikipediaElementNameParser
# Globals and constants variables.
class TestWikipediaElementNameParser(unittest.TestCase):
def setUp(self):
unittest.TestCase.setUp(self)
self.parser = WikipediaElementNameParser()
def tearDown(self):
unittest.TestCase.tearDown(self)
def test__iter__(self):
props = list(self.parser)
self.assertEqual(2820, len(props))
if __name__ == '__main__': #pragma: no cover
logging.getLogger().setLevel(logging.DEBUG)
unittest.main()
|
93cd0d5dc6388582bbcfdafe21c4f41793af75dc | hamlpy/template/utils.py | hamlpy/template/utils.py | import imp
from django.template import loaders
from os import listdir
from os.path import dirname, splitext
MODULE_EXTENSIONS = tuple([suffix[0] for suffix in imp.get_suffixes()])
def get_django_template_loaders():
return [(loader.__name__.rsplit('.', 1)[1], loader)
for loader in get_submodules(loaders) if hasattr(loader, 'Loader')]
def get_submodules(package):
submodules = ("%s.%s" % (package.__name__, module) for module in package_contents(package))
return [__import__(module, {}, {}, [module.rsplit(".", 1)[-1]]) for module in submodules]
def package_contents(package):
package_path = dirname(loaders.__file__)
contents = set([splitext(module)[0] for module in listdir(package_path) if module.endswith(MODULE_EXTENSIONS)])
return contents
| from importlib import machinery
from django.template import loaders
from os import listdir
from os.path import dirname, splitext
MODULE_EXTENSIONS = tuple(machinery.all_suffixes())
def get_django_template_loaders():
return [(loader.__name__.rsplit('.', 1)[1], loader)
for loader in get_submodules(loaders) if hasattr(loader, 'Loader')]
def get_submodules(package):
submodules = ("%s.%s" % (package.__name__, module) for module in package_contents(package))
return [__import__(module, {}, {}, [module.rsplit(".", 1)[-1]]) for module in submodules]
def package_contents(package):
package_path = dirname(loaders.__file__)
contents = set([splitext(module)[0] for module in listdir(package_path) if module.endswith(MODULE_EXTENSIONS)])
return contents
| Replace deprecated imp usage with importlib | Replace deprecated imp usage with importlib
| Python | mit | nyaruka/django-hamlpy,nyaruka/django-hamlpy | import imp
from django.template import loaders
from os import listdir
from os.path import dirname, splitext
MODULE_EXTENSIONS = tuple([suffix[0] for suffix in imp.get_suffixes()])
def get_django_template_loaders():
return [(loader.__name__.rsplit('.', 1)[1], loader)
for loader in get_submodules(loaders) if hasattr(loader, 'Loader')]
def get_submodules(package):
submodules = ("%s.%s" % (package.__name__, module) for module in package_contents(package))
return [__import__(module, {}, {}, [module.rsplit(".", 1)[-1]]) for module in submodules]
def package_contents(package):
package_path = dirname(loaders.__file__)
contents = set([splitext(module)[0] for module in listdir(package_path) if module.endswith(MODULE_EXTENSIONS)])
return contents
Replace deprecated imp usage with importlib | from importlib import machinery
from django.template import loaders
from os import listdir
from os.path import dirname, splitext
MODULE_EXTENSIONS = tuple(machinery.all_suffixes())
def get_django_template_loaders():
return [(loader.__name__.rsplit('.', 1)[1], loader)
for loader in get_submodules(loaders) if hasattr(loader, 'Loader')]
def get_submodules(package):
submodules = ("%s.%s" % (package.__name__, module) for module in package_contents(package))
return [__import__(module, {}, {}, [module.rsplit(".", 1)[-1]]) for module in submodules]
def package_contents(package):
package_path = dirname(loaders.__file__)
contents = set([splitext(module)[0] for module in listdir(package_path) if module.endswith(MODULE_EXTENSIONS)])
return contents
| <commit_before>import imp
from django.template import loaders
from os import listdir
from os.path import dirname, splitext
MODULE_EXTENSIONS = tuple([suffix[0] for suffix in imp.get_suffixes()])
def get_django_template_loaders():
return [(loader.__name__.rsplit('.', 1)[1], loader)
for loader in get_submodules(loaders) if hasattr(loader, 'Loader')]
def get_submodules(package):
submodules = ("%s.%s" % (package.__name__, module) for module in package_contents(package))
return [__import__(module, {}, {}, [module.rsplit(".", 1)[-1]]) for module in submodules]
def package_contents(package):
package_path = dirname(loaders.__file__)
contents = set([splitext(module)[0] for module in listdir(package_path) if module.endswith(MODULE_EXTENSIONS)])
return contents
<commit_msg>Replace deprecated imp usage with importlib<commit_after> | from importlib import machinery
from django.template import loaders
from os import listdir
from os.path import dirname, splitext
MODULE_EXTENSIONS = tuple(machinery.all_suffixes())
def get_django_template_loaders():
return [(loader.__name__.rsplit('.', 1)[1], loader)
for loader in get_submodules(loaders) if hasattr(loader, 'Loader')]
def get_submodules(package):
submodules = ("%s.%s" % (package.__name__, module) for module in package_contents(package))
return [__import__(module, {}, {}, [module.rsplit(".", 1)[-1]]) for module in submodules]
def package_contents(package):
package_path = dirname(loaders.__file__)
contents = set([splitext(module)[0] for module in listdir(package_path) if module.endswith(MODULE_EXTENSIONS)])
return contents
| import imp
from django.template import loaders
from os import listdir
from os.path import dirname, splitext
MODULE_EXTENSIONS = tuple([suffix[0] for suffix in imp.get_suffixes()])
def get_django_template_loaders():
return [(loader.__name__.rsplit('.', 1)[1], loader)
for loader in get_submodules(loaders) if hasattr(loader, 'Loader')]
def get_submodules(package):
submodules = ("%s.%s" % (package.__name__, module) for module in package_contents(package))
return [__import__(module, {}, {}, [module.rsplit(".", 1)[-1]]) for module in submodules]
def package_contents(package):
package_path = dirname(loaders.__file__)
contents = set([splitext(module)[0] for module in listdir(package_path) if module.endswith(MODULE_EXTENSIONS)])
return contents
Replace deprecated imp usage with importlibfrom importlib import machinery
from django.template import loaders
from os import listdir
from os.path import dirname, splitext
MODULE_EXTENSIONS = tuple(machinery.all_suffixes())
def get_django_template_loaders():
return [(loader.__name__.rsplit('.', 1)[1], loader)
for loader in get_submodules(loaders) if hasattr(loader, 'Loader')]
def get_submodules(package):
submodules = ("%s.%s" % (package.__name__, module) for module in package_contents(package))
return [__import__(module, {}, {}, [module.rsplit(".", 1)[-1]]) for module in submodules]
def package_contents(package):
package_path = dirname(loaders.__file__)
contents = set([splitext(module)[0] for module in listdir(package_path) if module.endswith(MODULE_EXTENSIONS)])
return contents
| <commit_before>import imp
from django.template import loaders
from os import listdir
from os.path import dirname, splitext
MODULE_EXTENSIONS = tuple([suffix[0] for suffix in imp.get_suffixes()])
def get_django_template_loaders():
return [(loader.__name__.rsplit('.', 1)[1], loader)
for loader in get_submodules(loaders) if hasattr(loader, 'Loader')]
def get_submodules(package):
submodules = ("%s.%s" % (package.__name__, module) for module in package_contents(package))
return [__import__(module, {}, {}, [module.rsplit(".", 1)[-1]]) for module in submodules]
def package_contents(package):
package_path = dirname(loaders.__file__)
contents = set([splitext(module)[0] for module in listdir(package_path) if module.endswith(MODULE_EXTENSIONS)])
return contents
<commit_msg>Replace deprecated imp usage with importlib<commit_after>from importlib import machinery
from django.template import loaders
from os import listdir
from os.path import dirname, splitext
MODULE_EXTENSIONS = tuple(machinery.all_suffixes())
def get_django_template_loaders():
return [(loader.__name__.rsplit('.', 1)[1], loader)
for loader in get_submodules(loaders) if hasattr(loader, 'Loader')]
def get_submodules(package):
submodules = ("%s.%s" % (package.__name__, module) for module in package_contents(package))
return [__import__(module, {}, {}, [module.rsplit(".", 1)[-1]]) for module in submodules]
def package_contents(package):
package_path = dirname(loaders.__file__)
contents = set([splitext(module)[0] for module in listdir(package_path) if module.endswith(MODULE_EXTENSIONS)])
return contents
|
2e88043e2f7a987469f1af5dffa1c4675368c667 | tests/schema-validator.py | tests/schema-validator.py | #!/usr/bin/python
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)) + "/../third-party/jsonschema")
import jsonschema
import jsonschema.exceptions
def main(argv):
if len(argv) < 3:
print "Usage: "
print "\t" + os.path.basename(__file__) + " <json file> <schema file>"
sys.exit(-1)
jsonFile = open(argv[1])
jsonContents = jsonFile.read()
jsonFile.close()
schemaFile = open(argv[2])
jsonSchema = schemaFile.read()
schemaFile.close()
try:
jsonschema.validate(eval(jsonContents), eval(jsonSchema))
print "Provided JSON is valid against the schema."
except jsonschema.ValidationError as e:
print e
if (__name__ == "__main__"):
sys.exit(main(sys.argv))
| #!/usr/bin/python
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)) + "/../third-party/jsonschema")
import jsonschema
import jsonschema.exceptions
def main(argv):
if len(argv) < 3:
print "Usage: "
print "\t" + os.path.basename(__file__) + " <json file> <schema file>"
sys.exit(-1)
jsonFile = open(argv[1])
jsonContents = jsonFile.read()
jsonContents = jsonContents.replace('false', 'False')
jsonContents = jsonContents.replace('true', 'True')
jsonFile.close()
schemaFile = open(argv[2])
jsonSchema = schemaFile.read()
jsonSchema = jsonSchema.replace('false', 'False')
jsonSchema = jsonSchema.replace('true', 'True')
schemaFile.close()
try:
jsonschema.validate(eval(jsonContents), eval(jsonSchema))
print "Provided JSON is valid against the schema."
except jsonschema.ValidationError as e:
print e
if (__name__ == "__main__"):
sys.exit(main(sys.argv))
| Replace true/false from JSON to python False/True | Replace true/false from JSON to python False/True
Signed-off-by: Vivek Galatage <bbe41406aa2af935662c4582fd181c8ca0156a8e@visteon.com>
| Python | mit | vivekgalatage/libtracing | #!/usr/bin/python
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)) + "/../third-party/jsonschema")
import jsonschema
import jsonschema.exceptions
def main(argv):
if len(argv) < 3:
print "Usage: "
print "\t" + os.path.basename(__file__) + " <json file> <schema file>"
sys.exit(-1)
jsonFile = open(argv[1])
jsonContents = jsonFile.read()
jsonFile.close()
schemaFile = open(argv[2])
jsonSchema = schemaFile.read()
schemaFile.close()
try:
jsonschema.validate(eval(jsonContents), eval(jsonSchema))
print "Provided JSON is valid against the schema."
except jsonschema.ValidationError as e:
print e
if (__name__ == "__main__"):
sys.exit(main(sys.argv))
Replace true/false from JSON to python False/True
Signed-off-by: Vivek Galatage <bbe41406aa2af935662c4582fd181c8ca0156a8e@visteon.com> | #!/usr/bin/python
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)) + "/../third-party/jsonschema")
import jsonschema
import jsonschema.exceptions
def main(argv):
if len(argv) < 3:
print "Usage: "
print "\t" + os.path.basename(__file__) + " <json file> <schema file>"
sys.exit(-1)
jsonFile = open(argv[1])
jsonContents = jsonFile.read()
jsonContents = jsonContents.replace('false', 'False')
jsonContents = jsonContents.replace('true', 'True')
jsonFile.close()
schemaFile = open(argv[2])
jsonSchema = schemaFile.read()
jsonSchema = jsonSchema.replace('false', 'False')
jsonSchema = jsonSchema.replace('true', 'True')
schemaFile.close()
try:
jsonschema.validate(eval(jsonContents), eval(jsonSchema))
print "Provided JSON is valid against the schema."
except jsonschema.ValidationError as e:
print e
if (__name__ == "__main__"):
sys.exit(main(sys.argv))
| <commit_before>#!/usr/bin/python
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)) + "/../third-party/jsonschema")
import jsonschema
import jsonschema.exceptions
def main(argv):
if len(argv) < 3:
print "Usage: "
print "\t" + os.path.basename(__file__) + " <json file> <schema file>"
sys.exit(-1)
jsonFile = open(argv[1])
jsonContents = jsonFile.read()
jsonFile.close()
schemaFile = open(argv[2])
jsonSchema = schemaFile.read()
schemaFile.close()
try:
jsonschema.validate(eval(jsonContents), eval(jsonSchema))
print "Provided JSON is valid against the schema."
except jsonschema.ValidationError as e:
print e
if (__name__ == "__main__"):
sys.exit(main(sys.argv))
<commit_msg>Replace true/false from JSON to python False/True
Signed-off-by: Vivek Galatage <bbe41406aa2af935662c4582fd181c8ca0156a8e@visteon.com><commit_after> | #!/usr/bin/python
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)) + "/../third-party/jsonschema")
import jsonschema
import jsonschema.exceptions
def main(argv):
if len(argv) < 3:
print "Usage: "
print "\t" + os.path.basename(__file__) + " <json file> <schema file>"
sys.exit(-1)
jsonFile = open(argv[1])
jsonContents = jsonFile.read()
jsonContents = jsonContents.replace('false', 'False')
jsonContents = jsonContents.replace('true', 'True')
jsonFile.close()
schemaFile = open(argv[2])
jsonSchema = schemaFile.read()
jsonSchema = jsonSchema.replace('false', 'False')
jsonSchema = jsonSchema.replace('true', 'True')
schemaFile.close()
try:
jsonschema.validate(eval(jsonContents), eval(jsonSchema))
print "Provided JSON is valid against the schema."
except jsonschema.ValidationError as e:
print e
if (__name__ == "__main__"):
sys.exit(main(sys.argv))
| #!/usr/bin/python
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)) + "/../third-party/jsonschema")
import jsonschema
import jsonschema.exceptions
def main(argv):
if len(argv) < 3:
print "Usage: "
print "\t" + os.path.basename(__file__) + " <json file> <schema file>"
sys.exit(-1)
jsonFile = open(argv[1])
jsonContents = jsonFile.read()
jsonFile.close()
schemaFile = open(argv[2])
jsonSchema = schemaFile.read()
schemaFile.close()
try:
jsonschema.validate(eval(jsonContents), eval(jsonSchema))
print "Provided JSON is valid against the schema."
except jsonschema.ValidationError as e:
print e
if (__name__ == "__main__"):
sys.exit(main(sys.argv))
Replace true/false from JSON to python False/True
Signed-off-by: Vivek Galatage <bbe41406aa2af935662c4582fd181c8ca0156a8e@visteon.com>#!/usr/bin/python
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)) + "/../third-party/jsonschema")
import jsonschema
import jsonschema.exceptions
def main(argv):
if len(argv) < 3:
print "Usage: "
print "\t" + os.path.basename(__file__) + " <json file> <schema file>"
sys.exit(-1)
jsonFile = open(argv[1])
jsonContents = jsonFile.read()
jsonContents = jsonContents.replace('false', 'False')
jsonContents = jsonContents.replace('true', 'True')
jsonFile.close()
schemaFile = open(argv[2])
jsonSchema = schemaFile.read()
jsonSchema = jsonSchema.replace('false', 'False')
jsonSchema = jsonSchema.replace('true', 'True')
schemaFile.close()
try:
jsonschema.validate(eval(jsonContents), eval(jsonSchema))
print "Provided JSON is valid against the schema."
except jsonschema.ValidationError as e:
print e
if (__name__ == "__main__"):
sys.exit(main(sys.argv))
| <commit_before>#!/usr/bin/python
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)) + "/../third-party/jsonschema")
import jsonschema
import jsonschema.exceptions
def main(argv):
if len(argv) < 3:
print "Usage: "
print "\t" + os.path.basename(__file__) + " <json file> <schema file>"
sys.exit(-1)
jsonFile = open(argv[1])
jsonContents = jsonFile.read()
jsonFile.close()
schemaFile = open(argv[2])
jsonSchema = schemaFile.read()
schemaFile.close()
try:
jsonschema.validate(eval(jsonContents), eval(jsonSchema))
print "Provided JSON is valid against the schema."
except jsonschema.ValidationError as e:
print e
if (__name__ == "__main__"):
sys.exit(main(sys.argv))
<commit_msg>Replace true/false from JSON to python False/True
Signed-off-by: Vivek Galatage <bbe41406aa2af935662c4582fd181c8ca0156a8e@visteon.com><commit_after>#!/usr/bin/python
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)) + "/../third-party/jsonschema")
import jsonschema
import jsonschema.exceptions
def main(argv):
if len(argv) < 3:
print "Usage: "
print "\t" + os.path.basename(__file__) + " <json file> <schema file>"
sys.exit(-1)
jsonFile = open(argv[1])
jsonContents = jsonFile.read()
jsonContents = jsonContents.replace('false', 'False')
jsonContents = jsonContents.replace('true', 'True')
jsonFile.close()
schemaFile = open(argv[2])
jsonSchema = schemaFile.read()
jsonSchema = jsonSchema.replace('false', 'False')
jsonSchema = jsonSchema.replace('true', 'True')
schemaFile.close()
try:
jsonschema.validate(eval(jsonContents), eval(jsonSchema))
print "Provided JSON is valid against the schema."
except jsonschema.ValidationError as e:
print e
if (__name__ == "__main__"):
sys.exit(main(sys.argv))
|
b1dae11860d61e3b574c7bd6b332053819675ddb | tests/test_block_cache.py | tests/test_block_cache.py | import angr
import logging
l = logging.getLogger("angr.tests")
import os
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
def test_block_cache():
p = angr.Project(os.path.join(test_location, "x86_64", "fauxware"), translation_cache=True)
b = p.factory.block(p.entry)
assert p.factory.block(p.entry) is b
p = angr.Project(os.path.join(test_location, "x86_64", "fauxware"), translation_cache=False)
b = p.factory.block(p.entry)
assert p.factory.block(p.entry) is not b
if __name__ == "__main__":
test_block_cache()
| import angr
import logging
l = logging.getLogger("angr.tests")
import os
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
def test_block_cache():
p = angr.Project(os.path.join(test_location, "x86_64", "fauxware"), translation_cache=True)
b = p.factory.block(p.entry)
assert p.factory.block(p.entry).vex is b.vex
p = angr.Project(os.path.join(test_location, "x86_64", "fauxware"), translation_cache=False)
b = p.factory.block(p.entry)
assert p.factory.block(p.entry).vex is not b.vex
if __name__ == "__main__":
test_block_cache()
| Fix the test case for block cache. | Fix the test case for block cache.
| Python | bsd-2-clause | f-prettyland/angr,f-prettyland/angr,axt/angr,chubbymaggie/angr,schieb/angr,schieb/angr,angr/angr,tyb0807/angr,tyb0807/angr,f-prettyland/angr,iamahuman/angr,iamahuman/angr,tyb0807/angr,angr/angr,schieb/angr,iamahuman/angr,chubbymaggie/angr,chubbymaggie/angr,axt/angr,axt/angr,angr/angr | import angr
import logging
l = logging.getLogger("angr.tests")
import os
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
def test_block_cache():
p = angr.Project(os.path.join(test_location, "x86_64", "fauxware"), translation_cache=True)
b = p.factory.block(p.entry)
assert p.factory.block(p.entry) is b
p = angr.Project(os.path.join(test_location, "x86_64", "fauxware"), translation_cache=False)
b = p.factory.block(p.entry)
assert p.factory.block(p.entry) is not b
if __name__ == "__main__":
test_block_cache()
Fix the test case for block cache. | import angr
import logging
l = logging.getLogger("angr.tests")
import os
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
def test_block_cache():
p = angr.Project(os.path.join(test_location, "x86_64", "fauxware"), translation_cache=True)
b = p.factory.block(p.entry)
assert p.factory.block(p.entry).vex is b.vex
p = angr.Project(os.path.join(test_location, "x86_64", "fauxware"), translation_cache=False)
b = p.factory.block(p.entry)
assert p.factory.block(p.entry).vex is not b.vex
if __name__ == "__main__":
test_block_cache()
| <commit_before>import angr
import logging
l = logging.getLogger("angr.tests")
import os
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
def test_block_cache():
p = angr.Project(os.path.join(test_location, "x86_64", "fauxware"), translation_cache=True)
b = p.factory.block(p.entry)
assert p.factory.block(p.entry) is b
p = angr.Project(os.path.join(test_location, "x86_64", "fauxware"), translation_cache=False)
b = p.factory.block(p.entry)
assert p.factory.block(p.entry) is not b
if __name__ == "__main__":
test_block_cache()
<commit_msg>Fix the test case for block cache.<commit_after> | import angr
import logging
l = logging.getLogger("angr.tests")
import os
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
def test_block_cache():
p = angr.Project(os.path.join(test_location, "x86_64", "fauxware"), translation_cache=True)
b = p.factory.block(p.entry)
assert p.factory.block(p.entry).vex is b.vex
p = angr.Project(os.path.join(test_location, "x86_64", "fauxware"), translation_cache=False)
b = p.factory.block(p.entry)
assert p.factory.block(p.entry).vex is not b.vex
if __name__ == "__main__":
test_block_cache()
| import angr
import logging
l = logging.getLogger("angr.tests")
import os
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
def test_block_cache():
p = angr.Project(os.path.join(test_location, "x86_64", "fauxware"), translation_cache=True)
b = p.factory.block(p.entry)
assert p.factory.block(p.entry) is b
p = angr.Project(os.path.join(test_location, "x86_64", "fauxware"), translation_cache=False)
b = p.factory.block(p.entry)
assert p.factory.block(p.entry) is not b
if __name__ == "__main__":
test_block_cache()
Fix the test case for block cache.import angr
import logging
l = logging.getLogger("angr.tests")
import os
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
def test_block_cache():
p = angr.Project(os.path.join(test_location, "x86_64", "fauxware"), translation_cache=True)
b = p.factory.block(p.entry)
assert p.factory.block(p.entry).vex is b.vex
p = angr.Project(os.path.join(test_location, "x86_64", "fauxware"), translation_cache=False)
b = p.factory.block(p.entry)
assert p.factory.block(p.entry).vex is not b.vex
if __name__ == "__main__":
test_block_cache()
| <commit_before>import angr
import logging
l = logging.getLogger("angr.tests")
import os
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
def test_block_cache():
p = angr.Project(os.path.join(test_location, "x86_64", "fauxware"), translation_cache=True)
b = p.factory.block(p.entry)
assert p.factory.block(p.entry) is b
p = angr.Project(os.path.join(test_location, "x86_64", "fauxware"), translation_cache=False)
b = p.factory.block(p.entry)
assert p.factory.block(p.entry) is not b
if __name__ == "__main__":
test_block_cache()
<commit_msg>Fix the test case for block cache.<commit_after>import angr
import logging
l = logging.getLogger("angr.tests")
import os
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries/tests'))
def test_block_cache():
p = angr.Project(os.path.join(test_location, "x86_64", "fauxware"), translation_cache=True)
b = p.factory.block(p.entry)
assert p.factory.block(p.entry).vex is b.vex
p = angr.Project(os.path.join(test_location, "x86_64", "fauxware"), translation_cache=False)
b = p.factory.block(p.entry)
assert p.factory.block(p.entry).vex is not b.vex
if __name__ == "__main__":
test_block_cache()
|
b5744150da20f9b3b0f37704eb91878de21072cf | deploy/scripts/upgrade-web.py | deploy/scripts/upgrade-web.py | #!/usr/bin/python3
import errno
import pathlib
import platform
import sys
import subprocess
def main():
dist = platform.dist()
if dist[0] != 'debian' and dist[0] != 'Ubuntu':
print('This script can only be run on Debian GNU/Linux or Ubuntu.')
sys.exit(errno.EPERM)
workdir = pathlib.Path(__file__).resolve().parent.parent
subprocess.check_call(
[
'cp'
] +
[str(path) for path in ((workdir / 'scripts').glob('*'))] +
[
str(workdir / 'deploy' / 'tmp' / 'scripts')
]
)
if __name__ == '__main__':
main()
| #!/usr/bin/python3
import errno
import pathlib
import platform
import sys
import subprocess
def main():
dist = platform.dist()
if dist[0] != 'debian' and dist[0] != 'Ubuntu':
print('This script can only be run on Debian GNU/Linux or Ubuntu.')
sys.exit(errno.EPERM)
workdir = pathlib.Path(__file__).resolve().parent.parent
with (workdir / 'etc' / 'revision.txt').open('r') as revision_file:
revision = (revision_file.readline().strip())
venv_dir = pathlib.Path('/home/cliche/venv_{}'.format(revision))
subprocess.check_call(
[
'sudo',
'-ucliche',
str(venv_dir / 'bin' / 'pip'),
'install',
'uwsgi',
]
)
if __name__ == '__main__':
main()
| Install uwsgi in venv on web upgrade | Install uwsgi in venv on web upgrade
| Python | mit | clicheio/cliche,clicheio/cliche,item4/cliche,clicheio/cliche,item4/cliche | #!/usr/bin/python3
import errno
import pathlib
import platform
import sys
import subprocess
def main():
dist = platform.dist()
if dist[0] != 'debian' and dist[0] != 'Ubuntu':
print('This script can only be run on Debian GNU/Linux or Ubuntu.')
sys.exit(errno.EPERM)
workdir = pathlib.Path(__file__).resolve().parent.parent
subprocess.check_call(
[
'cp'
] +
[str(path) for path in ((workdir / 'scripts').glob('*'))] +
[
str(workdir / 'deploy' / 'tmp' / 'scripts')
]
)
if __name__ == '__main__':
main()
Install uwsgi in venv on web upgrade | #!/usr/bin/python3
import errno
import pathlib
import platform
import sys
import subprocess
def main():
dist = platform.dist()
if dist[0] != 'debian' and dist[0] != 'Ubuntu':
print('This script can only be run on Debian GNU/Linux or Ubuntu.')
sys.exit(errno.EPERM)
workdir = pathlib.Path(__file__).resolve().parent.parent
with (workdir / 'etc' / 'revision.txt').open('r') as revision_file:
revision = (revision_file.readline().strip())
venv_dir = pathlib.Path('/home/cliche/venv_{}'.format(revision))
subprocess.check_call(
[
'sudo',
'-ucliche',
str(venv_dir / 'bin' / 'pip'),
'install',
'uwsgi',
]
)
if __name__ == '__main__':
main()
| <commit_before>#!/usr/bin/python3
import errno
import pathlib
import platform
import sys
import subprocess
def main():
dist = platform.dist()
if dist[0] != 'debian' and dist[0] != 'Ubuntu':
print('This script can only be run on Debian GNU/Linux or Ubuntu.')
sys.exit(errno.EPERM)
workdir = pathlib.Path(__file__).resolve().parent.parent
subprocess.check_call(
[
'cp'
] +
[str(path) for path in ((workdir / 'scripts').glob('*'))] +
[
str(workdir / 'deploy' / 'tmp' / 'scripts')
]
)
if __name__ == '__main__':
main()
<commit_msg>Install uwsgi in venv on web upgrade<commit_after> | #!/usr/bin/python3
import errno
import pathlib
import platform
import sys
import subprocess
def main():
dist = platform.dist()
if dist[0] != 'debian' and dist[0] != 'Ubuntu':
print('This script can only be run on Debian GNU/Linux or Ubuntu.')
sys.exit(errno.EPERM)
workdir = pathlib.Path(__file__).resolve().parent.parent
with (workdir / 'etc' / 'revision.txt').open('r') as revision_file:
revision = (revision_file.readline().strip())
venv_dir = pathlib.Path('/home/cliche/venv_{}'.format(revision))
subprocess.check_call(
[
'sudo',
'-ucliche',
str(venv_dir / 'bin' / 'pip'),
'install',
'uwsgi',
]
)
if __name__ == '__main__':
main()
| #!/usr/bin/python3
import errno
import pathlib
import platform
import sys
import subprocess
def main():
dist = platform.dist()
if dist[0] != 'debian' and dist[0] != 'Ubuntu':
print('This script can only be run on Debian GNU/Linux or Ubuntu.')
sys.exit(errno.EPERM)
workdir = pathlib.Path(__file__).resolve().parent.parent
subprocess.check_call(
[
'cp'
] +
[str(path) for path in ((workdir / 'scripts').glob('*'))] +
[
str(workdir / 'deploy' / 'tmp' / 'scripts')
]
)
if __name__ == '__main__':
main()
Install uwsgi in venv on web upgrade#!/usr/bin/python3
import errno
import pathlib
import platform
import sys
import subprocess
def main():
dist = platform.dist()
if dist[0] != 'debian' and dist[0] != 'Ubuntu':
print('This script can only be run on Debian GNU/Linux or Ubuntu.')
sys.exit(errno.EPERM)
workdir = pathlib.Path(__file__).resolve().parent.parent
with (workdir / 'etc' / 'revision.txt').open('r') as revision_file:
revision = (revision_file.readline().strip())
venv_dir = pathlib.Path('/home/cliche/venv_{}'.format(revision))
subprocess.check_call(
[
'sudo',
'-ucliche',
str(venv_dir / 'bin' / 'pip'),
'install',
'uwsgi',
]
)
if __name__ == '__main__':
main()
| <commit_before>#!/usr/bin/python3
import errno
import pathlib
import platform
import sys
import subprocess
def main():
dist = platform.dist()
if dist[0] != 'debian' and dist[0] != 'Ubuntu':
print('This script can only be run on Debian GNU/Linux or Ubuntu.')
sys.exit(errno.EPERM)
workdir = pathlib.Path(__file__).resolve().parent.parent
subprocess.check_call(
[
'cp'
] +
[str(path) for path in ((workdir / 'scripts').glob('*'))] +
[
str(workdir / 'deploy' / 'tmp' / 'scripts')
]
)
if __name__ == '__main__':
main()
<commit_msg>Install uwsgi in venv on web upgrade<commit_after>#!/usr/bin/python3
import errno
import pathlib
import platform
import sys
import subprocess
def main():
dist = platform.dist()
if dist[0] != 'debian' and dist[0] != 'Ubuntu':
print('This script can only be run on Debian GNU/Linux or Ubuntu.')
sys.exit(errno.EPERM)
workdir = pathlib.Path(__file__).resolve().parent.parent
with (workdir / 'etc' / 'revision.txt').open('r') as revision_file:
revision = (revision_file.readline().strip())
venv_dir = pathlib.Path('/home/cliche/venv_{}'.format(revision))
subprocess.check_call(
[
'sudo',
'-ucliche',
str(venv_dir / 'bin' / 'pip'),
'install',
'uwsgi',
]
)
if __name__ == '__main__':
main()
|
2179dee14cfbd58ab8d8779561ac3826fe8892dd | custom/enikshay/reports/views.py | custom/enikshay/reports/views.py | from django.http.response import JsonResponse
from django.utils.decorators import method_decorator
from django.views.generic.base import View
from corehq.apps.domain.decorators import login_and_domain_required
from corehq.apps.locations.models import SQLLocation
from corehq.apps.userreports.reports.filters.choice_providers import ChoiceQueryContext
class LocationsView(View):
@method_decorator(login_and_domain_required)
def dispatch(self, *args, **kwargs):
return super(LocationsView, self).dispatch(*args, **kwargs)
def _locations_query(self, domain, query_text):
if query_text:
return SQLLocation.active_objects.filter_path_by_user_input(
domain=domain, user_input=query_text)
else:
return SQLLocation.active_objects.filter(domain=domain)
def query(self, domain, query_context):
locations = self._locations_query(domain, query_context.query).order_by('name')
return [
{'id': loc.location_id, 'text': loc.display_name}
for loc in locations[query_context.offset:query_context.offset + query_context.limit]
]
def query_count(self, domain, query):
return self._locations_query(domain, query).count()
def get(self, request, domain, *args, **kwargs):
query_context = ChoiceQueryContext(
query=request.GET.get('q', None),
limit=int(request.GET.get('limit', 20)),
page=int(request.GET.get('page', 1)) - 1
)
return JsonResponse(
{
'results': self.query(domain, query_context),
'total': self.query_count(domain, query_context)
}
)
| from collections import namedtuple
from django.http.response import JsonResponse
from django.utils.decorators import method_decorator
from django.views.generic.base import View
from corehq.apps.domain.decorators import login_and_domain_required
from corehq.apps.userreports.reports.filters.choice_providers import ChoiceQueryContext, LocationChoiceProvider
Report = namedtuple('Report', 'domain')
class LocationsView(View):
@method_decorator(login_and_domain_required)
def dispatch(self, *args, **kwargs):
return super(LocationsView, self).dispatch(*args, **kwargs)
def get(self, request, domain, *args, **kwargs):
query_context = ChoiceQueryContext(
query=request.GET.get('q', None),
limit=int(request.GET.get('limit', 20)),
page=int(request.GET.get('page', 1)) - 1
)
location_choice_provider = LocationChoiceProvider(Report(domain=domain), None)
location_choice_provider.configure({'include_descendants': True})
return JsonResponse(
{
'results': [
{'id': location.value, 'text': location.display}
for location in location_choice_provider.query(query_context)
],
'total': location_choice_provider.query_count(query_context)
}
)
| Use LocationChoiceProvider in enikshay location view | Use LocationChoiceProvider in enikshay location view
| Python | bsd-3-clause | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq | from django.http.response import JsonResponse
from django.utils.decorators import method_decorator
from django.views.generic.base import View
from corehq.apps.domain.decorators import login_and_domain_required
from corehq.apps.locations.models import SQLLocation
from corehq.apps.userreports.reports.filters.choice_providers import ChoiceQueryContext
class LocationsView(View):
@method_decorator(login_and_domain_required)
def dispatch(self, *args, **kwargs):
return super(LocationsView, self).dispatch(*args, **kwargs)
def _locations_query(self, domain, query_text):
if query_text:
return SQLLocation.active_objects.filter_path_by_user_input(
domain=domain, user_input=query_text)
else:
return SQLLocation.active_objects.filter(domain=domain)
def query(self, domain, query_context):
locations = self._locations_query(domain, query_context.query).order_by('name')
return [
{'id': loc.location_id, 'text': loc.display_name}
for loc in locations[query_context.offset:query_context.offset + query_context.limit]
]
def query_count(self, domain, query):
return self._locations_query(domain, query).count()
def get(self, request, domain, *args, **kwargs):
query_context = ChoiceQueryContext(
query=request.GET.get('q', None),
limit=int(request.GET.get('limit', 20)),
page=int(request.GET.get('page', 1)) - 1
)
return JsonResponse(
{
'results': self.query(domain, query_context),
'total': self.query_count(domain, query_context)
}
)
Use LocationChoiceProvider in enikshay location view | from collections import namedtuple
from django.http.response import JsonResponse
from django.utils.decorators import method_decorator
from django.views.generic.base import View
from corehq.apps.domain.decorators import login_and_domain_required
from corehq.apps.userreports.reports.filters.choice_providers import ChoiceQueryContext, LocationChoiceProvider
Report = namedtuple('Report', 'domain')
class LocationsView(View):
@method_decorator(login_and_domain_required)
def dispatch(self, *args, **kwargs):
return super(LocationsView, self).dispatch(*args, **kwargs)
def get(self, request, domain, *args, **kwargs):
query_context = ChoiceQueryContext(
query=request.GET.get('q', None),
limit=int(request.GET.get('limit', 20)),
page=int(request.GET.get('page', 1)) - 1
)
location_choice_provider = LocationChoiceProvider(Report(domain=domain), None)
location_choice_provider.configure({'include_descendants': True})
return JsonResponse(
{
'results': [
{'id': location.value, 'text': location.display}
for location in location_choice_provider.query(query_context)
],
'total': location_choice_provider.query_count(query_context)
}
)
| <commit_before>from django.http.response import JsonResponse
from django.utils.decorators import method_decorator
from django.views.generic.base import View
from corehq.apps.domain.decorators import login_and_domain_required
from corehq.apps.locations.models import SQLLocation
from corehq.apps.userreports.reports.filters.choice_providers import ChoiceQueryContext
class LocationsView(View):
@method_decorator(login_and_domain_required)
def dispatch(self, *args, **kwargs):
return super(LocationsView, self).dispatch(*args, **kwargs)
def _locations_query(self, domain, query_text):
if query_text:
return SQLLocation.active_objects.filter_path_by_user_input(
domain=domain, user_input=query_text)
else:
return SQLLocation.active_objects.filter(domain=domain)
def query(self, domain, query_context):
locations = self._locations_query(domain, query_context.query).order_by('name')
return [
{'id': loc.location_id, 'text': loc.display_name}
for loc in locations[query_context.offset:query_context.offset + query_context.limit]
]
def query_count(self, domain, query):
return self._locations_query(domain, query).count()
def get(self, request, domain, *args, **kwargs):
query_context = ChoiceQueryContext(
query=request.GET.get('q', None),
limit=int(request.GET.get('limit', 20)),
page=int(request.GET.get('page', 1)) - 1
)
return JsonResponse(
{
'results': self.query(domain, query_context),
'total': self.query_count(domain, query_context)
}
)
<commit_msg>Use LocationChoiceProvider in enikshay location view<commit_after> | from collections import namedtuple
from django.http.response import JsonResponse
from django.utils.decorators import method_decorator
from django.views.generic.base import View
from corehq.apps.domain.decorators import login_and_domain_required
from corehq.apps.userreports.reports.filters.choice_providers import ChoiceQueryContext, LocationChoiceProvider
Report = namedtuple('Report', 'domain')
class LocationsView(View):
@method_decorator(login_and_domain_required)
def dispatch(self, *args, **kwargs):
return super(LocationsView, self).dispatch(*args, **kwargs)
def get(self, request, domain, *args, **kwargs):
query_context = ChoiceQueryContext(
query=request.GET.get('q', None),
limit=int(request.GET.get('limit', 20)),
page=int(request.GET.get('page', 1)) - 1
)
location_choice_provider = LocationChoiceProvider(Report(domain=domain), None)
location_choice_provider.configure({'include_descendants': True})
return JsonResponse(
{
'results': [
{'id': location.value, 'text': location.display}
for location in location_choice_provider.query(query_context)
],
'total': location_choice_provider.query_count(query_context)
}
)
| from django.http.response import JsonResponse
from django.utils.decorators import method_decorator
from django.views.generic.base import View
from corehq.apps.domain.decorators import login_and_domain_required
from corehq.apps.locations.models import SQLLocation
from corehq.apps.userreports.reports.filters.choice_providers import ChoiceQueryContext
class LocationsView(View):
@method_decorator(login_and_domain_required)
def dispatch(self, *args, **kwargs):
return super(LocationsView, self).dispatch(*args, **kwargs)
def _locations_query(self, domain, query_text):
if query_text:
return SQLLocation.active_objects.filter_path_by_user_input(
domain=domain, user_input=query_text)
else:
return SQLLocation.active_objects.filter(domain=domain)
def query(self, domain, query_context):
locations = self._locations_query(domain, query_context.query).order_by('name')
return [
{'id': loc.location_id, 'text': loc.display_name}
for loc in locations[query_context.offset:query_context.offset + query_context.limit]
]
def query_count(self, domain, query):
return self._locations_query(domain, query).count()
def get(self, request, domain, *args, **kwargs):
query_context = ChoiceQueryContext(
query=request.GET.get('q', None),
limit=int(request.GET.get('limit', 20)),
page=int(request.GET.get('page', 1)) - 1
)
return JsonResponse(
{
'results': self.query(domain, query_context),
'total': self.query_count(domain, query_context)
}
)
Use LocationChoiceProvider in enikshay location viewfrom collections import namedtuple
from django.http.response import JsonResponse
from django.utils.decorators import method_decorator
from django.views.generic.base import View
from corehq.apps.domain.decorators import login_and_domain_required
from corehq.apps.userreports.reports.filters.choice_providers import ChoiceQueryContext, LocationChoiceProvider
Report = namedtuple('Report', 'domain')
class LocationsView(View):
@method_decorator(login_and_domain_required)
def dispatch(self, *args, **kwargs):
return super(LocationsView, self).dispatch(*args, **kwargs)
def get(self, request, domain, *args, **kwargs):
query_context = ChoiceQueryContext(
query=request.GET.get('q', None),
limit=int(request.GET.get('limit', 20)),
page=int(request.GET.get('page', 1)) - 1
)
location_choice_provider = LocationChoiceProvider(Report(domain=domain), None)
location_choice_provider.configure({'include_descendants': True})
return JsonResponse(
{
'results': [
{'id': location.value, 'text': location.display}
for location in location_choice_provider.query(query_context)
],
'total': location_choice_provider.query_count(query_context)
}
)
| <commit_before>from django.http.response import JsonResponse
from django.utils.decorators import method_decorator
from django.views.generic.base import View
from corehq.apps.domain.decorators import login_and_domain_required
from corehq.apps.locations.models import SQLLocation
from corehq.apps.userreports.reports.filters.choice_providers import ChoiceQueryContext
class LocationsView(View):
@method_decorator(login_and_domain_required)
def dispatch(self, *args, **kwargs):
return super(LocationsView, self).dispatch(*args, **kwargs)
def _locations_query(self, domain, query_text):
if query_text:
return SQLLocation.active_objects.filter_path_by_user_input(
domain=domain, user_input=query_text)
else:
return SQLLocation.active_objects.filter(domain=domain)
def query(self, domain, query_context):
locations = self._locations_query(domain, query_context.query).order_by('name')
return [
{'id': loc.location_id, 'text': loc.display_name}
for loc in locations[query_context.offset:query_context.offset + query_context.limit]
]
def query_count(self, domain, query):
return self._locations_query(domain, query).count()
def get(self, request, domain, *args, **kwargs):
query_context = ChoiceQueryContext(
query=request.GET.get('q', None),
limit=int(request.GET.get('limit', 20)),
page=int(request.GET.get('page', 1)) - 1
)
return JsonResponse(
{
'results': self.query(domain, query_context),
'total': self.query_count(domain, query_context)
}
)
<commit_msg>Use LocationChoiceProvider in enikshay location view<commit_after>from collections import namedtuple
from django.http.response import JsonResponse
from django.utils.decorators import method_decorator
from django.views.generic.base import View
from corehq.apps.domain.decorators import login_and_domain_required
from corehq.apps.userreports.reports.filters.choice_providers import ChoiceQueryContext, LocationChoiceProvider
Report = namedtuple('Report', 'domain')
class LocationsView(View):
@method_decorator(login_and_domain_required)
def dispatch(self, *args, **kwargs):
return super(LocationsView, self).dispatch(*args, **kwargs)
def get(self, request, domain, *args, **kwargs):
query_context = ChoiceQueryContext(
query=request.GET.get('q', None),
limit=int(request.GET.get('limit', 20)),
page=int(request.GET.get('page', 1)) - 1
)
location_choice_provider = LocationChoiceProvider(Report(domain=domain), None)
location_choice_provider.configure({'include_descendants': True})
return JsonResponse(
{
'results': [
{'id': location.value, 'text': location.display}
for location in location_choice_provider.query(query_context)
],
'total': location_choice_provider.query_count(query_context)
}
)
|
acaacbea4fbfdcc0f1f0c5e0aa9a837dee439d08 | saau/sections/image_provider.py | saau/sections/image_provider.py | import json
import inspect
from os.path import join, exists
def not_implemented():
frame_info = inspect.currentframe().f_back
msg = ''
if 'self' in frame_info.f_locals:
self = frame_info.f_locals['self']
try:
msg += self.__name__ + '#' # for static/class methods
except AttributeError:
msg += self.__class__.__name__ + '.'
msg += frame_info.f_code.co_name + '()'
return NotImplementedError(msg)
class RequiresData:
def __init__(self, data_dir, services):
self.data_dir = data_dir
self.services = services
def has_required_data(self):
raise not_implemented()
def obtain_data(self):
raise not_implemented()
def data_dir_exists(self, name):
return exists(self.data_dir_join(name))
def data_dir_join(self, name):
return join(self.data_dir, name)
def save_json(self, name, data):
with open(self.data_dir_join(name), 'w') as fh:
json.dump(data, fh, indent=4)
return True
def load_json(self, name):
with open(self.data_dir_join(name)) as fh:
return json.load(fh)
class ImageProvider(RequiresData):
def build_image(self):
raise not_implemented()
| import inspect
import json
from os.path import exists, join
from pathlib import Path
from typing import Any, Union
from ..services import Services
PathOrStr = Union[str,Path]
def not_implemented():
frame_info = inspect.currentframe().f_back
msg = ''
if 'self' in frame_info.f_locals:
self = frame_info.f_locals['self']
try:
msg += self.__name__ + '#' # for static/class methods
except AttributeError:
msg += self.__class__.__name__ + '.'
msg += frame_info.f_code.co_name + '()'
return NotImplementedError(msg)
class RequiresData:
def __init__(self, data_dir: Path, services: Services) -> None:
self.data_dir = data_dir
self.services = services
def has_required_data(self) -> bool:
raise not_implemented()
def obtain_data(self) -> bool:
raise not_implemented()
def data_dir_exists(self, name: PathOrStr) -> bool:
return exists(self.data_dir_join(name))
def data_dir_join(self, name: PathOrStr) -> str:
return join(self.data_dir, name)
def save_json(self, name: PathOrStr, data: Any) -> bool:
with open(self.data_dir_join(name), 'w') as fh:
json.dump(data, fh, indent=4)
return True
def load_json(self, name: PathOrStr) -> Any:
with open(self.data_dir_join(name)) as fh:
return json.load(fh)
class ImageProvider(RequiresData):
def build_image(self) -> str:
raise not_implemented()
| Add types to ImageProvider and RequiresData | Add types to ImageProvider and RequiresData
| Python | mit | Mause/statistical_atlas_of_au | import json
import inspect
from os.path import join, exists
def not_implemented():
frame_info = inspect.currentframe().f_back
msg = ''
if 'self' in frame_info.f_locals:
self = frame_info.f_locals['self']
try:
msg += self.__name__ + '#' # for static/class methods
except AttributeError:
msg += self.__class__.__name__ + '.'
msg += frame_info.f_code.co_name + '()'
return NotImplementedError(msg)
class RequiresData:
def __init__(self, data_dir, services):
self.data_dir = data_dir
self.services = services
def has_required_data(self):
raise not_implemented()
def obtain_data(self):
raise not_implemented()
def data_dir_exists(self, name):
return exists(self.data_dir_join(name))
def data_dir_join(self, name):
return join(self.data_dir, name)
def save_json(self, name, data):
with open(self.data_dir_join(name), 'w') as fh:
json.dump(data, fh, indent=4)
return True
def load_json(self, name):
with open(self.data_dir_join(name)) as fh:
return json.load(fh)
class ImageProvider(RequiresData):
def build_image(self):
raise not_implemented()
Add types to ImageProvider and RequiresData | import inspect
import json
from os.path import exists, join
from pathlib import Path
from typing import Any, Union
from ..services import Services
PathOrStr = Union[str,Path]
def not_implemented():
frame_info = inspect.currentframe().f_back
msg = ''
if 'self' in frame_info.f_locals:
self = frame_info.f_locals['self']
try:
msg += self.__name__ + '#' # for static/class methods
except AttributeError:
msg += self.__class__.__name__ + '.'
msg += frame_info.f_code.co_name + '()'
return NotImplementedError(msg)
class RequiresData:
def __init__(self, data_dir: Path, services: Services) -> None:
self.data_dir = data_dir
self.services = services
def has_required_data(self) -> bool:
raise not_implemented()
def obtain_data(self) -> bool:
raise not_implemented()
def data_dir_exists(self, name: PathOrStr) -> bool:
return exists(self.data_dir_join(name))
def data_dir_join(self, name: PathOrStr) -> str:
return join(self.data_dir, name)
def save_json(self, name: PathOrStr, data: Any) -> bool:
with open(self.data_dir_join(name), 'w') as fh:
json.dump(data, fh, indent=4)
return True
def load_json(self, name: PathOrStr) -> Any:
with open(self.data_dir_join(name)) as fh:
return json.load(fh)
class ImageProvider(RequiresData):
def build_image(self) -> str:
raise not_implemented()
| <commit_before>import json
import inspect
from os.path import join, exists
def not_implemented():
frame_info = inspect.currentframe().f_back
msg = ''
if 'self' in frame_info.f_locals:
self = frame_info.f_locals['self']
try:
msg += self.__name__ + '#' # for static/class methods
except AttributeError:
msg += self.__class__.__name__ + '.'
msg += frame_info.f_code.co_name + '()'
return NotImplementedError(msg)
class RequiresData:
def __init__(self, data_dir, services):
self.data_dir = data_dir
self.services = services
def has_required_data(self):
raise not_implemented()
def obtain_data(self):
raise not_implemented()
def data_dir_exists(self, name):
return exists(self.data_dir_join(name))
def data_dir_join(self, name):
return join(self.data_dir, name)
def save_json(self, name, data):
with open(self.data_dir_join(name), 'w') as fh:
json.dump(data, fh, indent=4)
return True
def load_json(self, name):
with open(self.data_dir_join(name)) as fh:
return json.load(fh)
class ImageProvider(RequiresData):
def build_image(self):
raise not_implemented()
<commit_msg>Add types to ImageProvider and RequiresData<commit_after> | import inspect
import json
from os.path import exists, join
from pathlib import Path
from typing import Any, Union
from ..services import Services
PathOrStr = Union[str,Path]
def not_implemented():
frame_info = inspect.currentframe().f_back
msg = ''
if 'self' in frame_info.f_locals:
self = frame_info.f_locals['self']
try:
msg += self.__name__ + '#' # for static/class methods
except AttributeError:
msg += self.__class__.__name__ + '.'
msg += frame_info.f_code.co_name + '()'
return NotImplementedError(msg)
class RequiresData:
def __init__(self, data_dir: Path, services: Services) -> None:
self.data_dir = data_dir
self.services = services
def has_required_data(self) -> bool:
raise not_implemented()
def obtain_data(self) -> bool:
raise not_implemented()
def data_dir_exists(self, name: PathOrStr) -> bool:
return exists(self.data_dir_join(name))
def data_dir_join(self, name: PathOrStr) -> str:
return join(self.data_dir, name)
def save_json(self, name: PathOrStr, data: Any) -> bool:
with open(self.data_dir_join(name), 'w') as fh:
json.dump(data, fh, indent=4)
return True
def load_json(self, name: PathOrStr) -> Any:
with open(self.data_dir_join(name)) as fh:
return json.load(fh)
class ImageProvider(RequiresData):
def build_image(self) -> str:
raise not_implemented()
| import json
import inspect
from os.path import join, exists
def not_implemented():
frame_info = inspect.currentframe().f_back
msg = ''
if 'self' in frame_info.f_locals:
self = frame_info.f_locals['self']
try:
msg += self.__name__ + '#' # for static/class methods
except AttributeError:
msg += self.__class__.__name__ + '.'
msg += frame_info.f_code.co_name + '()'
return NotImplementedError(msg)
class RequiresData:
def __init__(self, data_dir, services):
self.data_dir = data_dir
self.services = services
def has_required_data(self):
raise not_implemented()
def obtain_data(self):
raise not_implemented()
def data_dir_exists(self, name):
return exists(self.data_dir_join(name))
def data_dir_join(self, name):
return join(self.data_dir, name)
def save_json(self, name, data):
with open(self.data_dir_join(name), 'w') as fh:
json.dump(data, fh, indent=4)
return True
def load_json(self, name):
with open(self.data_dir_join(name)) as fh:
return json.load(fh)
class ImageProvider(RequiresData):
def build_image(self):
raise not_implemented()
Add types to ImageProvider and RequiresDataimport inspect
import json
from os.path import exists, join
from pathlib import Path
from typing import Any, Union
from ..services import Services
PathOrStr = Union[str,Path]
def not_implemented():
frame_info = inspect.currentframe().f_back
msg = ''
if 'self' in frame_info.f_locals:
self = frame_info.f_locals['self']
try:
msg += self.__name__ + '#' # for static/class methods
except AttributeError:
msg += self.__class__.__name__ + '.'
msg += frame_info.f_code.co_name + '()'
return NotImplementedError(msg)
class RequiresData:
    """Base class for components that keep their files under a data directory."""
    def __init__(self, data_dir: Path, services: Services) -> None:
        # Root directory for this component's files, plus shared services.
        self.data_dir = data_dir
        self.services = services
    def has_required_data(self) -> bool:
        """Subclasses report whether their data is already present."""
        raise not_implemented()
    def obtain_data(self) -> bool:
        """Subclasses fetch or build their data."""
        raise not_implemented()
    def data_dir_exists(self, name: PathOrStr) -> bool:
        """Return True when *name* already exists inside the data directory."""
        return exists(self.data_dir_join(name))
    def data_dir_join(self, name: PathOrStr) -> str:
        """Return the path of *name* inside the data directory."""
        return join(self.data_dir, name)
    def save_json(self, name: PathOrStr, data: Any) -> bool:
        """Serialize *data* as indented JSON under the data directory."""
        with open(self.data_dir_join(name), 'w') as fh:
            json.dump(data, fh, indent=4)
        return True
    def load_json(self, name: PathOrStr) -> Any:
        """Parse and return the JSON document stored under the data directory."""
        with open(self.data_dir_join(name)) as fh:
            return json.load(fh)
class ImageProvider(RequiresData):
    """A RequiresData component that can additionally build an image."""
    def build_image(self) -> str:
        """Build the image and return it (as a string); abstract here."""
        raise not_implemented()
| <commit_before>import json
import inspect
from os.path import join, exists
def not_implemented():
    """Return (not raise) a NotImplementedError naming the caller."""
    frame_info = inspect.currentframe().f_back
    msg = ''
    if 'self' in frame_info.f_locals:
        self = frame_info.f_locals['self']
        try:
            msg += self.__name__ + '#' # for static/class methods
        except AttributeError:
            msg += self.__class__.__name__ + '.'
    msg += frame_info.f_code.co_name + '()'
    return NotImplementedError(msg)
class RequiresData:
    """Base class for components that keep their files under a data directory."""
    def __init__(self, data_dir, services):
        self.data_dir = data_dir
        self.services = services
    def has_required_data(self):
        """Subclasses report whether their data is already present."""
        raise not_implemented()
    def obtain_data(self):
        """Subclasses fetch or build their data."""
        raise not_implemented()
    def data_dir_exists(self, name):
        """Return True when *name* already exists inside the data directory."""
        return exists(self.data_dir_join(name))
    def data_dir_join(self, name):
        """Return the path of *name* inside the data directory."""
        return join(self.data_dir, name)
    def save_json(self, name, data):
        """Serialize *data* as indented JSON under the data directory."""
        with open(self.data_dir_join(name), 'w') as fh:
            json.dump(data, fh, indent=4)
        return True
    def load_json(self, name):
        """Parse and return the JSON document stored under the data directory."""
        with open(self.data_dir_join(name)) as fh:
            return json.load(fh)
class ImageProvider(RequiresData):
    """A RequiresData component that can additionally build an image."""
    def build_image(self):
        """Build and return the image; must be overridden by subclasses."""
        raise not_implemented()
<commit_msg>Add types to ImageProvider and RequiresData<commit_after>import inspect
import json
from os.path import exists, join
from pathlib import Path
from typing import Any, Union
from ..services import Services
# Accept either plain strings or pathlib paths for file names.
PathOrStr = Union[str,Path]
def not_implemented():
    """Return (not raise) a NotImplementedError naming the caller."""
    frame_info = inspect.currentframe().f_back
    msg = ''
    if 'self' in frame_info.f_locals:
        self = frame_info.f_locals['self']
        try:
            msg += self.__name__ + '#' # for static/class methods
        except AttributeError:
            msg += self.__class__.__name__ + '.'
    msg += frame_info.f_code.co_name + '()'
    return NotImplementedError(msg)
class RequiresData:
    """Base class for components that keep their files under a data directory."""
    def __init__(self, data_dir: Path, services: Services) -> None:
        self.data_dir = data_dir
        self.services = services
    def has_required_data(self) -> bool:
        """Subclasses report whether their data is already present."""
        raise not_implemented()
    def obtain_data(self) -> bool:
        """Subclasses fetch or build their data."""
        raise not_implemented()
    def data_dir_exists(self, name: PathOrStr) -> bool:
        """Return True when *name* already exists inside the data directory."""
        return exists(self.data_dir_join(name))
    def data_dir_join(self, name: PathOrStr) -> str:
        """Return the path of *name* inside the data directory."""
        return join(self.data_dir, name)
    def save_json(self, name: PathOrStr, data: Any) -> bool:
        """Serialize *data* as indented JSON under the data directory."""
        with open(self.data_dir_join(name), 'w') as fh:
            json.dump(data, fh, indent=4)
        return True
    def load_json(self, name: PathOrStr) -> Any:
        """Parse and return the JSON document stored under the data directory."""
        with open(self.data_dir_join(name)) as fh:
            return json.load(fh)
class ImageProvider(RequiresData):
    """A RequiresData component that can additionally build an image."""
    def build_image(self) -> str:
        """Build the image and return it (as a string); abstract here."""
        raise not_implemented()
|
6a10f2f08825480edc7b84ca00d84c36873cbdf4 | devs/tools/adapt-es-path.py | devs/tools/adapt-es-path.py | #!/usr/bin/env python3
"""
Use to apply patches from ES upstream with:
git apply --reject \
<(curl -L https://github.com/elastic/elasticsearch/pull/<NUMBER>.diff | ./devs/tools/adapt-es-path.py)
"""
import sys
def main():
    """Rewrite upstream-ES diff paths (server/ -> es/es-server/), stdin to stdout."""
    # Diff-header prefixes from the upstream layout and their equivalents in
    # this repository's layout.
    rewrites = (
        ('diff --git a/server/', 'diff --git a/es/es-server/'),
        ('--- a/server/', '--- a/es/es-server/'),
        ('+++ b/server/', '+++ b/es/es-server/'),
    )
    for raw in sys.stdin:
        for old, new in rewrites:
            raw = raw.replace(old, new)
        sys.stdout.write(raw)
if __name__ == "__main__":
main()
| #!/usr/bin/env python3
"""
Use to apply patches from ES upstream with:
git apply --reject \
<(curl -L https://github.com/elastic/elasticsearch/pull/<NUMBER>.diff | ./devs/tools/adapt-es-path.py)
"""
import sys
def main():
    """Rewrite ES upstream diff paths to this repository's layout (stdin -> stdout)."""
    for line in sys.stdin:
        sys.stdout.write(
            line
            # upstream server/ lives under es/es-server/ here
            .replace('diff --git a/server/', 'diff --git a/es/es-server/')
            .replace('--- a/server/', '--- a/es/es-server/')
            .replace('+++ b/server/', '+++ b/es/es-server/')
            # upstream test/framework maps to es/es-testing/
            # NOTE(review): the search string has no trailing slash while the
            # replacement does, so 'a/test/framework/x' becomes
            # 'a/es/es-testing//x' (double slash) -- confirm this is intended.
            .replace('diff --git a/test/framework', 'diff --git a/es/es-testing/')
            .replace('--- a/test/framework', '--- a/es/es-testing/')
            .replace('+++ b/test/framework', '+++ b/es/es-testing/')
        )
if __name__ == "__main__":
main()
| Update dev tool for applying ES patches | Update dev tool for applying ES patches
Adds path rewrite for `test/framework` -> `es/es-testing/` | Python | apache-2.0 | crate/crate,crate/crate,EvilMcJerkface/crate,EvilMcJerkface/crate,EvilMcJerkface/crate,crate/crate | #!/usr/bin/env python3
"""
Use to apply patches from ES upstream with:
git apply --reject \
<(curl -L https://github.com/elastic/elasticsearch/pull/<NUMBER>.diff | ./devs/tools/adapt-es-path.py)
"""
import sys
def main():
for line in sys.stdin:
sys.stdout.write(
line
.replace('diff --git a/server/', 'diff --git a/es/es-server/')
.replace('--- a/server/', '--- a/es/es-server/')
.replace('+++ b/server/', '+++ b/es/es-server/')
)
if __name__ == "__main__":
main()
Update dev tool for applying ES patches
Adds path rewrite for `test/framework` -> `es/es-testing/` | #!/usr/bin/env python3
"""
Use to apply patches from ES upstream with:
git apply --reject \
<(curl -L https://github.com/elastic/elasticsearch/pull/<NUMBER>.diff | ./devs/tools/adapt-es-path.py)
"""
import sys
def main():
for line in sys.stdin:
sys.stdout.write(
line
.replace('diff --git a/server/', 'diff --git a/es/es-server/')
.replace('--- a/server/', '--- a/es/es-server/')
.replace('+++ b/server/', '+++ b/es/es-server/')
.replace('diff --git a/test/framework', 'diff --git a/es/es-testing/')
.replace('--- a/test/framework', '--- a/es/es-testing/')
.replace('+++ b/test/framework', '+++ b/es/es-testing/')
)
if __name__ == "__main__":
main()
| <commit_before>#!/usr/bin/env python3
"""
Use to apply patches from ES upstream with:
git apply --reject \
<(curl -L https://github.com/elastic/elasticsearch/pull/<NUMBER>.diff | ./devs/tools/adapt-es-path.py)
"""
import sys
def main():
for line in sys.stdin:
sys.stdout.write(
line
.replace('diff --git a/server/', 'diff --git a/es/es-server/')
.replace('--- a/server/', '--- a/es/es-server/')
.replace('+++ b/server/', '+++ b/es/es-server/')
)
if __name__ == "__main__":
main()
<commit_msg>Update dev tool for applying ES patches
Adds path rewrite for `test/framework` -> `es/es-testing/`<commit_after> | #!/usr/bin/env python3
"""
Use to apply patches from ES upstream with:
git apply --reject \
<(curl -L https://github.com/elastic/elasticsearch/pull/<NUMBER>.diff | ./devs/tools/adapt-es-path.py)
"""
import sys
def main():
for line in sys.stdin:
sys.stdout.write(
line
.replace('diff --git a/server/', 'diff --git a/es/es-server/')
.replace('--- a/server/', '--- a/es/es-server/')
.replace('+++ b/server/', '+++ b/es/es-server/')
.replace('diff --git a/test/framework', 'diff --git a/es/es-testing/')
.replace('--- a/test/framework', '--- a/es/es-testing/')
.replace('+++ b/test/framework', '+++ b/es/es-testing/')
)
if __name__ == "__main__":
main()
| #!/usr/bin/env python3
"""
Use to apply patches from ES upstream with:
git apply --reject \
<(curl -L https://github.com/elastic/elasticsearch/pull/<NUMBER>.diff | ./devs/tools/adapt-es-path.py)
"""
import sys
def main():
for line in sys.stdin:
sys.stdout.write(
line
.replace('diff --git a/server/', 'diff --git a/es/es-server/')
.replace('--- a/server/', '--- a/es/es-server/')
.replace('+++ b/server/', '+++ b/es/es-server/')
)
if __name__ == "__main__":
main()
Update dev tool for applying ES patches
Adds path rewrite for `test/framework` -> `es/es-testing/`#!/usr/bin/env python3
"""
Use to apply patches from ES upstream with:
git apply --reject \
<(curl -L https://github.com/elastic/elasticsearch/pull/<NUMBER>.diff | ./devs/tools/adapt-es-path.py)
"""
import sys
def main():
for line in sys.stdin:
sys.stdout.write(
line
.replace('diff --git a/server/', 'diff --git a/es/es-server/')
.replace('--- a/server/', '--- a/es/es-server/')
.replace('+++ b/server/', '+++ b/es/es-server/')
.replace('diff --git a/test/framework', 'diff --git a/es/es-testing/')
.replace('--- a/test/framework', '--- a/es/es-testing/')
.replace('+++ b/test/framework', '+++ b/es/es-testing/')
)
if __name__ == "__main__":
main()
| <commit_before>#!/usr/bin/env python3
"""
Use to apply patches from ES upstream with:
git apply --reject \
<(curl -L https://github.com/elastic/elasticsearch/pull/<NUMBER>.diff | ./devs/tools/adapt-es-path.py)
"""
import sys
def main():
for line in sys.stdin:
sys.stdout.write(
line
.replace('diff --git a/server/', 'diff --git a/es/es-server/')
.replace('--- a/server/', '--- a/es/es-server/')
.replace('+++ b/server/', '+++ b/es/es-server/')
)
if __name__ == "__main__":
main()
<commit_msg>Update dev tool for applying ES patches
Adds path rewrite for `test/framework` -> `es/es-testing/`<commit_after>#!/usr/bin/env python3
"""
Use to apply patches from ES upstream with:
git apply --reject \
<(curl -L https://github.com/elastic/elasticsearch/pull/<NUMBER>.diff | ./devs/tools/adapt-es-path.py)
"""
import sys
def main():
for line in sys.stdin:
sys.stdout.write(
line
.replace('diff --git a/server/', 'diff --git a/es/es-server/')
.replace('--- a/server/', '--- a/es/es-server/')
.replace('+++ b/server/', '+++ b/es/es-server/')
.replace('diff --git a/test/framework', 'diff --git a/es/es-testing/')
.replace('--- a/test/framework', '--- a/es/es-testing/')
.replace('+++ b/test/framework', '+++ b/es/es-testing/')
)
if __name__ == "__main__":
main()
|
37be9141cbcafb51ebef4ba76a5c2f1dcd9449d1 | example/test1_autograder.py | example/test1_autograder.py | from nose.tools import eq_ as assert_eq
# Autograder checks: each passing check awards 0.5 points toward its problem.
@score(problem="hello", points=0.5)
def grade_hello1():
    """Grade 'hello' with input 'Jessica'"""
    msg = hello("Jessica")
    assert_eq(msg, "Hello, Jessica!")
@score(problem="hello", points=0.5)
def grade_hello2():
    """Grade 'hello' with input 'Python'"""
    msg = hello("Python")
    assert_eq(msg, "Hello, Python!")
# NOTE(review): 'goodbye' is expected WITHOUT a trailing '!' while 'hello'
# has one -- confirm the asymmetry matches the assignment text.
@score(problem="goodbye", points=0.5)
def grade_goodbye1():
    """Grade 'goodbye' with input 'Jessica'"""
    msg = goodbye("Jessica")
    assert_eq(msg, "Goodbye, Jessica")
@score(problem="goodbye", points=0.5)
def grade_goodbye2():
    """Grade 'goodbye' with input 'Python'"""
    msg = goodbye("Python")
    assert_eq(msg, "Goodbye, Python")
| from nose.tools import eq_ as assert_eq
# Autograder checks keyed by notebook heading names ("Problem 1/Part ...").
@score(problem="Problem 1/Part A", points=0.5)
def grade_hello1():
    """Grade 'hello' with input 'Jessica'"""
    msg = hello("Jessica")
    assert_eq(msg, "Hello, Jessica!")
@score(problem="Problem 1/Part A", points=0.5)
def grade_hello2():
    """Grade 'hello' with input 'Python'"""
    msg = hello("Python")
    assert_eq(msg, "Hello, Python!")
@score(problem="Problem 1/Part B", points=0.5)
def grade_goodbye1():
    """Grade 'goodbye' with input 'Jessica'"""
    msg = goodbye("Jessica")
    assert_eq(msg, "Goodbye, Jessica")
@score(problem="Problem 1/Part B", points=0.5)
def grade_goodbye2():
    """Grade 'goodbye' with input 'Python'"""
    msg = goodbye("Python")
    assert_eq(msg, "Goodbye, Python")
| Update example autograding code to use heading names | Update example autograding code to use heading names
| Python | mit | jhamrick/original-nbgrader,jhamrick/original-nbgrader | from nose.tools import eq_ as assert_eq
@score(problem="hello", points=0.5)
def grade_hello1():
"""Grade 'hello' with input 'Jessica'"""
msg = hello("Jessica")
assert_eq(msg, "Hello, Jessica!")
@score(problem="hello", points=0.5)
def grade_hello2():
"""Grade 'hello' with input 'Python'"""
msg = hello("Python")
assert_eq(msg, "Hello, Python!")
@score(problem="goodbye", points=0.5)
def grade_goodbye1():
"""Grade 'goodbye' with input 'Jessica'"""
msg = goodbye("Jessica")
assert_eq(msg, "Goodbye, Jessica")
@score(problem="goodbye", points=0.5)
def grade_goodbye2():
"""Grade 'goodbye' with input 'Python'"""
msg = goodbye("Python")
assert_eq(msg, "Goodbye, Python")
Update example autograding code to use heading names | from nose.tools import eq_ as assert_eq
@score(problem="Problem 1/Part A", points=0.5)
def grade_hello1():
"""Grade 'hello' with input 'Jessica'"""
msg = hello("Jessica")
assert_eq(msg, "Hello, Jessica!")
@score(problem="Problem 1/Part A", points=0.5)
def grade_hello2():
"""Grade 'hello' with input 'Python'"""
msg = hello("Python")
assert_eq(msg, "Hello, Python!")
@score(problem="Problem 1/Part B", points=0.5)
def grade_goodbye1():
"""Grade 'goodbye' with input 'Jessica'"""
msg = goodbye("Jessica")
assert_eq(msg, "Goodbye, Jessica")
@score(problem="Problem 1/Part B", points=0.5)
def grade_goodbye2():
"""Grade 'goodbye' with input 'Python'"""
msg = goodbye("Python")
assert_eq(msg, "Goodbye, Python")
| <commit_before>from nose.tools import eq_ as assert_eq
@score(problem="hello", points=0.5)
def grade_hello1():
"""Grade 'hello' with input 'Jessica'"""
msg = hello("Jessica")
assert_eq(msg, "Hello, Jessica!")
@score(problem="hello", points=0.5)
def grade_hello2():
"""Grade 'hello' with input 'Python'"""
msg = hello("Python")
assert_eq(msg, "Hello, Python!")
@score(problem="goodbye", points=0.5)
def grade_goodbye1():
"""Grade 'goodbye' with input 'Jessica'"""
msg = goodbye("Jessica")
assert_eq(msg, "Goodbye, Jessica")
@score(problem="goodbye", points=0.5)
def grade_goodbye2():
"""Grade 'goodbye' with input 'Python'"""
msg = goodbye("Python")
assert_eq(msg, "Goodbye, Python")
<commit_msg>Update example autograding code to use heading names<commit_after> | from nose.tools import eq_ as assert_eq
@score(problem="Problem 1/Part A", points=0.5)
def grade_hello1():
"""Grade 'hello' with input 'Jessica'"""
msg = hello("Jessica")
assert_eq(msg, "Hello, Jessica!")
@score(problem="Problem 1/Part A", points=0.5)
def grade_hello2():
"""Grade 'hello' with input 'Python'"""
msg = hello("Python")
assert_eq(msg, "Hello, Python!")
@score(problem="Problem 1/Part B", points=0.5)
def grade_goodbye1():
"""Grade 'goodbye' with input 'Jessica'"""
msg = goodbye("Jessica")
assert_eq(msg, "Goodbye, Jessica")
@score(problem="Problem 1/Part B", points=0.5)
def grade_goodbye2():
"""Grade 'goodbye' with input 'Python'"""
msg = goodbye("Python")
assert_eq(msg, "Goodbye, Python")
| from nose.tools import eq_ as assert_eq
@score(problem="hello", points=0.5)
def grade_hello1():
"""Grade 'hello' with input 'Jessica'"""
msg = hello("Jessica")
assert_eq(msg, "Hello, Jessica!")
@score(problem="hello", points=0.5)
def grade_hello2():
"""Grade 'hello' with input 'Python'"""
msg = hello("Python")
assert_eq(msg, "Hello, Python!")
@score(problem="goodbye", points=0.5)
def grade_goodbye1():
"""Grade 'goodbye' with input 'Jessica'"""
msg = goodbye("Jessica")
assert_eq(msg, "Goodbye, Jessica")
@score(problem="goodbye", points=0.5)
def grade_goodbye2():
"""Grade 'goodbye' with input 'Python'"""
msg = goodbye("Python")
assert_eq(msg, "Goodbye, Python")
Update example autograding code to use heading namesfrom nose.tools import eq_ as assert_eq
@score(problem="Problem 1/Part A", points=0.5)
def grade_hello1():
"""Grade 'hello' with input 'Jessica'"""
msg = hello("Jessica")
assert_eq(msg, "Hello, Jessica!")
@score(problem="Problem 1/Part A", points=0.5)
def grade_hello2():
"""Grade 'hello' with input 'Python'"""
msg = hello("Python")
assert_eq(msg, "Hello, Python!")
@score(problem="Problem 1/Part B", points=0.5)
def grade_goodbye1():
"""Grade 'goodbye' with input 'Jessica'"""
msg = goodbye("Jessica")
assert_eq(msg, "Goodbye, Jessica")
@score(problem="Problem 1/Part B", points=0.5)
def grade_goodbye2():
"""Grade 'goodbye' with input 'Python'"""
msg = goodbye("Python")
assert_eq(msg, "Goodbye, Python")
| <commit_before>from nose.tools import eq_ as assert_eq
@score(problem="hello", points=0.5)
def grade_hello1():
"""Grade 'hello' with input 'Jessica'"""
msg = hello("Jessica")
assert_eq(msg, "Hello, Jessica!")
@score(problem="hello", points=0.5)
def grade_hello2():
"""Grade 'hello' with input 'Python'"""
msg = hello("Python")
assert_eq(msg, "Hello, Python!")
@score(problem="goodbye", points=0.5)
def grade_goodbye1():
"""Grade 'goodbye' with input 'Jessica'"""
msg = goodbye("Jessica")
assert_eq(msg, "Goodbye, Jessica")
@score(problem="goodbye", points=0.5)
def grade_goodbye2():
"""Grade 'goodbye' with input 'Python'"""
msg = goodbye("Python")
assert_eq(msg, "Goodbye, Python")
<commit_msg>Update example autograding code to use heading names<commit_after>from nose.tools import eq_ as assert_eq
@score(problem="Problem 1/Part A", points=0.5)
def grade_hello1():
"""Grade 'hello' with input 'Jessica'"""
msg = hello("Jessica")
assert_eq(msg, "Hello, Jessica!")
@score(problem="Problem 1/Part A", points=0.5)
def grade_hello2():
"""Grade 'hello' with input 'Python'"""
msg = hello("Python")
assert_eq(msg, "Hello, Python!")
@score(problem="Problem 1/Part B", points=0.5)
def grade_goodbye1():
"""Grade 'goodbye' with input 'Jessica'"""
msg = goodbye("Jessica")
assert_eq(msg, "Goodbye, Jessica")
@score(problem="Problem 1/Part B", points=0.5)
def grade_goodbye2():
"""Grade 'goodbye' with input 'Python'"""
msg = goodbye("Python")
assert_eq(msg, "Goodbye, Python")
|
271c234ce215c036f54928a7c2910ddda4cea360 | dbaas/dbaas/celeryconfig.py | dbaas/dbaas/celeryconfig.py | import os
# Broker (Redis) connection; port and full URL are environment-overridable.
REDIS_PORT = os.getenv('DBAAS_NOTIFICATION_BROKER_PORT', '6379')
BROKER_URL = os.getenv(
    'DBAAS_NOTIFICATION_BROKER_URL', 'redis://localhost:%s/0' % REDIS_PORT)
# Hard-kill any task running longer than 10800 s (3 hours).
CELERYD_TASK_TIME_LIMIT = 10800
# Report the STARTED state and keep task results.
CELERY_TRACK_STARTED = True
CELERY_IGNORE_RESULT = False
CELERY_RESULT_BACKEND = 'djcelery.backends.cache:CacheBackend'
CELERYBEAT_MAX_LOOP_INTERVAL = 5
CELERY_TIMEZONE = os.getenv('DJANGO_TIME_ZONE', 'America/Sao_Paulo')
# Periodic tasks are stored and edited in the Django database.
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERYD_LOG_FORMAT = "[%(asctime)s: %(processName)s %(name)s %(levelname)s] %(message)s"
# Tasks run asynchronously (not inline) and worker logs are uncolored.
CELERY_ALWAYS_EAGER = False
CELERYD_LOG_COLOR = False
| import os
# Broker (Redis) connection; port and full URL are environment-overridable.
REDIS_PORT = os.getenv('DBAAS_NOTIFICATION_BROKER_PORT', '6379')
BROKER_URL = os.getenv(
    'DBAAS_NOTIFICATION_BROKER_URL', 'redis://localhost:%s/0' % REDIS_PORT)
# Hard-kill any task running longer than 10800 s (3 hours).
CELERYD_TASK_TIME_LIMIT = 10800
# Report the STARTED state and keep task results.
CELERY_TRACK_STARTED = True
CELERY_IGNORE_RESULT = False
CELERY_RESULT_BACKEND = 'djcelery.backends.cache:CacheBackend'
CELERYBEAT_MAX_LOOP_INTERVAL = 5
CELERY_TIMEZONE = os.getenv('DJANGO_TIME_ZONE', 'America/Sao_Paulo')
# Periodic tasks are stored and edited in the Django database.
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERYD_LOG_FORMAT = "[%(asctime)s: %(processName)s %(name)s %(levelname)s] %(message)s"
# Tasks run asynchronously (not inline) and worker logs are uncolored.
CELERY_ALWAYS_EAGER = False
CELERYD_LOG_COLOR = False
# Each worker process reserves only one task at a time (fair dispatch of
# long-running tasks instead of prefetching a batch).
CELERYD_PREFETCH_MULTIPLIER = 1
| Add celeryd prefetch multiplier setting | Add celeryd prefetch multiplier setting
| Python | bsd-3-clause | globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service | import os
REDIS_PORT = os.getenv('DBAAS_NOTIFICATION_BROKER_PORT', '6379')
BROKER_URL = os.getenv(
'DBAAS_NOTIFICATION_BROKER_URL', 'redis://localhost:%s/0' % REDIS_PORT)
CELERYD_TASK_TIME_LIMIT = 10800
CELERY_TRACK_STARTED = True
CELERY_IGNORE_RESULT = False
CELERY_RESULT_BACKEND = 'djcelery.backends.cache:CacheBackend'
CELERYBEAT_MAX_LOOP_INTERVAL = 5
CELERY_TIMEZONE = os.getenv('DJANGO_TIME_ZONE', 'America/Sao_Paulo')
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERYD_LOG_FORMAT = "[%(asctime)s: %(processName)s %(name)s %(levelname)s] %(message)s"
CELERY_ALWAYS_EAGER = False
CELERYD_LOG_COLOR = False
Add celeryd prefetch multiplier setting | import os
REDIS_PORT = os.getenv('DBAAS_NOTIFICATION_BROKER_PORT', '6379')
BROKER_URL = os.getenv(
'DBAAS_NOTIFICATION_BROKER_URL', 'redis://localhost:%s/0' % REDIS_PORT)
CELERYD_TASK_TIME_LIMIT = 10800
CELERY_TRACK_STARTED = True
CELERY_IGNORE_RESULT = False
CELERY_RESULT_BACKEND = 'djcelery.backends.cache:CacheBackend'
CELERYBEAT_MAX_LOOP_INTERVAL = 5
CELERY_TIMEZONE = os.getenv('DJANGO_TIME_ZONE', 'America/Sao_Paulo')
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERYD_LOG_FORMAT = "[%(asctime)s: %(processName)s %(name)s %(levelname)s] %(message)s"
CELERY_ALWAYS_EAGER = False
CELERYD_LOG_COLOR = False
CELERYD_PREFETCH_MULTIPLIER = 1
| <commit_before>import os
REDIS_PORT = os.getenv('DBAAS_NOTIFICATION_BROKER_PORT', '6379')
BROKER_URL = os.getenv(
'DBAAS_NOTIFICATION_BROKER_URL', 'redis://localhost:%s/0' % REDIS_PORT)
CELERYD_TASK_TIME_LIMIT = 10800
CELERY_TRACK_STARTED = True
CELERY_IGNORE_RESULT = False
CELERY_RESULT_BACKEND = 'djcelery.backends.cache:CacheBackend'
CELERYBEAT_MAX_LOOP_INTERVAL = 5
CELERY_TIMEZONE = os.getenv('DJANGO_TIME_ZONE', 'America/Sao_Paulo')
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERYD_LOG_FORMAT = "[%(asctime)s: %(processName)s %(name)s %(levelname)s] %(message)s"
CELERY_ALWAYS_EAGER = False
CELERYD_LOG_COLOR = False
<commit_msg>Add celeryd prefetch multiplier setting<commit_after> | import os
REDIS_PORT = os.getenv('DBAAS_NOTIFICATION_BROKER_PORT', '6379')
BROKER_URL = os.getenv(
'DBAAS_NOTIFICATION_BROKER_URL', 'redis://localhost:%s/0' % REDIS_PORT)
CELERYD_TASK_TIME_LIMIT = 10800
CELERY_TRACK_STARTED = True
CELERY_IGNORE_RESULT = False
CELERY_RESULT_BACKEND = 'djcelery.backends.cache:CacheBackend'
CELERYBEAT_MAX_LOOP_INTERVAL = 5
CELERY_TIMEZONE = os.getenv('DJANGO_TIME_ZONE', 'America/Sao_Paulo')
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERYD_LOG_FORMAT = "[%(asctime)s: %(processName)s %(name)s %(levelname)s] %(message)s"
CELERY_ALWAYS_EAGER = False
CELERYD_LOG_COLOR = False
CELERYD_PREFETCH_MULTIPLIER = 1
| import os
REDIS_PORT = os.getenv('DBAAS_NOTIFICATION_BROKER_PORT', '6379')
BROKER_URL = os.getenv(
'DBAAS_NOTIFICATION_BROKER_URL', 'redis://localhost:%s/0' % REDIS_PORT)
CELERYD_TASK_TIME_LIMIT = 10800
CELERY_TRACK_STARTED = True
CELERY_IGNORE_RESULT = False
CELERY_RESULT_BACKEND = 'djcelery.backends.cache:CacheBackend'
CELERYBEAT_MAX_LOOP_INTERVAL = 5
CELERY_TIMEZONE = os.getenv('DJANGO_TIME_ZONE', 'America/Sao_Paulo')
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERYD_LOG_FORMAT = "[%(asctime)s: %(processName)s %(name)s %(levelname)s] %(message)s"
CELERY_ALWAYS_EAGER = False
CELERYD_LOG_COLOR = False
Add celeryd prefetch multiplier settingimport os
REDIS_PORT = os.getenv('DBAAS_NOTIFICATION_BROKER_PORT', '6379')
BROKER_URL = os.getenv(
'DBAAS_NOTIFICATION_BROKER_URL', 'redis://localhost:%s/0' % REDIS_PORT)
CELERYD_TASK_TIME_LIMIT = 10800
CELERY_TRACK_STARTED = True
CELERY_IGNORE_RESULT = False
CELERY_RESULT_BACKEND = 'djcelery.backends.cache:CacheBackend'
CELERYBEAT_MAX_LOOP_INTERVAL = 5
CELERY_TIMEZONE = os.getenv('DJANGO_TIME_ZONE', 'America/Sao_Paulo')
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERYD_LOG_FORMAT = "[%(asctime)s: %(processName)s %(name)s %(levelname)s] %(message)s"
CELERY_ALWAYS_EAGER = False
CELERYD_LOG_COLOR = False
CELERYD_PREFETCH_MULTIPLIER = 1
| <commit_before>import os
REDIS_PORT = os.getenv('DBAAS_NOTIFICATION_BROKER_PORT', '6379')
BROKER_URL = os.getenv(
'DBAAS_NOTIFICATION_BROKER_URL', 'redis://localhost:%s/0' % REDIS_PORT)
CELERYD_TASK_TIME_LIMIT = 10800
CELERY_TRACK_STARTED = True
CELERY_IGNORE_RESULT = False
CELERY_RESULT_BACKEND = 'djcelery.backends.cache:CacheBackend'
CELERYBEAT_MAX_LOOP_INTERVAL = 5
CELERY_TIMEZONE = os.getenv('DJANGO_TIME_ZONE', 'America/Sao_Paulo')
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERYD_LOG_FORMAT = "[%(asctime)s: %(processName)s %(name)s %(levelname)s] %(message)s"
CELERY_ALWAYS_EAGER = False
CELERYD_LOG_COLOR = False
<commit_msg>Add celeryd prefetch multiplier setting<commit_after>import os
REDIS_PORT = os.getenv('DBAAS_NOTIFICATION_BROKER_PORT', '6379')
BROKER_URL = os.getenv(
'DBAAS_NOTIFICATION_BROKER_URL', 'redis://localhost:%s/0' % REDIS_PORT)
CELERYD_TASK_TIME_LIMIT = 10800
CELERY_TRACK_STARTED = True
CELERY_IGNORE_RESULT = False
CELERY_RESULT_BACKEND = 'djcelery.backends.cache:CacheBackend'
CELERYBEAT_MAX_LOOP_INTERVAL = 5
CELERY_TIMEZONE = os.getenv('DJANGO_TIME_ZONE', 'America/Sao_Paulo')
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERYD_LOG_FORMAT = "[%(asctime)s: %(processName)s %(name)s %(levelname)s] %(message)s"
CELERY_ALWAYS_EAGER = False
CELERYD_LOG_COLOR = False
CELERYD_PREFETCH_MULTIPLIER = 1
|
d346129fb33f84eaa61ed48f3d4b4f9570062241 | server/server/tests/__init__.py | server/server/tests/__init__.py | import pytest
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
pytestmark = pytest.mark.django_db(transaction=True)
@pytest.fixture
def fm_user():
    """Create a 'fuzzmanager' Django user with an API token for tests."""
    user = User.objects.create_user('fuzzmanager', 'test@example.com', 'test')
    # Keep the plaintext password around for login-based tests.
    user.password_raw = 'test'
    (token, created) = Token.objects.get_or_create(user=user)
    if created:
        token.save()
    # Expose the token key directly so tests can authenticate API calls.
    user.token = token.key
    return user
| import pytest
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User, Permission
from rest_framework.authtoken.models import Token
from crashmanager.models import User as CMUser
pytestmark = pytest.mark.django_db(transaction=True)
@pytest.fixture
def fm_user():
    """Create a 'fuzzmanager' user with view permissions and an API token."""
    user = User.objects.create_user('fuzzmanager', 'test@example.com', 'test')
    # Permissions are custom codenames declared on the crashmanager User
    # model's content type -- presumably app-wide view gates; verify against
    # that model's Meta.permissions.
    content_type = ContentType.objects.get_for_model(CMUser)
    user.user_permissions.add(Permission.objects.get(content_type=content_type, codename='view_ec2spotmanager'))
    user.user_permissions.add(Permission.objects.get(content_type=content_type, codename='view_crashmanager'))
    # Keep the plaintext password around for login-based tests.
    user.password_raw = 'test'
    (token, created) = Token.objects.get_or_create(user=user)
    if created:
        token.save()
    # Expose the token key directly so tests can authenticate API calls.
    user.token = token.key
    return user
| Fix Collector and EC2Reporter tests. Token user used for test now requires the correct permissions. | Fix Collector and EC2Reporter tests.
Token user used for test now requires the correct permissions.
| Python | mpl-2.0 | MozillaSecurity/FuzzManager,MozillaSecurity/FuzzManager,MozillaSecurity/FuzzManager,MozillaSecurity/FuzzManager | import pytest
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
pytestmark = pytest.mark.django_db(transaction=True)
@pytest.fixture
def fm_user():
user = User.objects.create_user('fuzzmanager', 'test@example.com', 'test')
user.password_raw = 'test'
(token, created) = Token.objects.get_or_create(user=user)
if created:
token.save()
user.token = token.key
return user
Fix Collector and EC2Reporter tests.
Token user used for test now requires the correct permissions. | import pytest
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User, Permission
from rest_framework.authtoken.models import Token
from crashmanager.models import User as CMUser
pytestmark = pytest.mark.django_db(transaction=True)
@pytest.fixture
def fm_user():
user = User.objects.create_user('fuzzmanager', 'test@example.com', 'test')
content_type = ContentType.objects.get_for_model(CMUser)
user.user_permissions.add(Permission.objects.get(content_type=content_type, codename='view_ec2spotmanager'))
user.user_permissions.add(Permission.objects.get(content_type=content_type, codename='view_crashmanager'))
user.password_raw = 'test'
(token, created) = Token.objects.get_or_create(user=user)
if created:
token.save()
user.token = token.key
return user
| <commit_before>import pytest
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
pytestmark = pytest.mark.django_db(transaction=True)
@pytest.fixture
def fm_user():
user = User.objects.create_user('fuzzmanager', 'test@example.com', 'test')
user.password_raw = 'test'
(token, created) = Token.objects.get_or_create(user=user)
if created:
token.save()
user.token = token.key
return user
<commit_msg>Fix Collector and EC2Reporter tests.
Token user used for test now requires the correct permissions.<commit_after> | import pytest
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User, Permission
from rest_framework.authtoken.models import Token
from crashmanager.models import User as CMUser
pytestmark = pytest.mark.django_db(transaction=True)
@pytest.fixture
def fm_user():
user = User.objects.create_user('fuzzmanager', 'test@example.com', 'test')
content_type = ContentType.objects.get_for_model(CMUser)
user.user_permissions.add(Permission.objects.get(content_type=content_type, codename='view_ec2spotmanager'))
user.user_permissions.add(Permission.objects.get(content_type=content_type, codename='view_crashmanager'))
user.password_raw = 'test'
(token, created) = Token.objects.get_or_create(user=user)
if created:
token.save()
user.token = token.key
return user
| import pytest
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
pytestmark = pytest.mark.django_db(transaction=True)
@pytest.fixture
def fm_user():
user = User.objects.create_user('fuzzmanager', 'test@example.com', 'test')
user.password_raw = 'test'
(token, created) = Token.objects.get_or_create(user=user)
if created:
token.save()
user.token = token.key
return user
Fix Collector and EC2Reporter tests.
Token user used for test now requires the correct permissions.import pytest
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User, Permission
from rest_framework.authtoken.models import Token
from crashmanager.models import User as CMUser
pytestmark = pytest.mark.django_db(transaction=True)
@pytest.fixture
def fm_user():
user = User.objects.create_user('fuzzmanager', 'test@example.com', 'test')
content_type = ContentType.objects.get_for_model(CMUser)
user.user_permissions.add(Permission.objects.get(content_type=content_type, codename='view_ec2spotmanager'))
user.user_permissions.add(Permission.objects.get(content_type=content_type, codename='view_crashmanager'))
user.password_raw = 'test'
(token, created) = Token.objects.get_or_create(user=user)
if created:
token.save()
user.token = token.key
return user
| <commit_before>import pytest
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
pytestmark = pytest.mark.django_db(transaction=True)
@pytest.fixture
def fm_user():
user = User.objects.create_user('fuzzmanager', 'test@example.com', 'test')
user.password_raw = 'test'
(token, created) = Token.objects.get_or_create(user=user)
if created:
token.save()
user.token = token.key
return user
<commit_msg>Fix Collector and EC2Reporter tests.
Token user used for test now requires the correct permissions.<commit_after>import pytest
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User, Permission
from rest_framework.authtoken.models import Token
from crashmanager.models import User as CMUser
pytestmark = pytest.mark.django_db(transaction=True)
@pytest.fixture
def fm_user():
user = User.objects.create_user('fuzzmanager', 'test@example.com', 'test')
content_type = ContentType.objects.get_for_model(CMUser)
user.user_permissions.add(Permission.objects.get(content_type=content_type, codename='view_ec2spotmanager'))
user.user_permissions.add(Permission.objects.get(content_type=content_type, codename='view_crashmanager'))
user.password_raw = 'test'
(token, created) = Token.objects.get_or_create(user=user)
if created:
token.save()
user.token = token.key
return user
|
1455da161123ea778d8e82c2f961fdcf85cd10aa | monitor-checker-http.py | monitor-checker-http.py | #!/usr/bin/env python
import pika
import json
import requests
import os  # was missing: os.environ is read below (NameError at startup)

# Broker host must be configured; os.environ[...] fails fast if it is not.
RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"]

connection = pika.BlockingConnection(pika.ConnectionParameters(
    RABBIT_MQ_SERVER))
channel = connection.channel()

# Work queue this checker consumes HTTP-check requests from.
channel.queue_declare(queue='http')


def callback(ch, method, properties, body):
    """Run one HTTP check described by *body* and publish its result.

    *body* is a JSON document whose monitor.check.arguments field is itself
    a JSON string containing the target 'host' URL.  The check record is
    replaced by a result record and the document is re-published.
    """
    req = json.loads(body)
    host = json.loads(req["monitor"]["check"]["arguments"])["host"]
    r = requests.get(host)
    req["monitor"]["result"] = {}
    # Anything other than a plain 200 counts as a failure.
    req["monitor"]["result"]["status"] = "ok" if r.status_code == 200 else "fail"
    # Move the check description under the result and drop the original.
    req["monitor"]["result"]["check"] = req["monitor"]["check"]
    del req["monitor"]["check"]
    # print() function form: same output as the old Py2 print statements for
    # a single argument, and valid syntax on Python 3 as well.
    print(req)
    print(r.status_code)
    resp = json.dumps(req)
    print(resp)
    # NOTE(review): the message is published to the 'results' *exchange*
    # while only a 'results' *queue* is declared here -- confirm the exchange
    # exists and the queue is bound to it, or results may be dropped.
    channel.queue_declare(queue='results')
    channel.basic_publish(exchange='results',
                          routing_key='results',
                          body=resp)


channel.basic_consume(callback,
                      queue='http',
                      no_ack=True)
channel.start_consuming()
| #!/usr/bin/env python
import pika
import json
import requests
import os
RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"]
RABBIT_MQ_USER = os.environ["RABBIT_MQ_USER"]
RABBIT_MQ_PWD = os.environ["RABBIT_MQ_PWD"]
credentials = pika.PlainCredentials(RABBIT_MQ_USER, RABBIT_MQ_PWD)
connection = pika.BlockingConnection(pika.ConnectionParameters(
RABBIT_MQ_SERVER, credentials = credentials))
channel = connection.channel()
def callback(ch, method, properties, body):
req = json.loads(body)
host = json.loads(req["monitor"]["check"]["arguments"])["host"]
r = requests.get(host)
req["monitor"]["result"]= {}
req["monitor"]["result"]["status"] = "ok" if r.status_code == 200 else "fail"
req["monitor"]["result"]["check"] = req["monitor"]["check"]
del req["monitor"]["check"]
print req
print r.status_code
resp = json.dumps(req)
channel.basic_publish(exchange='results',
routing_key='',
body=resp)
channel.basic_consume(callback,
queue='http',
no_ack=True)
channel.start_consuming()
| Add credentials + code clean up | Add credentials + code clean up
| Python | mit | observer-hackaton/monitor-checker-http | #!/usr/bin/env python
import pika
import json
import requests
RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"]
connection = pika.BlockingConnection(pika.ConnectionParameters(
RABBIT_MQ_SERVER))
channel = connection.channel()
channel.queue_declare(queue='http')
def callback(ch, method, properties, body):
req = json.loads(body)
host = json.loads(req["monitor"]["check"]["arguments"])["host"]
r = requests.get(host)
req["monitor"]["result"]= {}
req["monitor"]["result"]["status"] = "ok" if r.status_code == 200 else "fail"
req["monitor"]["result"]["check"] = req["monitor"]["check"]
del req["monitor"]["check"]
print req
print r.status_code
resp = json.dumps(req)
print resp
channel.queue_declare(queue='results')
channel.basic_publish(exchange='results',
routing_key='results',
body=resp)
channel.basic_consume(callback,
queue='http',
no_ack=True)
channel.start_consuming()
Add credentials + code clean up | #!/usr/bin/env python
import pika
import json
import requests
import os
RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"]
RABBIT_MQ_USER = os.environ["RABBIT_MQ_USER"]
RABBIT_MQ_PWD = os.environ["RABBIT_MQ_PWD"]
credentials = pika.PlainCredentials(RABBIT_MQ_USER, RABBIT_MQ_PWD)
connection = pika.BlockingConnection(pika.ConnectionParameters(
RABBIT_MQ_SERVER, credentials = credentials))
channel = connection.channel()
def callback(ch, method, properties, body):
req = json.loads(body)
host = json.loads(req["monitor"]["check"]["arguments"])["host"]
r = requests.get(host)
req["monitor"]["result"]= {}
req["monitor"]["result"]["status"] = "ok" if r.status_code == 200 else "fail"
req["monitor"]["result"]["check"] = req["monitor"]["check"]
del req["monitor"]["check"]
print req
print r.status_code
resp = json.dumps(req)
channel.basic_publish(exchange='results',
routing_key='',
body=resp)
channel.basic_consume(callback,
queue='http',
no_ack=True)
channel.start_consuming()
| <commit_before>#!/usr/bin/env python
import pika
import json
import requests
RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"]
connection = pika.BlockingConnection(pika.ConnectionParameters(
RABBIT_MQ_SERVER))
channel = connection.channel()
channel.queue_declare(queue='http')
def callback(ch, method, properties, body):
req = json.loads(body)
host = json.loads(req["monitor"]["check"]["arguments"])["host"]
r = requests.get(host)
req["monitor"]["result"]= {}
req["monitor"]["result"]["status"] = "ok" if r.status_code == 200 else "fail"
req["monitor"]["result"]["check"] = req["monitor"]["check"]
del req["monitor"]["check"]
print req
print r.status_code
resp = json.dumps(req)
print resp
channel.queue_declare(queue='results')
channel.basic_publish(exchange='results',
routing_key='results',
body=resp)
channel.basic_consume(callback,
queue='http',
no_ack=True)
channel.start_consuming()
<commit_msg>Add credentials + code clean up<commit_after> | #!/usr/bin/env python
import pika
import json
import requests
import os
RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"]
RABBIT_MQ_USER = os.environ["RABBIT_MQ_USER"]
RABBIT_MQ_PWD = os.environ["RABBIT_MQ_PWD"]
credentials = pika.PlainCredentials(RABBIT_MQ_USER, RABBIT_MQ_PWD)
connection = pika.BlockingConnection(pika.ConnectionParameters(
RABBIT_MQ_SERVER, credentials = credentials))
channel = connection.channel()
def callback(ch, method, properties, body):
req = json.loads(body)
host = json.loads(req["monitor"]["check"]["arguments"])["host"]
r = requests.get(host)
req["monitor"]["result"]= {}
req["monitor"]["result"]["status"] = "ok" if r.status_code == 200 else "fail"
req["monitor"]["result"]["check"] = req["monitor"]["check"]
del req["monitor"]["check"]
print req
print r.status_code
resp = json.dumps(req)
channel.basic_publish(exchange='results',
routing_key='',
body=resp)
channel.basic_consume(callback,
queue='http',
no_ack=True)
channel.start_consuming()
| #!/usr/bin/env python
import pika
import json
import requests
RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"]
connection = pika.BlockingConnection(pika.ConnectionParameters(
RABBIT_MQ_SERVER))
channel = connection.channel()
channel.queue_declare(queue='http')
def callback(ch, method, properties, body):
req = json.loads(body)
host = json.loads(req["monitor"]["check"]["arguments"])["host"]
r = requests.get(host)
req["monitor"]["result"]= {}
req["monitor"]["result"]["status"] = "ok" if r.status_code == 200 else "fail"
req["monitor"]["result"]["check"] = req["monitor"]["check"]
del req["monitor"]["check"]
print req
print r.status_code
resp = json.dumps(req)
print resp
channel.queue_declare(queue='results')
channel.basic_publish(exchange='results',
routing_key='results',
body=resp)
channel.basic_consume(callback,
queue='http',
no_ack=True)
channel.start_consuming()
Add credentials + code clean up#!/usr/bin/env python
import pika
import json
import requests
import os
RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"]
RABBIT_MQ_USER = os.environ["RABBIT_MQ_USER"]
RABBIT_MQ_PWD = os.environ["RABBIT_MQ_PWD"]
credentials = pika.PlainCredentials(RABBIT_MQ_USER, RABBIT_MQ_PWD)
connection = pika.BlockingConnection(pika.ConnectionParameters(
RABBIT_MQ_SERVER, credentials = credentials))
channel = connection.channel()
def callback(ch, method, properties, body):
req = json.loads(body)
host = json.loads(req["monitor"]["check"]["arguments"])["host"]
r = requests.get(host)
req["monitor"]["result"]= {}
req["monitor"]["result"]["status"] = "ok" if r.status_code == 200 else "fail"
req["monitor"]["result"]["check"] = req["monitor"]["check"]
del req["monitor"]["check"]
print req
print r.status_code
resp = json.dumps(req)
channel.basic_publish(exchange='results',
routing_key='',
body=resp)
channel.basic_consume(callback,
queue='http',
no_ack=True)
channel.start_consuming()
| <commit_before>#!/usr/bin/env python
import pika
import json
import requests
RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"]
connection = pika.BlockingConnection(pika.ConnectionParameters(
RABBIT_MQ_SERVER))
channel = connection.channel()
channel.queue_declare(queue='http')
def callback(ch, method, properties, body):
req = json.loads(body)
host = json.loads(req["monitor"]["check"]["arguments"])["host"]
r = requests.get(host)
req["monitor"]["result"]= {}
req["monitor"]["result"]["status"] = "ok" if r.status_code == 200 else "fail"
req["monitor"]["result"]["check"] = req["monitor"]["check"]
del req["monitor"]["check"]
print req
print r.status_code
resp = json.dumps(req)
print resp
channel.queue_declare(queue='results')
channel.basic_publish(exchange='results',
routing_key='results',
body=resp)
channel.basic_consume(callback,
queue='http',
no_ack=True)
channel.start_consuming()
<commit_msg>Add credentials + code clean up<commit_after>#!/usr/bin/env python
import pika
import json
import requests
import os
RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"]
RABBIT_MQ_USER = os.environ["RABBIT_MQ_USER"]
RABBIT_MQ_PWD = os.environ["RABBIT_MQ_PWD"]
credentials = pika.PlainCredentials(RABBIT_MQ_USER, RABBIT_MQ_PWD)
connection = pika.BlockingConnection(pika.ConnectionParameters(
RABBIT_MQ_SERVER, credentials = credentials))
channel = connection.channel()
def callback(ch, method, properties, body):
req = json.loads(body)
host = json.loads(req["monitor"]["check"]["arguments"])["host"]
r = requests.get(host)
req["monitor"]["result"]= {}
req["monitor"]["result"]["status"] = "ok" if r.status_code == 200 else "fail"
req["monitor"]["result"]["check"] = req["monitor"]["check"]
del req["monitor"]["check"]
print req
print r.status_code
resp = json.dumps(req)
channel.basic_publish(exchange='results',
routing_key='',
body=resp)
channel.basic_consume(callback,
queue='http',
no_ack=True)
channel.start_consuming()
|
7b697cbcddf29412ac94a186817bd9db1880a0f2 | nbody/snapshots/util.py | nbody/snapshots/util.py | """Various utility functions, mostly dealing with input/output"""
import os
import numpy as np
def load_snapshots(directory_name, stack_coords=False):
"""Loads files by traversing a directory and reading in a filename sorted order"""
data = []
for root, dirs, files in os.walk(directory_name):
for file_name in sorted(files, key=lambda x: int(x.split(".")[-2])):
#This needs fixing, but I'll leave it like this until we unify our formats
if file_name.endswith("csv"):
bodies = np.loadtxt(os.path.join(root, file_name), delimiter=",", skiprows=1, unpack=stack_coords)
data.append(bodies)
return np.array(data)
def save_snapshot(snapshot, file_name):
f = open(file_name, 'w')
f.write("x,y,z\n")
for i in range(snapshot.shape[0]):
f.write("%e,%e,%e\n" % (snapshot[i, 0], snapshot[i, 1], snapshot[i, 2]))
f.close()
def construct_snapshot_name(directory, num):
return os.path.join(directory, "nbody_snapshot." + str(num) + ".csv") | """Various utility functions, mostly dealing with input/output"""
import os
import numpy as np
def load_snapshots(directory_name, stack_coords=False):
"""Loads files by traversing a directory and reading in a filename sorted order"""
data = []
for root, dirs, files in os.walk(directory_name):
for file_name in sorted(files, key=lambda x: int(x.split(".")[-2])):
#This needs fixing, but I'll leave it like this until we unify our formats
if file_name.endswith("csv"):
bodies = np.loadtxt(os.path.join(root, file_name), delimiter=",", unpack=stack_coords)
data.append(bodies)
return np.array(data)
def save_snapshot(snapshot, file_name):
np.savetxt(file_name, snapshot, delimiter=",")
def construct_snapshot_name(directory, num):
return os.path.join(directory, "nbody_snapshot." + str(num) + ".csv") | Fix csv saving for arbitrary parameter sets | Fix csv saving for arbitrary parameter sets
| Python | mit | kostassabulis/nbody-workshop-2015 | """Various utility functions, mostly dealing with input/output"""
import os
import numpy as np
def load_snapshots(directory_name, stack_coords=False):
"""Loads files by traversing a directory and reading in a filename sorted order"""
data = []
for root, dirs, files in os.walk(directory_name):
for file_name in sorted(files, key=lambda x: int(x.split(".")[-2])):
#This needs fixing, but I'll leave it like this until we unify our formats
if file_name.endswith("csv"):
bodies = np.loadtxt(os.path.join(root, file_name), delimiter=",", skiprows=1, unpack=stack_coords)
data.append(bodies)
return np.array(data)
def save_snapshot(snapshot, file_name):
f = open(file_name, 'w')
f.write("x,y,z\n")
for i in range(snapshot.shape[0]):
f.write("%e,%e,%e\n" % (snapshot[i, 0], snapshot[i, 1], snapshot[i, 2]))
f.close()
def construct_snapshot_name(directory, num):
return os.path.join(directory, "nbody_snapshot." + str(num) + ".csv")Fix csv saving for arbitrary parameter sets | """Various utility functions, mostly dealing with input/output"""
import os
import numpy as np
def load_snapshots(directory_name, stack_coords=False):
"""Loads files by traversing a directory and reading in a filename sorted order"""
data = []
for root, dirs, files in os.walk(directory_name):
for file_name in sorted(files, key=lambda x: int(x.split(".")[-2])):
#This needs fixing, but I'll leave it like this until we unify our formats
if file_name.endswith("csv"):
bodies = np.loadtxt(os.path.join(root, file_name), delimiter=",", unpack=stack_coords)
data.append(bodies)
return np.array(data)
def save_snapshot(snapshot, file_name):
np.savetxt(file_name, snapshot, delimiter=",")
def construct_snapshot_name(directory, num):
return os.path.join(directory, "nbody_snapshot." + str(num) + ".csv") | <commit_before>"""Various utility functions, mostly dealing with input/output"""
import os
import numpy as np
def load_snapshots(directory_name, stack_coords=False):
"""Loads files by traversing a directory and reading in a filename sorted order"""
data = []
for root, dirs, files in os.walk(directory_name):
for file_name in sorted(files, key=lambda x: int(x.split(".")[-2])):
#This needs fixing, but I'll leave it like this until we unify our formats
if file_name.endswith("csv"):
bodies = np.loadtxt(os.path.join(root, file_name), delimiter=",", skiprows=1, unpack=stack_coords)
data.append(bodies)
return np.array(data)
def save_snapshot(snapshot, file_name):
f = open(file_name, 'w')
f.write("x,y,z\n")
for i in range(snapshot.shape[0]):
f.write("%e,%e,%e\n" % (snapshot[i, 0], snapshot[i, 1], snapshot[i, 2]))
f.close()
def construct_snapshot_name(directory, num):
return os.path.join(directory, "nbody_snapshot." + str(num) + ".csv")<commit_msg>Fix csv saving for arbitrary parameter sets<commit_after> | """Various utility functions, mostly dealing with input/output"""
import os
import numpy as np
def load_snapshots(directory_name, stack_coords=False):
"""Loads files by traversing a directory and reading in a filename sorted order"""
data = []
for root, dirs, files in os.walk(directory_name):
for file_name in sorted(files, key=lambda x: int(x.split(".")[-2])):
#This needs fixing, but I'll leave it like this until we unify our formats
if file_name.endswith("csv"):
bodies = np.loadtxt(os.path.join(root, file_name), delimiter=",", unpack=stack_coords)
data.append(bodies)
return np.array(data)
def save_snapshot(snapshot, file_name):
np.savetxt(file_name, snapshot, delimiter=",")
def construct_snapshot_name(directory, num):
return os.path.join(directory, "nbody_snapshot." + str(num) + ".csv") | """Various utility functions, mostly dealing with input/output"""
import os
import numpy as np
def load_snapshots(directory_name, stack_coords=False):
"""Loads files by traversing a directory and reading in a filename sorted order"""
data = []
for root, dirs, files in os.walk(directory_name):
for file_name in sorted(files, key=lambda x: int(x.split(".")[-2])):
#This needs fixing, but I'll leave it like this until we unify our formats
if file_name.endswith("csv"):
bodies = np.loadtxt(os.path.join(root, file_name), delimiter=",", skiprows=1, unpack=stack_coords)
data.append(bodies)
return np.array(data)
def save_snapshot(snapshot, file_name):
f = open(file_name, 'w')
f.write("x,y,z\n")
for i in range(snapshot.shape[0]):
f.write("%e,%e,%e\n" % (snapshot[i, 0], snapshot[i, 1], snapshot[i, 2]))
f.close()
def construct_snapshot_name(directory, num):
return os.path.join(directory, "nbody_snapshot." + str(num) + ".csv")Fix csv saving for arbitrary parameter sets"""Various utility functions, mostly dealing with input/output"""
import os
import numpy as np
def load_snapshots(directory_name, stack_coords=False):
"""Loads files by traversing a directory and reading in a filename sorted order"""
data = []
for root, dirs, files in os.walk(directory_name):
for file_name in sorted(files, key=lambda x: int(x.split(".")[-2])):
#This needs fixing, but I'll leave it like this until we unify our formats
if file_name.endswith("csv"):
bodies = np.loadtxt(os.path.join(root, file_name), delimiter=",", unpack=stack_coords)
data.append(bodies)
return np.array(data)
def save_snapshot(snapshot, file_name):
np.savetxt(file_name, snapshot, delimiter=",")
def construct_snapshot_name(directory, num):
return os.path.join(directory, "nbody_snapshot." + str(num) + ".csv") | <commit_before>"""Various utility functions, mostly dealing with input/output"""
import os
import numpy as np
def load_snapshots(directory_name, stack_coords=False):
"""Loads files by traversing a directory and reading in a filename sorted order"""
data = []
for root, dirs, files in os.walk(directory_name):
for file_name in sorted(files, key=lambda x: int(x.split(".")[-2])):
#This needs fixing, but I'll leave it like this until we unify our formats
if file_name.endswith("csv"):
bodies = np.loadtxt(os.path.join(root, file_name), delimiter=",", skiprows=1, unpack=stack_coords)
data.append(bodies)
return np.array(data)
def save_snapshot(snapshot, file_name):
f = open(file_name, 'w')
f.write("x,y,z\n")
for i in range(snapshot.shape[0]):
f.write("%e,%e,%e\n" % (snapshot[i, 0], snapshot[i, 1], snapshot[i, 2]))
f.close()
def construct_snapshot_name(directory, num):
return os.path.join(directory, "nbody_snapshot." + str(num) + ".csv")<commit_msg>Fix csv saving for arbitrary parameter sets<commit_after>"""Various utility functions, mostly dealing with input/output"""
import os
import numpy as np
def load_snapshots(directory_name, stack_coords=False):
"""Loads files by traversing a directory and reading in a filename sorted order"""
data = []
for root, dirs, files in os.walk(directory_name):
for file_name in sorted(files, key=lambda x: int(x.split(".")[-2])):
#This needs fixing, but I'll leave it like this until we unify our formats
if file_name.endswith("csv"):
bodies = np.loadtxt(os.path.join(root, file_name), delimiter=",", unpack=stack_coords)
data.append(bodies)
return np.array(data)
def save_snapshot(snapshot, file_name):
np.savetxt(file_name, snapshot, delimiter=",")
def construct_snapshot_name(directory, num):
return os.path.join(directory, "nbody_snapshot." + str(num) + ".csv") |
5745f894840eaad666617a6f99a22b62d550373c | django/contrib/comments/feeds.py | django/contrib/comments/feeds.py | from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_users_group WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
| from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
| Use correct m2m join table name in LatestCommentsFeed | Use correct m2m join table name in LatestCommentsFeed
--HG--
extra : convert_revision : svn%3Abcc190cf-cafb-0310-a4f2-bffc1f526a37/django/trunk%409089
| Python | bsd-3-clause | Belgabor/django,Belgabor/django,Belgabor/django | from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_users_group WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
Use correct m2m join table name in LatestCommentsFeed
--HG--
extra : convert_revision : svn%3Abcc190cf-cafb-0310-a4f2-bffc1f526a37/django/trunk%409089 | from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
| <commit_before>from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_users_group WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
<commit_msg>Use correct m2m join table name in LatestCommentsFeed
--HG--
extra : convert_revision : svn%3Abcc190cf-cafb-0310-a4f2-bffc1f526a37/django/trunk%409089<commit_after> | from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
| from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_users_group WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
Use correct m2m join table name in LatestCommentsFeed
--HG--
extra : convert_revision : svn%3Abcc190cf-cafb-0310-a4f2-bffc1f526a37/django/trunk%409089from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
| <commit_before>from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_users_group WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
<commit_msg>Use correct m2m join table name in LatestCommentsFeed
--HG--
extra : convert_revision : svn%3Abcc190cf-cafb-0310-a4f2-bffc1f526a37/django/trunk%409089<commit_after>from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
|
dfe52966b8ab72cd17687c1f6d15fdadac4d72e2 | weasyprint/logger.py | weasyprint/logger.py | # coding: utf-8
"""
weasyprint.logging
------------------
Logging setup.
The rest of the code gets the logger through this module rather than
``logging.getLogger`` to make sure that it is configured.
Logging levels are used for specific purposes:
- errors are used for unreachable or unusable external resources, including
unreachable stylesheets, unreachables images and unreadable images;
- warnings are used for unknown or bad HTML/CSS syntaxes, unreachable local
fonts and various non-fatal problems;
- infos are used to advertise rendering steps.
:copyright: Copyright 2011-2014 Simon Sapin and contributors, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import division, unicode_literals
import logging
LOGGER = logging.getLogger('weasyprint')
LOGGER.addHandler(logging.NullHandler())
| # coding: utf-8
"""
weasyprint.logging
------------------
Logging setup.
The rest of the code gets the logger through this module rather than
``logging.getLogger`` to make sure that it is configured.
Logging levels are used for specific purposes:
- errors are used for unreachable or unusable external resources, including
unreachable stylesheets, unreachables images and unreadable images;
- warnings are used for unknown or bad HTML/CSS syntaxes, unreachable local
fonts and various non-fatal problems;
- infos are used to advertise rendering steps.
:copyright: Copyright 2011-2014 Simon Sapin and contributors, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import division, unicode_literals
import logging
LOGGER = logging.getLogger('weasyprint')
LOGGER.setLevel(logging.WARNING)
LOGGER.addHandler(logging.NullHandler())
| Set default logging level to WARNING | Set default logging level to WARNING
Fixes tests with pytest 3.3.0+
| Python | bsd-3-clause | Kozea/WeasyPrint,Kozea/WeasyPrint | # coding: utf-8
"""
weasyprint.logging
------------------
Logging setup.
The rest of the code gets the logger through this module rather than
``logging.getLogger`` to make sure that it is configured.
Logging levels are used for specific purposes:
- errors are used for unreachable or unusable external resources, including
unreachable stylesheets, unreachables images and unreadable images;
- warnings are used for unknown or bad HTML/CSS syntaxes, unreachable local
fonts and various non-fatal problems;
- infos are used to advertise rendering steps.
:copyright: Copyright 2011-2014 Simon Sapin and contributors, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import division, unicode_literals
import logging
LOGGER = logging.getLogger('weasyprint')
LOGGER.addHandler(logging.NullHandler())
Set default logging level to WARNING
Fixes tests with pytest 3.3.0+ | # coding: utf-8
"""
weasyprint.logging
------------------
Logging setup.
The rest of the code gets the logger through this module rather than
``logging.getLogger`` to make sure that it is configured.
Logging levels are used for specific purposes:
- errors are used for unreachable or unusable external resources, including
unreachable stylesheets, unreachables images and unreadable images;
- warnings are used for unknown or bad HTML/CSS syntaxes, unreachable local
fonts and various non-fatal problems;
- infos are used to advertise rendering steps.
:copyright: Copyright 2011-2014 Simon Sapin and contributors, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import division, unicode_literals
import logging
LOGGER = logging.getLogger('weasyprint')
LOGGER.setLevel(logging.WARNING)
LOGGER.addHandler(logging.NullHandler())
| <commit_before># coding: utf-8
"""
weasyprint.logging
------------------
Logging setup.
The rest of the code gets the logger through this module rather than
``logging.getLogger`` to make sure that it is configured.
Logging levels are used for specific purposes:
- errors are used for unreachable or unusable external resources, including
unreachable stylesheets, unreachables images and unreadable images;
- warnings are used for unknown or bad HTML/CSS syntaxes, unreachable local
fonts and various non-fatal problems;
- infos are used to advertise rendering steps.
:copyright: Copyright 2011-2014 Simon Sapin and contributors, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import division, unicode_literals
import logging
LOGGER = logging.getLogger('weasyprint')
LOGGER.addHandler(logging.NullHandler())
<commit_msg>Set default logging level to WARNING
Fixes tests with pytest 3.3.0+<commit_after> | # coding: utf-8
"""
weasyprint.logging
------------------
Logging setup.
The rest of the code gets the logger through this module rather than
``logging.getLogger`` to make sure that it is configured.
Logging levels are used for specific purposes:
- errors are used for unreachable or unusable external resources, including
unreachable stylesheets, unreachables images and unreadable images;
- warnings are used for unknown or bad HTML/CSS syntaxes, unreachable local
fonts and various non-fatal problems;
- infos are used to advertise rendering steps.
:copyright: Copyright 2011-2014 Simon Sapin and contributors, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import division, unicode_literals
import logging
LOGGER = logging.getLogger('weasyprint')
LOGGER.setLevel(logging.WARNING)
LOGGER.addHandler(logging.NullHandler())
| # coding: utf-8
"""
weasyprint.logging
------------------
Logging setup.
The rest of the code gets the logger through this module rather than
``logging.getLogger`` to make sure that it is configured.
Logging levels are used for specific purposes:
- errors are used for unreachable or unusable external resources, including
unreachable stylesheets, unreachables images and unreadable images;
- warnings are used for unknown or bad HTML/CSS syntaxes, unreachable local
fonts and various non-fatal problems;
- infos are used to advertise rendering steps.
:copyright: Copyright 2011-2014 Simon Sapin and contributors, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import division, unicode_literals
import logging
LOGGER = logging.getLogger('weasyprint')
LOGGER.addHandler(logging.NullHandler())
Set default logging level to WARNING
Fixes tests with pytest 3.3.0+# coding: utf-8
"""
weasyprint.logging
------------------
Logging setup.
The rest of the code gets the logger through this module rather than
``logging.getLogger`` to make sure that it is configured.
Logging levels are used for specific purposes:
- errors are used for unreachable or unusable external resources, including
unreachable stylesheets, unreachables images and unreadable images;
- warnings are used for unknown or bad HTML/CSS syntaxes, unreachable local
fonts and various non-fatal problems;
- infos are used to advertise rendering steps.
:copyright: Copyright 2011-2014 Simon Sapin and contributors, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import division, unicode_literals
import logging
LOGGER = logging.getLogger('weasyprint')
LOGGER.setLevel(logging.WARNING)
LOGGER.addHandler(logging.NullHandler())
| <commit_before># coding: utf-8
"""
weasyprint.logging
------------------
Logging setup.
The rest of the code gets the logger through this module rather than
``logging.getLogger`` to make sure that it is configured.
Logging levels are used for specific purposes:
- errors are used for unreachable or unusable external resources, including
unreachable stylesheets, unreachables images and unreadable images;
- warnings are used for unknown or bad HTML/CSS syntaxes, unreachable local
fonts and various non-fatal problems;
- infos are used to advertise rendering steps.
:copyright: Copyright 2011-2014 Simon Sapin and contributors, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import division, unicode_literals
import logging
LOGGER = logging.getLogger('weasyprint')
LOGGER.addHandler(logging.NullHandler())
<commit_msg>Set default logging level to WARNING
Fixes tests with pytest 3.3.0+<commit_after># coding: utf-8
"""
weasyprint.logging
------------------
Logging setup.
The rest of the code gets the logger through this module rather than
``logging.getLogger`` to make sure that it is configured.
Logging levels are used for specific purposes:
- errors are used for unreachable or unusable external resources, including
unreachable stylesheets, unreachables images and unreadable images;
- warnings are used for unknown or bad HTML/CSS syntaxes, unreachable local
fonts and various non-fatal problems;
- infos are used to advertise rendering steps.
:copyright: Copyright 2011-2014 Simon Sapin and contributors, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import division, unicode_literals
import logging
LOGGER = logging.getLogger('weasyprint')
LOGGER.setLevel(logging.WARNING)
LOGGER.addHandler(logging.NullHandler())
|
6f8efd5e6893491a7ee1c046513210c8ad1e7fc8 | sqliteschema/_logger/_logger.py | sqliteschema/_logger/_logger.py | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
import tabledata
from ._null_logger import NullLogger
MODULE_NAME = "sqliteschema"
try:
from loguru import logger
logger.disable(MODULE_NAME)
except ImportError:
logger = NullLogger()
def set_logger(is_enable):
    """Turn sqliteschema's logging on or off.

    The requested state is also propagated to the underlying tabledata
    package so both libraries log (or stay silent) together.
    """
    toggle = logger.enable if is_enable else logger.disable
    toggle(MODULE_NAME)
    tabledata.set_logger(is_enable=is_enable)
def set_log_level(log_level):
# deprecated
return
| # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
import tabledata
from ._null_logger import NullLogger
# Name used to enable/disable this package's records in loguru.
MODULE_NAME = "sqliteschema"
# Tracks the last state passed to set_logger() so redundant calls can be
# skipped.
_is_enable = False
# loguru is an optional dependency: fall back to a do-nothing logger when
# it is not installed.  Logging starts out disabled either way.
try:
    from loguru import logger
    logger.disable(MODULE_NAME)
except ImportError:
    logger = NullLogger()
def set_logger(is_enable):
    """Enable or disable sqliteschema's logging.

    Does nothing when the requested state matches the current one;
    otherwise the new state is remembered and also propagated to the
    tabledata package.
    """
    global _is_enable
    if is_enable != _is_enable:
        toggle = logger.enable if is_enable else logger.disable
        toggle(MODULE_NAME)
        _is_enable = is_enable
        tabledata.set_logger(is_enable=is_enable)
def set_log_level(log_level):
    """Deprecated no-op kept for backward compatibility.

    The *log_level* argument is ignored; use :func:`set_logger` to toggle
    logging instead.
    """
    # deprecated
    return
| Add check for logging state | Add check for logging state
| Python | mit | thombashi/sqliteschema | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
import tabledata
from ._null_logger import NullLogger
MODULE_NAME = "sqliteschema"
try:
from loguru import logger
logger.disable(MODULE_NAME)
except ImportError:
logger = NullLogger()
def set_logger(is_enable):
if is_enable:
logger.enable(MODULE_NAME)
else:
logger.disable(MODULE_NAME)
tabledata.set_logger(is_enable=is_enable)
def set_log_level(log_level):
# deprecated
return
Add check for logging state | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
import tabledata
from ._null_logger import NullLogger
MODULE_NAME = "sqliteschema"
_is_enable = False
try:
from loguru import logger
logger.disable(MODULE_NAME)
except ImportError:
logger = NullLogger()
def set_logger(is_enable):
global _is_enable
if is_enable == _is_enable:
return
if is_enable:
logger.enable(MODULE_NAME)
else:
logger.disable(MODULE_NAME)
_is_enable = is_enable
tabledata.set_logger(is_enable=is_enable)
def set_log_level(log_level):
# deprecated
return
| <commit_before># encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
import tabledata
from ._null_logger import NullLogger
MODULE_NAME = "sqliteschema"
try:
from loguru import logger
logger.disable(MODULE_NAME)
except ImportError:
logger = NullLogger()
def set_logger(is_enable):
if is_enable:
logger.enable(MODULE_NAME)
else:
logger.disable(MODULE_NAME)
tabledata.set_logger(is_enable=is_enable)
def set_log_level(log_level):
# deprecated
return
<commit_msg>Add check for logging state<commit_after> | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
import tabledata
from ._null_logger import NullLogger
MODULE_NAME = "sqliteschema"
_is_enable = False
try:
from loguru import logger
logger.disable(MODULE_NAME)
except ImportError:
logger = NullLogger()
def set_logger(is_enable):
global _is_enable
if is_enable == _is_enable:
return
if is_enable:
logger.enable(MODULE_NAME)
else:
logger.disable(MODULE_NAME)
_is_enable = is_enable
tabledata.set_logger(is_enable=is_enable)
def set_log_level(log_level):
# deprecated
return
| # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
import tabledata
from ._null_logger import NullLogger
MODULE_NAME = "sqliteschema"
try:
from loguru import logger
logger.disable(MODULE_NAME)
except ImportError:
logger = NullLogger()
def set_logger(is_enable):
if is_enable:
logger.enable(MODULE_NAME)
else:
logger.disable(MODULE_NAME)
tabledata.set_logger(is_enable=is_enable)
def set_log_level(log_level):
# deprecated
return
Add check for logging state# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
import tabledata
from ._null_logger import NullLogger
MODULE_NAME = "sqliteschema"
_is_enable = False
try:
from loguru import logger
logger.disable(MODULE_NAME)
except ImportError:
logger = NullLogger()
def set_logger(is_enable):
global _is_enable
if is_enable == _is_enable:
return
if is_enable:
logger.enable(MODULE_NAME)
else:
logger.disable(MODULE_NAME)
_is_enable = is_enable
tabledata.set_logger(is_enable=is_enable)
def set_log_level(log_level):
# deprecated
return
| <commit_before># encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
import tabledata
from ._null_logger import NullLogger
MODULE_NAME = "sqliteschema"
try:
from loguru import logger
logger.disable(MODULE_NAME)
except ImportError:
logger = NullLogger()
def set_logger(is_enable):
if is_enable:
logger.enable(MODULE_NAME)
else:
logger.disable(MODULE_NAME)
tabledata.set_logger(is_enable=is_enable)
def set_log_level(log_level):
# deprecated
return
<commit_msg>Add check for logging state<commit_after># encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
import tabledata
from ._null_logger import NullLogger
MODULE_NAME = "sqliteschema"
_is_enable = False
try:
from loguru import logger
logger.disable(MODULE_NAME)
except ImportError:
logger = NullLogger()
def set_logger(is_enable):
global _is_enable
if is_enable == _is_enable:
return
if is_enable:
logger.enable(MODULE_NAME)
else:
logger.disable(MODULE_NAME)
_is_enable = is_enable
tabledata.set_logger(is_enable=is_enable)
def set_log_level(log_level):
# deprecated
return
|
0c30fe72179a125b41ffb88fec387862c78e6c7c | flaskrst/modules/atom.py | flaskrst/modules/atom.py | # -*- coding: utf-8 -*-
"""
flask-rst.modules.atom
~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
from flask import Blueprint, request, current_app
from werkzeug.contrib.atom import AtomFeed, FeedEntry
from flaskrst.modules.blog import get_posts
atom = Blueprint('atom', __name__)
@atom.route("/atom.xml")
def atom_feed():
    """Render an Atom feed listing every blog post of the site."""
    config = current_app.config
    feed = AtomFeed(config.get('SITE_NAME', "My Site"),
                    feed_url=request.url, url=request.host_url,
                    subtitle=config.get('SITE_SUBTITLE', None))
    for post in get_posts():
        feed.add(FeedEntry(post.title, url=post.external_url,
                           updated=post.pub_date))
    return feed.to_string(), 200, {}, "application/atom+xml"
def setup(app, cfg):
app.register_blueprint(atom) | # -*- coding: utf-8 -*-
"""
flask-rst.modules.atom
~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
from flask import Blueprint, request, current_app
from werkzeug.contrib.atom import AtomFeed, FeedEntry
from flaskrst.modules.blog import get_posts
atom = Blueprint('atom', __name__)
@atom.route("/atom.xml")
def atom_feed():
    """Render an Atom feed of all blog posts, including content, summary
    and author information taken from the site configuration."""
    config = current_app.config
    feed = AtomFeed(config.get('SITE_NAME', "My Site"),
                    feed_url=request.url, url=request.host_url,
                    subtitle=config.get('SITE_SUBTITLE', None))
    for post in get_posts():
        author = {
            'name': config.get('AUTHOR_NAME'),
            'email': config.get('AUTHOR_EMAIL')
        }
        feed.add(FeedEntry(post.title,
                           url=post.external_url,
                           updated=post.pub_date,
                           content=post.body,
                           summary=post.config.get('summary', None),
                           author=author))
    return feed.to_string(), 200, {}, "application/atom+xml"
def setup(app, cfg):
app.register_blueprint(atom) | Add more information to the feed entry | Add more information to the feed entry
| Python | bsd-3-clause | jarus/flask-rst | # -*- coding: utf-8 -*-
"""
flask-rst.modules.atom
~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
from flask import Blueprint, request, current_app
from werkzeug.contrib.atom import AtomFeed, FeedEntry
from flaskrst.modules.blog import get_posts
atom = Blueprint('atom', __name__)
@atom.route("/atom.xml")
def atom_feed():
feed = AtomFeed(current_app.config.get('SITE_NAME', "My Site"),
feed_url=request.url, url=request.host_url,
subtitle=current_app.config.get('SITE_SUBTITLE', None))
for post in get_posts():
entry = FeedEntry(post.title, url=post.external_url,
updated=post.pub_date)
feed.add(entry)
return feed.to_string(), 200, {}, "application/atom+xml"
def setup(app, cfg):
app.register_blueprint(atom)Add more information to the feed entry | # -*- coding: utf-8 -*-
"""
flask-rst.modules.atom
~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
from flask import Blueprint, request, current_app
from werkzeug.contrib.atom import AtomFeed, FeedEntry
from flaskrst.modules.blog import get_posts
atom = Blueprint('atom', __name__)
@atom.route("/atom.xml")
def atom_feed():
feed = AtomFeed(current_app.config.get('SITE_NAME', "My Site"),
feed_url=request.url, url=request.host_url,
subtitle=current_app.config.get('SITE_SUBTITLE', None))
for post in get_posts():
entry = FeedEntry(post.title,
url=post.external_url,
updated=post.pub_date,
content=post.body,
summary=post.config.get('summary', None),
author={
'name': current_app.config.get('AUTHOR_NAME'),
'email': current_app.config.get('AUTHOR_EMAIL')
})
feed.add(entry)
return feed.to_string(), 200, {}, "application/atom+xml"
def setup(app, cfg):
app.register_blueprint(atom) | <commit_before># -*- coding: utf-8 -*-
"""
flask-rst.modules.atom
~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
from flask import Blueprint, request, current_app
from werkzeug.contrib.atom import AtomFeed, FeedEntry
from flaskrst.modules.blog import get_posts
atom = Blueprint('atom', __name__)
@atom.route("/atom.xml")
def atom_feed():
feed = AtomFeed(current_app.config.get('SITE_NAME', "My Site"),
feed_url=request.url, url=request.host_url,
subtitle=current_app.config.get('SITE_SUBTITLE', None))
for post in get_posts():
entry = FeedEntry(post.title, url=post.external_url,
updated=post.pub_date)
feed.add(entry)
return feed.to_string(), 200, {}, "application/atom+xml"
def setup(app, cfg):
app.register_blueprint(atom)<commit_msg>Add more information to the feed entry<commit_after> | # -*- coding: utf-8 -*-
"""
flask-rst.modules.atom
~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
from flask import Blueprint, request, current_app
from werkzeug.contrib.atom import AtomFeed, FeedEntry
from flaskrst.modules.blog import get_posts
atom = Blueprint('atom', __name__)
@atom.route("/atom.xml")
def atom_feed():
feed = AtomFeed(current_app.config.get('SITE_NAME', "My Site"),
feed_url=request.url, url=request.host_url,
subtitle=current_app.config.get('SITE_SUBTITLE', None))
for post in get_posts():
entry = FeedEntry(post.title,
url=post.external_url,
updated=post.pub_date,
content=post.body,
summary=post.config.get('summary', None),
author={
'name': current_app.config.get('AUTHOR_NAME'),
'email': current_app.config.get('AUTHOR_EMAIL')
})
feed.add(entry)
return feed.to_string(), 200, {}, "application/atom+xml"
def setup(app, cfg):
app.register_blueprint(atom) | # -*- coding: utf-8 -*-
"""
flask-rst.modules.atom
~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
from flask import Blueprint, request, current_app
from werkzeug.contrib.atom import AtomFeed, FeedEntry
from flaskrst.modules.blog import get_posts
atom = Blueprint('atom', __name__)
@atom.route("/atom.xml")
def atom_feed():
feed = AtomFeed(current_app.config.get('SITE_NAME', "My Site"),
feed_url=request.url, url=request.host_url,
subtitle=current_app.config.get('SITE_SUBTITLE', None))
for post in get_posts():
entry = FeedEntry(post.title, url=post.external_url,
updated=post.pub_date)
feed.add(entry)
return feed.to_string(), 200, {}, "application/atom+xml"
def setup(app, cfg):
app.register_blueprint(atom)Add more information to the feed entry# -*- coding: utf-8 -*-
"""
flask-rst.modules.atom
~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
from flask import Blueprint, request, current_app
from werkzeug.contrib.atom import AtomFeed, FeedEntry
from flaskrst.modules.blog import get_posts
atom = Blueprint('atom', __name__)
@atom.route("/atom.xml")
def atom_feed():
feed = AtomFeed(current_app.config.get('SITE_NAME', "My Site"),
feed_url=request.url, url=request.host_url,
subtitle=current_app.config.get('SITE_SUBTITLE', None))
for post in get_posts():
entry = FeedEntry(post.title,
url=post.external_url,
updated=post.pub_date,
content=post.body,
summary=post.config.get('summary', None),
author={
'name': current_app.config.get('AUTHOR_NAME'),
'email': current_app.config.get('AUTHOR_EMAIL')
})
feed.add(entry)
return feed.to_string(), 200, {}, "application/atom+xml"
def setup(app, cfg):
app.register_blueprint(atom) | <commit_before># -*- coding: utf-8 -*-
"""
flask-rst.modules.atom
~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
from flask import Blueprint, request, current_app
from werkzeug.contrib.atom import AtomFeed, FeedEntry
from flaskrst.modules.blog import get_posts
atom = Blueprint('atom', __name__)
@atom.route("/atom.xml")
def atom_feed():
feed = AtomFeed(current_app.config.get('SITE_NAME', "My Site"),
feed_url=request.url, url=request.host_url,
subtitle=current_app.config.get('SITE_SUBTITLE', None))
for post in get_posts():
entry = FeedEntry(post.title, url=post.external_url,
updated=post.pub_date)
feed.add(entry)
return feed.to_string(), 200, {}, "application/atom+xml"
def setup(app, cfg):
app.register_blueprint(atom)<commit_msg>Add more information to the feed entry<commit_after># -*- coding: utf-8 -*-
"""
flask-rst.modules.atom
~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
from flask import Blueprint, request, current_app
from werkzeug.contrib.atom import AtomFeed, FeedEntry
from flaskrst.modules.blog import get_posts
atom = Blueprint('atom', __name__)
@atom.route("/atom.xml")
def atom_feed():
feed = AtomFeed(current_app.config.get('SITE_NAME', "My Site"),
feed_url=request.url, url=request.host_url,
subtitle=current_app.config.get('SITE_SUBTITLE', None))
for post in get_posts():
entry = FeedEntry(post.title,
url=post.external_url,
updated=post.pub_date,
content=post.body,
summary=post.config.get('summary', None),
author={
'name': current_app.config.get('AUTHOR_NAME'),
'email': current_app.config.get('AUTHOR_EMAIL')
})
feed.add(entry)
return feed.to_string(), 200, {}, "application/atom+xml"
def setup(app, cfg):
    """flask-rst module hook: register the atom blueprint on *app*.

    *cfg* (the module configuration) is currently unused.
    """
    app.register_blueprint(atom)
038bb5fe10a3b9df18f8c709cddd0c18b2ac694d | parsing/forum/mongo_forum_to_mongod.py | parsing/forum/mongo_forum_to_mongod.py | '''
Insert the edx discussion board .mongo files into mongodb database
'''
import pymongo
import sys
import json
def connect_to_db_collection(db_name, collection_name):
'''
Retrieve collection from given database name and collection name
'''
connection = pymongo.Connection('localhost', 27017)
db = connection[db_name]
collection = db[collection_name]
return collection
def remove_dollar_sign(json_object):
    '''
    Recursively strip leading '$' characters from every dictionary key
    and return the sanitized object.

    MongoDB rejects field names that start with '$' (reserved for
    operators), while edX forum dumps contain keys such as '$oid' and
    '$date'.  Nested dicts and lists are processed recursively; any other
    value is returned unchanged.  Previously this was a stub returning
    None, which made the caller insert None instead of the document.
    '''
    if isinstance(json_object, dict):
        return dict((key.lstrip('$'), remove_dollar_sign(value))
                    for key, value in json_object.items())
    if isinstance(json_object, list):
        return [remove_dollar_sign(item) for item in json_object]
    return json_object
def migrate_form_to_mongodb(forum_mongo_file, collection):
    '''
    Load the edX discussion board dump at forum_mongo_file (one JSON
    document per line) into the given mongodb collection.

    Each line is handled independently, so a single malformed or invalid
    document is reported and skipped instead of aborting the whole
    migration.  Previously this was an empty stub.
    '''
    with open(forum_mongo_file) as file_handler:
        for line in file_handler:
            try:
                data = json.loads(line)
                collection.insert(remove_dollar_sign(data))
            except pymongo.errors.InvalidDocument:
                sys.stderr.write("INVALID_DOC: %s" % line)
            except Exception:
                sys.stderr.write("ERROR: %s" % line)
def main():
if len(sys.argv) != 4:
usage_message = 'usage: %s coure_db_name forum_mongo_file'
if __name__ == '__main__':
main()
| '''
Insert the edx discussion board .mongo files into mongodb database
'''
import pymongo
import sys
import json
def connect_to_db_collection(db_name, collection_name):
    '''
    Connect to the local mongod instance and return the named collection
    from the named database.
    '''
    client = pymongo.Connection('localhost', 27017)
    return client[db_name][collection_name]
def remove_dollar_sign(json_object):
    '''
    Recursively strip leading '$' characters from every dictionary key
    and return the sanitized object.

    MongoDB rejects field names that start with '$' (reserved for
    operators), while edX forum dumps contain keys such as '$oid' and
    '$date'.  Nested dicts and lists are processed recursively; any other
    value is returned unchanged.  Previously this was a stub returning
    None, which made the caller insert None instead of the document.
    '''
    if isinstance(json_object, dict):
        return dict((key.lstrip('$'), remove_dollar_sign(value))
                    for key, value in json_object.items())
    if isinstance(json_object, list):
        return [remove_dollar_sign(item) for item in json_object]
    return json_object
def migrate_form_to_mongodb(forum_mongo_file, collection):
with open(forum_mongo_file) as file_handler:
try:
for line in file_handler:
data = json.loads(line)
data = remove_dollar_sign(data)
collection.insert(data)
except pymongo.errors.InvalidDocument as e:
print "INVALID_DOC: ", line
except Exception as e:
print "ERROR: ", line
def main():
    '''
    Command line entry point.

    Expects three arguments (database name, collection name and the path
    to the forum .mongo dump); prints a usage message and exits with a
    non-zero status otherwise.  Previously the argv check only assigned
    an unused usage string and the function did nothing at all.
    '''
    if len(sys.argv) != 4:
        # The original message had a typo ("coure_db_name") and listed
        # only two arguments although four argv entries are required.
        usage_message = 'usage: %s course_db_name collection_name forum_mongo_file'
        print(usage_message % sys.argv[0])
        sys.exit(1)
    # NOTE(review): wiring inferred from the helpers defined above --
    # confirm the intended argument order against the callers.
    collection = connect_to_db_collection(sys.argv[1], sys.argv[2])
    migrate_form_to_mongodb(sys.argv[3], collection)

if __name__ == '__main__':
    main()
| Add function to migrate forum data to mongodb | Add function to migrate forum data to mongodb
| Python | mit | andyzsf/edx_data_research,andyzsf/edx_data_research,McGillX/edx_data_research,McGillX/edx_data_research,McGillX/edx_data_research | '''
Insert the edx discussion board .mongo files into mongodb database
'''
import pymongo
import sys
import json
def connect_to_db_collection(db_name, collection_name):
'''
Retrieve collection from given database name and collection name
'''
connection = pymongo.Connection('localhost', 27017)
db = connection[db_name]
collection = db[collection_name]
return collection
def remove_dollar_sign(json_object):
pass
def migrate_form_to_mongodb(forum_mongo_file, collection):
pass
def main():
if len(sys.argv) != 4:
usage_message = 'usage: %s coure_db_name forum_mongo_file'
if __name__ == '__main__':
main()
Add function to migrate forum data to mongodb | '''
Insert the edx discussion board .mongo files into mongodb database
'''
import pymongo
import sys
import json
def connect_to_db_collection(db_name, collection_name):
'''
Retrieve collection from given database name and collection name
'''
connection = pymongo.Connection('localhost', 27017)
db = connection[db_name]
collection = db[collection_name]
return collection
def remove_dollar_sign(json_object):
pass
def migrate_form_to_mongodb(forum_mongo_file, collection):
with open(forum_mongo_file) as file_handler:
try:
for line in file_handler:
data = json.loads(line)
data = remove_dollar_sign(data)
collection.insert(data)
except pymongo.errors.InvalidDocument as e:
print "INVALID_DOC: ", line
except Exception as e:
print "ERROR: ", line
def main():
if len(sys.argv) != 4:
usage_message = 'usage: %s coure_db_name forum_mongo_file'
if __name__ == '__main__':
main()
| <commit_before>'''
Insert the edx discussion board .mongo files into mongodb database
'''
import pymongo
import sys
import json
def connect_to_db_collection(db_name, collection_name):
'''
Retrieve collection from given database name and collection name
'''
connection = pymongo.Connection('localhost', 27017)
db = connection[db_name]
collection = db[collection_name]
return collection
def remove_dollar_sign(json_object):
pass
def migrate_form_to_mongodb(forum_mongo_file, collection):
pass
def main():
if len(sys.argv) != 4:
usage_message = 'usage: %s coure_db_name forum_mongo_file'
if __name__ == '__main__':
main()
<commit_msg>Add function to migrate forum data to mongodb<commit_after> | '''
Insert the edx discussion board .mongo files into mongodb database
'''
import pymongo
import sys
import json
def connect_to_db_collection(db_name, collection_name):
'''
Retrieve collection from given database name and collection name
'''
connection = pymongo.Connection('localhost', 27017)
db = connection[db_name]
collection = db[collection_name]
return collection
def remove_dollar_sign(json_object):
pass
def migrate_form_to_mongodb(forum_mongo_file, collection):
with open(forum_mongo_file) as file_handler:
try:
for line in file_handler:
data = json.loads(line)
data = remove_dollar_sign(data)
collection.insert(data)
except pymongo.errors.InvalidDocument as e:
print "INVALID_DOC: ", line
except Exception as e:
print "ERROR: ", line
def main():
if len(sys.argv) != 4:
usage_message = 'usage: %s coure_db_name forum_mongo_file'
if __name__ == '__main__':
main()
| '''
Insert the edx discussion board .mongo files into mongodb database
'''
import pymongo
import sys
import json
def connect_to_db_collection(db_name, collection_name):
'''
Retrieve collection from given database name and collection name
'''
connection = pymongo.Connection('localhost', 27017)
db = connection[db_name]
collection = db[collection_name]
return collection
def remove_dollar_sign(json_object):
pass
def migrate_form_to_mongodb(forum_mongo_file, collection):
pass
def main():
if len(sys.argv) != 4:
usage_message = 'usage: %s coure_db_name forum_mongo_file'
if __name__ == '__main__':
main()
Add function to migrate forum data to mongodb'''
Insert the edx discussion board .mongo files into mongodb database
'''
import pymongo
import sys
import json
def connect_to_db_collection(db_name, collection_name):
'''
Retrieve collection from given database name and collection name
'''
connection = pymongo.Connection('localhost', 27017)
db = connection[db_name]
collection = db[collection_name]
return collection
def remove_dollar_sign(json_object):
pass
def migrate_form_to_mongodb(forum_mongo_file, collection):
with open(forum_mongo_file) as file_handler:
try:
for line in file_handler:
data = json.loads(line)
data = remove_dollar_sign(data)
collection.insert(data)
except pymongo.errors.InvalidDocument as e:
print "INVALID_DOC: ", line
except Exception as e:
print "ERROR: ", line
def main():
if len(sys.argv) != 4:
usage_message = 'usage: %s coure_db_name forum_mongo_file'
if __name__ == '__main__':
main()
| <commit_before>'''
Insert the edx discussion board .mongo files into mongodb database
'''
import pymongo
import sys
import json
def connect_to_db_collection(db_name, collection_name):
'''
Retrieve collection from given database name and collection name
'''
connection = pymongo.Connection('localhost', 27017)
db = connection[db_name]
collection = db[collection_name]
return collection
def remove_dollar_sign(json_object):
pass
def migrate_form_to_mongodb(forum_mongo_file, collection):
pass
def main():
if len(sys.argv) != 4:
usage_message = 'usage: %s coure_db_name forum_mongo_file'
if __name__ == '__main__':
main()
<commit_msg>Add function to migrate forum data to mongodb<commit_after>'''
Insert the edx discussion board .mongo files into mongodb database
'''
import pymongo
import sys
import json
def connect_to_db_collection(db_name, collection_name):
'''
Retrieve collection from given database name and collection name
'''
connection = pymongo.Connection('localhost', 27017)
db = connection[db_name]
collection = db[collection_name]
return collection
def remove_dollar_sign(json_object):
pass
def migrate_form_to_mongodb(forum_mongo_file, collection):
with open(forum_mongo_file) as file_handler:
try:
for line in file_handler:
data = json.loads(line)
data = remove_dollar_sign(data)
collection.insert(data)
except pymongo.errors.InvalidDocument as e:
print "INVALID_DOC: ", line
except Exception as e:
print "ERROR: ", line
def main():
if len(sys.argv) != 4:
usage_message = 'usage: %s coure_db_name forum_mongo_file'
if __name__ == '__main__':
main()
|
cc8cc05480e85c9a66450f1655083e87d00ba3f4 | usersettings/shortcuts.py | usersettings/shortcuts.py | from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
def get_usersettings_model():
"""
Returns the ``UserSettings`` model that is active in this project.
"""
from django.db.models import get_model
try:
app_label, model_name = settings.USERSETTINGS_MODEL.split('.')
except ValueError:
raise ImproperlyConfigured('USERSETTINGS_MODEL must be of the '
'form "app_label.model_name"')
usersettings_model = get_model(app_label, model_name)
if usersettings_model is None:
raise ImproperlyConfigured('USERSETTINGS_MODEL refers to model "%s" that has '
'not been installed' % settings.USERSETTINGS_MODEL)
return usersettings_model
def get_current_usersettings():
    """
    Return the ``UserSettings`` instance for the current site (selected
    via ``SITE_ID`` in the project's settings), or ``None`` when no
    instance has been created for that site yet.

    A missing instance used to escape as an uncaught ``DoesNotExist``;
    it is now caught so the absence of usersettings is not a hard error.
    """
    usersettings_model = get_usersettings_model()
    try:
        return usersettings_model.objects.get_current()
    except usersettings_model.DoesNotExist:
        return None
| from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
def get_usersettings_model():
    """
    Return the ``UserSettings`` model class configured for this project.

    The class is looked up from the ``USERSETTINGS_MODEL`` setting, which
    must be a dotted ``"app_label.model_name"`` string naming an
    installed model; ``ImproperlyConfigured`` is raised otherwise.
    """
    from django.db.models import get_model
    dotted_path = settings.USERSETTINGS_MODEL
    try:
        app_label, model_name = dotted_path.split('.')
    except ValueError:
        raise ImproperlyConfigured('USERSETTINGS_MODEL must be of the '
                                   'form "app_label.model_name"')
    model_class = get_model(app_label, model_name)
    if model_class is None:
        raise ImproperlyConfigured('USERSETTINGS_MODEL refers to model "%s" that has '
                                   'not been installed' % dotted_path)
    return model_class
def get_current_usersettings():
    """
    Return the ``UserSettings`` instance for the current site (selected
    via ``SITE_ID`` in the project's settings), or ``None`` when no
    instance exists for that site yet.
    """
    usersettings_model = get_usersettings_model()
    try:
        return usersettings_model.objects.get_current()
    except usersettings_model.DoesNotExist:
        return None
| Update 'get_current_usersettings' to catch 'DoesNotExist' error | Update 'get_current_usersettings' to catch 'DoesNotExist' error
| Python | bsd-3-clause | mishbahr/django-usersettings2,mishbahr/django-usersettings2 | from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
def get_usersettings_model():
"""
Returns the ``UserSettings`` model that is active in this project.
"""
from django.db.models import get_model
try:
app_label, model_name = settings.USERSETTINGS_MODEL.split('.')
except ValueError:
raise ImproperlyConfigured('USERSETTINGS_MODEL must be of the '
'form "app_label.model_name"')
usersettings_model = get_model(app_label, model_name)
if usersettings_model is None:
raise ImproperlyConfigured('USERSETTINGS_MODEL refers to model "%s" that has '
'not been installed' % settings.USERSETTINGS_MODEL)
return usersettings_model
def get_current_usersettings():
"""
Returns the current ``UserSettings`` based on
the SITE_ID in the project's settings
"""
USERSETTINGS_MODEL = get_usersettings_model()
current_usersettings = USERSETTINGS_MODEL.objects.get_current()
return current_usersettings
Update 'get_current_usersettings' to catch 'DoesNotExist' error | from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
def get_usersettings_model():
"""
Returns the ``UserSettings`` model that is active in this project.
"""
from django.db.models import get_model
try:
app_label, model_name = settings.USERSETTINGS_MODEL.split('.')
except ValueError:
raise ImproperlyConfigured('USERSETTINGS_MODEL must be of the '
'form "app_label.model_name"')
usersettings_model = get_model(app_label, model_name)
if usersettings_model is None:
raise ImproperlyConfigured('USERSETTINGS_MODEL refers to model "%s" that has '
'not been installed' % settings.USERSETTINGS_MODEL)
return usersettings_model
def get_current_usersettings():
"""
Returns the current ``UserSettings`` based on
the SITE_ID in the project's settings
"""
USERSETTINGS_MODEL = get_usersettings_model()
try:
current_usersettings = USERSETTINGS_MODEL.objects.get_current()
except USERSETTINGS_MODEL.DoesNotExist:
current_usersettings = None
return current_usersettings
| <commit_before>from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
def get_usersettings_model():
"""
Returns the ``UserSettings`` model that is active in this project.
"""
from django.db.models import get_model
try:
app_label, model_name = settings.USERSETTINGS_MODEL.split('.')
except ValueError:
raise ImproperlyConfigured('USERSETTINGS_MODEL must be of the '
'form "app_label.model_name"')
usersettings_model = get_model(app_label, model_name)
if usersettings_model is None:
raise ImproperlyConfigured('USERSETTINGS_MODEL refers to model "%s" that has '
'not been installed' % settings.USERSETTINGS_MODEL)
return usersettings_model
def get_current_usersettings():
"""
Returns the current ``UserSettings`` based on
the SITE_ID in the project's settings
"""
USERSETTINGS_MODEL = get_usersettings_model()
current_usersettings = USERSETTINGS_MODEL.objects.get_current()
return current_usersettings
<commit_msg>Update 'get_current_usersettings' to catch 'DoesNotExist' error<commit_after> | from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
def get_usersettings_model():
"""
Returns the ``UserSettings`` model that is active in this project.
"""
from django.db.models import get_model
try:
app_label, model_name = settings.USERSETTINGS_MODEL.split('.')
except ValueError:
raise ImproperlyConfigured('USERSETTINGS_MODEL must be of the '
'form "app_label.model_name"')
usersettings_model = get_model(app_label, model_name)
if usersettings_model is None:
raise ImproperlyConfigured('USERSETTINGS_MODEL refers to model "%s" that has '
'not been installed' % settings.USERSETTINGS_MODEL)
return usersettings_model
def get_current_usersettings():
"""
Returns the current ``UserSettings`` based on
the SITE_ID in the project's settings
"""
USERSETTINGS_MODEL = get_usersettings_model()
try:
current_usersettings = USERSETTINGS_MODEL.objects.get_current()
except USERSETTINGS_MODEL.DoesNotExist:
current_usersettings = None
return current_usersettings
| from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
def get_usersettings_model():
"""
Returns the ``UserSettings`` model that is active in this project.
"""
from django.db.models import get_model
try:
app_label, model_name = settings.USERSETTINGS_MODEL.split('.')
except ValueError:
raise ImproperlyConfigured('USERSETTINGS_MODEL must be of the '
'form "app_label.model_name"')
usersettings_model = get_model(app_label, model_name)
if usersettings_model is None:
raise ImproperlyConfigured('USERSETTINGS_MODEL refers to model "%s" that has '
'not been installed' % settings.USERSETTINGS_MODEL)
return usersettings_model
def get_current_usersettings():
"""
Returns the current ``UserSettings`` based on
the SITE_ID in the project's settings
"""
USERSETTINGS_MODEL = get_usersettings_model()
current_usersettings = USERSETTINGS_MODEL.objects.get_current()
return current_usersettings
Update 'get_current_usersettings' to catch 'DoesNotExist' errorfrom django.conf import settings
from django.core.exceptions import ImproperlyConfigured
def get_usersettings_model():
"""
Returns the ``UserSettings`` model that is active in this project.
"""
from django.db.models import get_model
try:
app_label, model_name = settings.USERSETTINGS_MODEL.split('.')
except ValueError:
raise ImproperlyConfigured('USERSETTINGS_MODEL must be of the '
'form "app_label.model_name"')
usersettings_model = get_model(app_label, model_name)
if usersettings_model is None:
raise ImproperlyConfigured('USERSETTINGS_MODEL refers to model "%s" that has '
'not been installed' % settings.USERSETTINGS_MODEL)
return usersettings_model
def get_current_usersettings():
"""
Returns the current ``UserSettings`` based on
the SITE_ID in the project's settings
"""
USERSETTINGS_MODEL = get_usersettings_model()
try:
current_usersettings = USERSETTINGS_MODEL.objects.get_current()
except USERSETTINGS_MODEL.DoesNotExist:
current_usersettings = None
return current_usersettings
| <commit_before>from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
def get_usersettings_model():
"""
Returns the ``UserSettings`` model that is active in this project.
"""
from django.db.models import get_model
try:
app_label, model_name = settings.USERSETTINGS_MODEL.split('.')
except ValueError:
raise ImproperlyConfigured('USERSETTINGS_MODEL must be of the '
'form "app_label.model_name"')
usersettings_model = get_model(app_label, model_name)
if usersettings_model is None:
raise ImproperlyConfigured('USERSETTINGS_MODEL refers to model "%s" that has '
'not been installed' % settings.USERSETTINGS_MODEL)
return usersettings_model
def get_current_usersettings():
"""
Returns the current ``UserSettings`` based on
the SITE_ID in the project's settings
"""
USERSETTINGS_MODEL = get_usersettings_model()
current_usersettings = USERSETTINGS_MODEL.objects.get_current()
return current_usersettings
<commit_msg>Update 'get_current_usersettings' to catch 'DoesNotExist' error<commit_after>from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
def get_usersettings_model():
"""
Returns the ``UserSettings`` model that is active in this project.
"""
from django.db.models import get_model
try:
app_label, model_name = settings.USERSETTINGS_MODEL.split('.')
except ValueError:
raise ImproperlyConfigured('USERSETTINGS_MODEL must be of the '
'form "app_label.model_name"')
usersettings_model = get_model(app_label, model_name)
if usersettings_model is None:
raise ImproperlyConfigured('USERSETTINGS_MODEL refers to model "%s" that has '
'not been installed' % settings.USERSETTINGS_MODEL)
return usersettings_model
def get_current_usersettings():
"""
Returns the current ``UserSettings`` based on
the SITE_ID in the project's settings
"""
USERSETTINGS_MODEL = get_usersettings_model()
try:
current_usersettings = USERSETTINGS_MODEL.objects.get_current()
except USERSETTINGS_MODEL.DoesNotExist:
current_usersettings = None
return current_usersettings
|
eb9e2c3217ff0f19a28fc49b2fa5f14d295f32e2 | app/views.py | app/views.py | from flask import render_template, jsonify, request
from app import app
from app import evolver
@app.route('/')
@app.route('/index')
def index():
return render_template('index.html')
def format_transcriptions(transcriptions):
'''Split the raw string of transcriptions into
the correct tuple rules.'''
return [(pair.split(':')[0], pair.split(':')[1]) for pair in transcriptions.split('\n')]
@app.route('/evolve', methods=['POST'])
def evolve():
words = request.form['words'].split()
try:
transcriptions = format_transcriptions(request.form['transcriptions'])
except IndexError:
return jsonify({'error': 'Error: Transcription seperator must be a colon'})
try:
generations = int(request.form['generations'])
except ValueError:
return jsonify({'error': 'Error: Generations must be an integer'})
words, rules = evolver.evolve(words, generations, transcriptions)
return jsonify({'rules': rules, 'words': words, 'error': 0})
| from flask import render_template, jsonify, request
from app import app
from app import evolver
@app.route('/')
@app.route('/index')
def index():
return render_template('index.html')
def format_transcriptions(transcriptions):
'''Split the raw string of transcriptions into
the correct tuple rules.'''
clean_transcriptions = transcriptions.strip().lower()
if len(clean_transcriptions) == 0:
return []
else:
return [(pair.split(':')[0], pair.split(':')[1]) for pair in clean_transcriptions.split('\n')]
@app.route('/evolve', methods=['POST'])
def evolve():
words = request.form['words'].split()
try:
transcriptions = format_transcriptions(request.form['transcriptions'])
except IndexError:
return jsonify({'error': 'Error: Transcription seperator must be a colon'})
try:
generations = int(request.form['generations'])
except ValueError:
return jsonify({'error': 'Error: Generations must be an integer'})
words, rules = evolver.evolve(words, generations, transcriptions)
return jsonify({'rules': rules, 'words': words, 'error': 0})
| Fix bug when transcriptions are empty | Fix bug when transcriptions are empty
| Python | mit | kdelwat/LangEvolve,kdelwat/LangEvolve,kdelwat/LangEvolve | from flask import render_template, jsonify, request
from app import app
from app import evolver
@app.route('/')
@app.route('/index')
def index():
return render_template('index.html')
def format_transcriptions(transcriptions):
'''Split the raw string of transcriptions into
the correct tuple rules.'''
return [(pair.split(':')[0], pair.split(':')[1]) for pair in transcriptions.split('\n')]
@app.route('/evolve', methods=['POST'])
def evolve():
words = request.form['words'].split()
try:
transcriptions = format_transcriptions(request.form['transcriptions'])
except IndexError:
return jsonify({'error': 'Error: Transcription seperator must be a colon'})
try:
generations = int(request.form['generations'])
except ValueError:
return jsonify({'error': 'Error: Generations must be an integer'})
words, rules = evolver.evolve(words, generations, transcriptions)
return jsonify({'rules': rules, 'words': words, 'error': 0})
Fix bug when transcriptions are empty | from flask import render_template, jsonify, request
from app import app
from app import evolver
@app.route('/')
@app.route('/index')
def index():
return render_template('index.html')
def format_transcriptions(transcriptions):
'''Split the raw string of transcriptions into
the correct tuple rules.'''
clean_transcriptions = transcriptions.strip().lower()
if len(clean_transcriptions) == 0:
return []
else:
return [(pair.split(':')[0], pair.split(':')[1]) for pair in clean_transcriptions.split('\n')]
@app.route('/evolve', methods=['POST'])
def evolve():
words = request.form['words'].split()
try:
transcriptions = format_transcriptions(request.form['transcriptions'])
except IndexError:
return jsonify({'error': 'Error: Transcription seperator must be a colon'})
try:
generations = int(request.form['generations'])
except ValueError:
return jsonify({'error': 'Error: Generations must be an integer'})
words, rules = evolver.evolve(words, generations, transcriptions)
return jsonify({'rules': rules, 'words': words, 'error': 0})
| <commit_before>from flask import render_template, jsonify, request
from app import app
from app import evolver
@app.route('/')
@app.route('/index')
def index():
return render_template('index.html')
def format_transcriptions(transcriptions):
'''Split the raw string of transcriptions into
the correct tuple rules.'''
return [(pair.split(':')[0], pair.split(':')[1]) for pair in transcriptions.split('\n')]
@app.route('/evolve', methods=['POST'])
def evolve():
words = request.form['words'].split()
try:
transcriptions = format_transcriptions(request.form['transcriptions'])
except IndexError:
return jsonify({'error': 'Error: Transcription seperator must be a colon'})
try:
generations = int(request.form['generations'])
except ValueError:
return jsonify({'error': 'Error: Generations must be an integer'})
words, rules = evolver.evolve(words, generations, transcriptions)
return jsonify({'rules': rules, 'words': words, 'error': 0})
<commit_msg>Fix bug when transcriptions are empty<commit_after> | from flask import render_template, jsonify, request
from app import app
from app import evolver
@app.route('/')
@app.route('/index')
def index():
return render_template('index.html')
def format_transcriptions(transcriptions):
'''Split the raw string of transcriptions into
the correct tuple rules.'''
clean_transcriptions = transcriptions.strip().lower()
if len(clean_transcriptions) == 0:
return []
else:
return [(pair.split(':')[0], pair.split(':')[1]) for pair in clean_transcriptions.split('\n')]
@app.route('/evolve', methods=['POST'])
def evolve():
words = request.form['words'].split()
try:
transcriptions = format_transcriptions(request.form['transcriptions'])
except IndexError:
return jsonify({'error': 'Error: Transcription seperator must be a colon'})
try:
generations = int(request.form['generations'])
except ValueError:
return jsonify({'error': 'Error: Generations must be an integer'})
words, rules = evolver.evolve(words, generations, transcriptions)
return jsonify({'rules': rules, 'words': words, 'error': 0})
| from flask import render_template, jsonify, request
from app import app
from app import evolver
@app.route('/')
@app.route('/index')
def index():
return render_template('index.html')
def format_transcriptions(transcriptions):
'''Split the raw string of transcriptions into
the correct tuple rules.'''
return [(pair.split(':')[0], pair.split(':')[1]) for pair in transcriptions.split('\n')]
@app.route('/evolve', methods=['POST'])
def evolve():
words = request.form['words'].split()
try:
transcriptions = format_transcriptions(request.form['transcriptions'])
except IndexError:
return jsonify({'error': 'Error: Transcription seperator must be a colon'})
try:
generations = int(request.form['generations'])
except ValueError:
return jsonify({'error': 'Error: Generations must be an integer'})
words, rules = evolver.evolve(words, generations, transcriptions)
return jsonify({'rules': rules, 'words': words, 'error': 0})
Fix bug when transcriptions are emptyfrom flask import render_template, jsonify, request
from app import app
from app import evolver
@app.route('/')
@app.route('/index')
def index():
return render_template('index.html')
def format_transcriptions(transcriptions):
'''Split the raw string of transcriptions into
the correct tuple rules.'''
clean_transcriptions = transcriptions.strip().lower()
if len(clean_transcriptions) == 0:
return []
else:
return [(pair.split(':')[0], pair.split(':')[1]) for pair in clean_transcriptions.split('\n')]
@app.route('/evolve', methods=['POST'])
def evolve():
words = request.form['words'].split()
try:
transcriptions = format_transcriptions(request.form['transcriptions'])
except IndexError:
return jsonify({'error': 'Error: Transcription seperator must be a colon'})
try:
generations = int(request.form['generations'])
except ValueError:
return jsonify({'error': 'Error: Generations must be an integer'})
words, rules = evolver.evolve(words, generations, transcriptions)
return jsonify({'rules': rules, 'words': words, 'error': 0})
| <commit_before>from flask import render_template, jsonify, request
from app import app
from app import evolver
@app.route('/')
@app.route('/index')
def index():
return render_template('index.html')
def format_transcriptions(transcriptions):
'''Split the raw string of transcriptions into
the correct tuple rules.'''
return [(pair.split(':')[0], pair.split(':')[1]) for pair in transcriptions.split('\n')]
@app.route('/evolve', methods=['POST'])
def evolve():
words = request.form['words'].split()
try:
transcriptions = format_transcriptions(request.form['transcriptions'])
except IndexError:
return jsonify({'error': 'Error: Transcription seperator must be a colon'})
try:
generations = int(request.form['generations'])
except ValueError:
return jsonify({'error': 'Error: Generations must be an integer'})
words, rules = evolver.evolve(words, generations, transcriptions)
return jsonify({'rules': rules, 'words': words, 'error': 0})
<commit_msg>Fix bug when transcriptions are empty<commit_after>from flask import render_template, jsonify, request
from app import app
from app import evolver
@app.route('/')
@app.route('/index')
def index():
return render_template('index.html')
def format_transcriptions(transcriptions):
'''Split the raw string of transcriptions into
the correct tuple rules.'''
clean_transcriptions = transcriptions.strip().lower()
if len(clean_transcriptions) == 0:
return []
else:
return [(pair.split(':')[0], pair.split(':')[1]) for pair in clean_transcriptions.split('\n')]
@app.route('/evolve', methods=['POST'])
def evolve():
words = request.form['words'].split()
try:
transcriptions = format_transcriptions(request.form['transcriptions'])
except IndexError:
return jsonify({'error': 'Error: Transcription seperator must be a colon'})
try:
generations = int(request.form['generations'])
except ValueError:
return jsonify({'error': 'Error: Generations must be an integer'})
words, rules = evolver.evolve(words, generations, transcriptions)
return jsonify({'rules': rules, 'words': words, 'error': 0})
|
87bdef439a3faf465bb8c23166beeb8a142400f7 | fapistrano/plugins/curl.py | fapistrano/plugins/curl.py | # -*- coding: utf-8 -*-
from fabric.api import cd, env, run
from .. import signal, configuration
def init():
configuration.setdefault('curl_url', '')
configuration.setdefault('curl_options', '')
signal.register('deploy.updating', download_artifact)
def download_artifact(**kwargs):
with cd(env.release_path):
run('curl %(curl_url)s %(curl_options)s' % env)
| # -*- coding: utf-8 -*-
from fabric.api import cd, env, run
from .. import signal, configuration
def init():
configuration.setdefault('curl_url', '')
configuration.setdefault('curl_options', '')
configuration.setdefault('curl_extract_tar', '')
configuration.setdefault('curl_postinstall_script', '')
signal.register('deploy.updating', download_artifact)
def download_artifact(**kwargs):
with cd(env.release_path):
cmd = 'curl %(curl_url)s %(curl_options)s' % env
if env.curl_extract_tar:
cmd += ' | tar -x'
run(cmd)
if env.curl_postinstall_script:
run(env.curl_postinstall_script)
| Add extract_tar and post_install_script option. | Add extract_tar and post_install_script option.
| Python | mit | liwushuo/fapistrano | # -*- coding: utf-8 -*-
from fabric.api import cd, env, run
from .. import signal, configuration
def init():
configuration.setdefault('curl_url', '')
configuration.setdefault('curl_options', '')
signal.register('deploy.updating', download_artifact)
def download_artifact(**kwargs):
with cd(env.release_path):
run('curl %(curl_url)s %(curl_options)s' % env)
Add extract_tar and post_install_script option. | # -*- coding: utf-8 -*-
from fabric.api import cd, env, run
from .. import signal, configuration
def init():
configuration.setdefault('curl_url', '')
configuration.setdefault('curl_options', '')
configuration.setdefault('curl_extract_tar', '')
configuration.setdefault('curl_postinstall_script', '')
signal.register('deploy.updating', download_artifact)
def download_artifact(**kwargs):
with cd(env.release_path):
cmd = 'curl %(curl_url)s %(curl_options)s' % env
if env.curl_extract_tar:
cmd += ' | tar -x'
run(cmd)
if env.curl_postinstall_script:
run(env.curl_postinstall_script)
| <commit_before># -*- coding: utf-8 -*-
from fabric.api import cd, env, run
from .. import signal, configuration
def init():
configuration.setdefault('curl_url', '')
configuration.setdefault('curl_options', '')
signal.register('deploy.updating', download_artifact)
def download_artifact(**kwargs):
with cd(env.release_path):
run('curl %(curl_url)s %(curl_options)s' % env)
<commit_msg>Add extract_tar and post_install_script option.<commit_after> | # -*- coding: utf-8 -*-
from fabric.api import cd, env, run
from .. import signal, configuration
def init():
configuration.setdefault('curl_url', '')
configuration.setdefault('curl_options', '')
configuration.setdefault('curl_extract_tar', '')
configuration.setdefault('curl_postinstall_script', '')
signal.register('deploy.updating', download_artifact)
def download_artifact(**kwargs):
with cd(env.release_path):
cmd = 'curl %(curl_url)s %(curl_options)s' % env
if env.curl_extract_tar:
cmd += ' | tar -x'
run(cmd)
if env.curl_postinstall_script:
run(env.curl_postinstall_script)
| # -*- coding: utf-8 -*-
from fabric.api import cd, env, run
from .. import signal, configuration
def init():
configuration.setdefault('curl_url', '')
configuration.setdefault('curl_options', '')
signal.register('deploy.updating', download_artifact)
def download_artifact(**kwargs):
with cd(env.release_path):
run('curl %(curl_url)s %(curl_options)s' % env)
Add extract_tar and post_install_script option.# -*- coding: utf-8 -*-
from fabric.api import cd, env, run
from .. import signal, configuration
def init():
configuration.setdefault('curl_url', '')
configuration.setdefault('curl_options', '')
configuration.setdefault('curl_extract_tar', '')
configuration.setdefault('curl_postinstall_script', '')
signal.register('deploy.updating', download_artifact)
def download_artifact(**kwargs):
with cd(env.release_path):
cmd = 'curl %(curl_url)s %(curl_options)s' % env
if env.curl_extract_tar:
cmd += ' | tar -x'
run(cmd)
if env.curl_postinstall_script:
run(env.curl_postinstall_script)
| <commit_before># -*- coding: utf-8 -*-
from fabric.api import cd, env, run
from .. import signal, configuration
def init():
configuration.setdefault('curl_url', '')
configuration.setdefault('curl_options', '')
signal.register('deploy.updating', download_artifact)
def download_artifact(**kwargs):
with cd(env.release_path):
run('curl %(curl_url)s %(curl_options)s' % env)
<commit_msg>Add extract_tar and post_install_script option.<commit_after># -*- coding: utf-8 -*-
from fabric.api import cd, env, run
from .. import signal, configuration
def init():
configuration.setdefault('curl_url', '')
configuration.setdefault('curl_options', '')
configuration.setdefault('curl_extract_tar', '')
configuration.setdefault('curl_postinstall_script', '')
signal.register('deploy.updating', download_artifact)
def download_artifact(**kwargs):
with cd(env.release_path):
cmd = 'curl %(curl_url)s %(curl_options)s' % env
if env.curl_extract_tar:
cmd += ' | tar -x'
run(cmd)
if env.curl_postinstall_script:
run(env.curl_postinstall_script)
|
83e147ca35cbdc70a5b3e3e374a14a3ad4efdd17 | vumi_http_api/__init__.py | vumi_http_api/__init__.py | from .vumi_api import VumiApiWorker
__all__ = ['VumiApiWorker']
| from .vumi_api import VumiApiWorker
__version__ = "0.0.1a"
__all__ = ['VumiApiWorker']
| Add __version__ to vumi_http_api package. | Add __version__ to vumi_http_api package.
| Python | bsd-3-clause | praekelt/vumi-http-api,praekelt/vumi-http-api | from .vumi_api import VumiApiWorker
__all__ = ['VumiApiWorker']
Add __version__ to vumi_http_api package. | from .vumi_api import VumiApiWorker
__version__ = "0.0.1a"
__all__ = ['VumiApiWorker']
| <commit_before>from .vumi_api import VumiApiWorker
__all__ = ['VumiApiWorker']
<commit_msg>Add __version__ to vumi_http_api package.<commit_after> | from .vumi_api import VumiApiWorker
__version__ = "0.0.1a"
__all__ = ['VumiApiWorker']
| from .vumi_api import VumiApiWorker
__all__ = ['VumiApiWorker']
Add __version__ to vumi_http_api package.from .vumi_api import VumiApiWorker
__version__ = "0.0.1a"
__all__ = ['VumiApiWorker']
| <commit_before>from .vumi_api import VumiApiWorker
__all__ = ['VumiApiWorker']
<commit_msg>Add __version__ to vumi_http_api package.<commit_after>from .vumi_api import VumiApiWorker
__version__ = "0.0.1a"
__all__ = ['VumiApiWorker']
|
bd3cb20453d044882fc476e55e2aade8c5c81ea7 | 2/ConfNEP.py | 2/ConfNEP.py | """A custom ILAMB confrontation for net ecosystem productivity (nep)."""
import os
import numpy as np
from ILAMB.Confrontation import Confrontation
from ILAMB.Variable import Variable
from ILAMB.ilamblib import MakeComparable
class ConfNEP(Confrontation):
"""Confront ``nep`` model outputs with ``nee`` observations.
Net ecosystem productivity (``nep``) is a CMIP5 standard output
provided by the MsTMIP models, and is the inverse of net ecosystem
exchange (``nee``), for which benchmark datasets are provided in
ILAMB.
"""
def __init__(self, **keywords):
super(ConfNEP, self).__init__(**keywords)
def stageData(self, m):
obs = Variable(filename=self.source,
variable_name=self.variable)
self._checkRegions(obs)
obs.data *= -1.0 # Reverse sign of benchmark data.
mod = m.extractTimeSeries(self.variable,
alt_vars=self.alternate_vars)
mod.data *= -1.0 # Reverse sign of modified model outputs.
obs, mod = MakeComparable(obs, mod, clip_ref=True,
logstring="[%s][%s]" %
(self.longname, m.name))
return obs, mod
| """A custom ILAMB confrontation for net ecosystem productivity (nep)."""
import os
import numpy as np
from ILAMB.Confrontation import Confrontation
from ILAMB.Variable import Variable
from ILAMB.ilamblib import MakeComparable
class ConfNEP(Confrontation):
"""Confront ``nep`` model outputs with ``nee`` observations.
Net ecosystem productivity (``nep``) is a CMIP5 standard output
provided by the MsTMIP models, and is the inverse of net ecosystem
exchange (``nee``), for which benchmark datasets are provided in
ILAMB.
"""
def __init__(self, **keywords):
super(ConfNEP, self).__init__(**keywords)
def stageData(self, m):
obs = Variable(filename=self.source,
variable_name=self.variable)
obs.data *= -1.0 # Reverse sign of benchmark data.
mod = m.extractTimeSeries(self.variable,
alt_vars=self.alternate_vars)
mod.data *= -1.0 # Reverse sign of modified model outputs.
obs, mod = MakeComparable(obs, mod, clip_ref=True,
logstring="[%s][%s]" %
(self.longname, m.name))
return obs, mod
| Remove call to _checkRegions method | Remove call to _checkRegions method
At one point it was a method of the super (I believe), but it's
no longer there.
| Python | mit | permamodel/ILAMB-experiments | """A custom ILAMB confrontation for net ecosystem productivity (nep)."""
import os
import numpy as np
from ILAMB.Confrontation import Confrontation
from ILAMB.Variable import Variable
from ILAMB.ilamblib import MakeComparable
class ConfNEP(Confrontation):
"""Confront ``nep`` model outputs with ``nee`` observations.
Net ecosystem productivity (``nep``) is a CMIP5 standard output
provided by the MsTMIP models, and is the inverse of net ecosystem
exchange (``nee``), for which benchmark datasets are provided in
ILAMB.
"""
def __init__(self, **keywords):
super(ConfNEP, self).__init__(**keywords)
def stageData(self, m):
obs = Variable(filename=self.source,
variable_name=self.variable)
self._checkRegions(obs)
obs.data *= -1.0 # Reverse sign of benchmark data.
mod = m.extractTimeSeries(self.variable,
alt_vars=self.alternate_vars)
mod.data *= -1.0 # Reverse sign of modified model outputs.
obs, mod = MakeComparable(obs, mod, clip_ref=True,
logstring="[%s][%s]" %
(self.longname, m.name))
return obs, mod
Remove call to _checkRegions method
At one point it was a method of the super (I believe), but it's
no longer there. | """A custom ILAMB confrontation for net ecosystem productivity (nep)."""
import os
import numpy as np
from ILAMB.Confrontation import Confrontation
from ILAMB.Variable import Variable
from ILAMB.ilamblib import MakeComparable
class ConfNEP(Confrontation):
"""Confront ``nep`` model outputs with ``nee`` observations.
Net ecosystem productivity (``nep``) is a CMIP5 standard output
provided by the MsTMIP models, and is the inverse of net ecosystem
exchange (``nee``), for which benchmark datasets are provided in
ILAMB.
"""
def __init__(self, **keywords):
super(ConfNEP, self).__init__(**keywords)
def stageData(self, m):
obs = Variable(filename=self.source,
variable_name=self.variable)
obs.data *= -1.0 # Reverse sign of benchmark data.
mod = m.extractTimeSeries(self.variable,
alt_vars=self.alternate_vars)
mod.data *= -1.0 # Reverse sign of modified model outputs.
obs, mod = MakeComparable(obs, mod, clip_ref=True,
logstring="[%s][%s]" %
(self.longname, m.name))
return obs, mod
| <commit_before>"""A custom ILAMB confrontation for net ecosystem productivity (nep)."""
import os
import numpy as np
from ILAMB.Confrontation import Confrontation
from ILAMB.Variable import Variable
from ILAMB.ilamblib import MakeComparable
class ConfNEP(Confrontation):
"""Confront ``nep`` model outputs with ``nee`` observations.
Net ecosystem productivity (``nep``) is a CMIP5 standard output
provided by the MsTMIP models, and is the inverse of net ecosystem
exchange (``nee``), for which benchmark datasets are provided in
ILAMB.
"""
def __init__(self, **keywords):
super(ConfNEP, self).__init__(**keywords)
def stageData(self, m):
obs = Variable(filename=self.source,
variable_name=self.variable)
self._checkRegions(obs)
obs.data *= -1.0 # Reverse sign of benchmark data.
mod = m.extractTimeSeries(self.variable,
alt_vars=self.alternate_vars)
mod.data *= -1.0 # Reverse sign of modified model outputs.
obs, mod = MakeComparable(obs, mod, clip_ref=True,
logstring="[%s][%s]" %
(self.longname, m.name))
return obs, mod
<commit_msg>Remove call to _checkRegions method
At one point it was a method of the super (I believe), but it's
no longer there.<commit_after> | """A custom ILAMB confrontation for net ecosystem productivity (nep)."""
import os
import numpy as np
from ILAMB.Confrontation import Confrontation
from ILAMB.Variable import Variable
from ILAMB.ilamblib import MakeComparable
class ConfNEP(Confrontation):
"""Confront ``nep`` model outputs with ``nee`` observations.
Net ecosystem productivity (``nep``) is a CMIP5 standard output
provided by the MsTMIP models, and is the inverse of net ecosystem
exchange (``nee``), for which benchmark datasets are provided in
ILAMB.
"""
def __init__(self, **keywords):
super(ConfNEP, self).__init__(**keywords)
def stageData(self, m):
obs = Variable(filename=self.source,
variable_name=self.variable)
obs.data *= -1.0 # Reverse sign of benchmark data.
mod = m.extractTimeSeries(self.variable,
alt_vars=self.alternate_vars)
mod.data *= -1.0 # Reverse sign of modified model outputs.
obs, mod = MakeComparable(obs, mod, clip_ref=True,
logstring="[%s][%s]" %
(self.longname, m.name))
return obs, mod
| """A custom ILAMB confrontation for net ecosystem productivity (nep)."""
import os
import numpy as np
from ILAMB.Confrontation import Confrontation
from ILAMB.Variable import Variable
from ILAMB.ilamblib import MakeComparable
class ConfNEP(Confrontation):
"""Confront ``nep`` model outputs with ``nee`` observations.
Net ecosystem productivity (``nep``) is a CMIP5 standard output
provided by the MsTMIP models, and is the inverse of net ecosystem
exchange (``nee``), for which benchmark datasets are provided in
ILAMB.
"""
def __init__(self, **keywords):
super(ConfNEP, self).__init__(**keywords)
def stageData(self, m):
obs = Variable(filename=self.source,
variable_name=self.variable)
self._checkRegions(obs)
obs.data *= -1.0 # Reverse sign of benchmark data.
mod = m.extractTimeSeries(self.variable,
alt_vars=self.alternate_vars)
mod.data *= -1.0 # Reverse sign of modified model outputs.
obs, mod = MakeComparable(obs, mod, clip_ref=True,
logstring="[%s][%s]" %
(self.longname, m.name))
return obs, mod
Remove call to _checkRegions method
At one point it was a method of the super (I believe), but it's
no longer there."""A custom ILAMB confrontation for net ecosystem productivity (nep)."""
import os
import numpy as np
from ILAMB.Confrontation import Confrontation
from ILAMB.Variable import Variable
from ILAMB.ilamblib import MakeComparable
class ConfNEP(Confrontation):
"""Confront ``nep`` model outputs with ``nee`` observations.
Net ecosystem productivity (``nep``) is a CMIP5 standard output
provided by the MsTMIP models, and is the inverse of net ecosystem
exchange (``nee``), for which benchmark datasets are provided in
ILAMB.
"""
def __init__(self, **keywords):
super(ConfNEP, self).__init__(**keywords)
def stageData(self, m):
obs = Variable(filename=self.source,
variable_name=self.variable)
obs.data *= -1.0 # Reverse sign of benchmark data.
mod = m.extractTimeSeries(self.variable,
alt_vars=self.alternate_vars)
mod.data *= -1.0 # Reverse sign of modified model outputs.
obs, mod = MakeComparable(obs, mod, clip_ref=True,
logstring="[%s][%s]" %
(self.longname, m.name))
return obs, mod
| <commit_before>"""A custom ILAMB confrontation for net ecosystem productivity (nep)."""
import os
import numpy as np
from ILAMB.Confrontation import Confrontation
from ILAMB.Variable import Variable
from ILAMB.ilamblib import MakeComparable
class ConfNEP(Confrontation):
"""Confront ``nep`` model outputs with ``nee`` observations.
Net ecosystem productivity (``nep``) is a CMIP5 standard output
provided by the MsTMIP models, and is the inverse of net ecosystem
exchange (``nee``), for which benchmark datasets are provided in
ILAMB.
"""
def __init__(self, **keywords):
super(ConfNEP, self).__init__(**keywords)
def stageData(self, m):
obs = Variable(filename=self.source,
variable_name=self.variable)
self._checkRegions(obs)
obs.data *= -1.0 # Reverse sign of benchmark data.
mod = m.extractTimeSeries(self.variable,
alt_vars=self.alternate_vars)
mod.data *= -1.0 # Reverse sign of modified model outputs.
obs, mod = MakeComparable(obs, mod, clip_ref=True,
logstring="[%s][%s]" %
(self.longname, m.name))
return obs, mod
<commit_msg>Remove call to _checkRegions method
At one point it was a method of the super (I believe), but it's
no longer there.<commit_after>"""A custom ILAMB confrontation for net ecosystem productivity (nep)."""
import os
import numpy as np
from ILAMB.Confrontation import Confrontation
from ILAMB.Variable import Variable
from ILAMB.ilamblib import MakeComparable
class ConfNEP(Confrontation):
"""Confront ``nep`` model outputs with ``nee`` observations.
Net ecosystem productivity (``nep``) is a CMIP5 standard output
provided by the MsTMIP models, and is the inverse of net ecosystem
exchange (``nee``), for which benchmark datasets are provided in
ILAMB.
"""
def __init__(self, **keywords):
super(ConfNEP, self).__init__(**keywords)
def stageData(self, m):
obs = Variable(filename=self.source,
variable_name=self.variable)
obs.data *= -1.0 # Reverse sign of benchmark data.
mod = m.extractTimeSeries(self.variable,
alt_vars=self.alternate_vars)
mod.data *= -1.0 # Reverse sign of modified model outputs.
obs, mod = MakeComparable(obs, mod, clip_ref=True,
logstring="[%s][%s]" %
(self.longname, m.name))
return obs, mod
|
528759e6ba579de185616190e3e514938989a54e | tests/console/asciimatics/widgets/testcheckbox.py | tests/console/asciimatics/widgets/testcheckbox.py |
from scriptcore.testing.testcase import TestCase
from scriptcore.console.asciimatics.widgets.checkbox import CheckBox
from asciimatics.widgets import CheckBox as ACheckBox
class TestCheckBox(TestCase):
def test_checkbox(self):
"""
Test the checkbox
:return: void
"""
changed_checkbox = []
def change_handler(checkbox):
changed_checkbox.append(checkbox)
checkbox = CheckBox(self.rand_str(), on_change=change_handler)
self.assert_is_instance(checkbox, ACheckBox)
for value in [True, False, True]:
previous_count = len(changed_checkbox)
checkbox.value = value
self.assert_equal(previous_count + 1, len(changed_checkbox))
self.assert_equal(checkbox, changed_checkbox[-1])
|
from scriptcore.testing.testcase import TestCase
from scriptcore.console.asciimatics.widgets.checkbox import CheckBox
from asciimatics.widgets import CheckBox as ACheckBox
class TestCheckBox(TestCase):
def test_checkbox(self):
"""
Test the checkbox
:return: void
"""
changed_checkbox = []
def change_handler(checkbox):
changed_checkbox.append(checkbox)
checkbox = CheckBox(self.rand_str(), on_change=change_handler)
self.assert_is_instance(checkbox, ACheckBox)
for value in [True, False, True]:
previous_count = len(changed_checkbox)
checkbox.value = value
self.assert_equal(value, checkbox.value)
self.assert_equal(previous_count + 1, len(changed_checkbox))
self.assert_equal(checkbox, changed_checkbox[-1])
| Check if checkbox value has updated. | Check if checkbox value has updated.
| Python | apache-2.0 | LowieHuyghe/script-core |
from scriptcore.testing.testcase import TestCase
from scriptcore.console.asciimatics.widgets.checkbox import CheckBox
from asciimatics.widgets import CheckBox as ACheckBox
class TestCheckBox(TestCase):
def test_checkbox(self):
"""
Test the checkbox
:return: void
"""
changed_checkbox = []
def change_handler(checkbox):
changed_checkbox.append(checkbox)
checkbox = CheckBox(self.rand_str(), on_change=change_handler)
self.assert_is_instance(checkbox, ACheckBox)
for value in [True, False, True]:
previous_count = len(changed_checkbox)
checkbox.value = value
self.assert_equal(previous_count + 1, len(changed_checkbox))
self.assert_equal(checkbox, changed_checkbox[-1])
Check if checkbox value has updated. |
from scriptcore.testing.testcase import TestCase
from scriptcore.console.asciimatics.widgets.checkbox import CheckBox
from asciimatics.widgets import CheckBox as ACheckBox
class TestCheckBox(TestCase):
def test_checkbox(self):
"""
Test the checkbox
:return: void
"""
changed_checkbox = []
def change_handler(checkbox):
changed_checkbox.append(checkbox)
checkbox = CheckBox(self.rand_str(), on_change=change_handler)
self.assert_is_instance(checkbox, ACheckBox)
for value in [True, False, True]:
previous_count = len(changed_checkbox)
checkbox.value = value
self.assert_equal(value, checkbox.value)
self.assert_equal(previous_count + 1, len(changed_checkbox))
self.assert_equal(checkbox, changed_checkbox[-1])
| <commit_before>
from scriptcore.testing.testcase import TestCase
from scriptcore.console.asciimatics.widgets.checkbox import CheckBox
from asciimatics.widgets import CheckBox as ACheckBox
class TestCheckBox(TestCase):
def test_checkbox(self):
"""
Test the checkbox
:return: void
"""
changed_checkbox = []
def change_handler(checkbox):
changed_checkbox.append(checkbox)
checkbox = CheckBox(self.rand_str(), on_change=change_handler)
self.assert_is_instance(checkbox, ACheckBox)
for value in [True, False, True]:
previous_count = len(changed_checkbox)
checkbox.value = value
self.assert_equal(previous_count + 1, len(changed_checkbox))
self.assert_equal(checkbox, changed_checkbox[-1])
<commit_msg>Check if checkbox value has updated.<commit_after> |
from scriptcore.testing.testcase import TestCase
from scriptcore.console.asciimatics.widgets.checkbox import CheckBox
from asciimatics.widgets import CheckBox as ACheckBox
class TestCheckBox(TestCase):
def test_checkbox(self):
"""
Test the checkbox
:return: void
"""
changed_checkbox = []
def change_handler(checkbox):
changed_checkbox.append(checkbox)
checkbox = CheckBox(self.rand_str(), on_change=change_handler)
self.assert_is_instance(checkbox, ACheckBox)
for value in [True, False, True]:
previous_count = len(changed_checkbox)
checkbox.value = value
self.assert_equal(value, checkbox.value)
self.assert_equal(previous_count + 1, len(changed_checkbox))
self.assert_equal(checkbox, changed_checkbox[-1])
|
from scriptcore.testing.testcase import TestCase
from scriptcore.console.asciimatics.widgets.checkbox import CheckBox
from asciimatics.widgets import CheckBox as ACheckBox
class TestCheckBox(TestCase):
def test_checkbox(self):
"""
Test the checkbox
:return: void
"""
changed_checkbox = []
def change_handler(checkbox):
changed_checkbox.append(checkbox)
checkbox = CheckBox(self.rand_str(), on_change=change_handler)
self.assert_is_instance(checkbox, ACheckBox)
for value in [True, False, True]:
previous_count = len(changed_checkbox)
checkbox.value = value
self.assert_equal(previous_count + 1, len(changed_checkbox))
self.assert_equal(checkbox, changed_checkbox[-1])
Check if checkbox value has updated.
from scriptcore.testing.testcase import TestCase
from scriptcore.console.asciimatics.widgets.checkbox import CheckBox
from asciimatics.widgets import CheckBox as ACheckBox
class TestCheckBox(TestCase):
def test_checkbox(self):
"""
Test the checkbox
:return: void
"""
changed_checkbox = []
def change_handler(checkbox):
changed_checkbox.append(checkbox)
checkbox = CheckBox(self.rand_str(), on_change=change_handler)
self.assert_is_instance(checkbox, ACheckBox)
for value in [True, False, True]:
previous_count = len(changed_checkbox)
checkbox.value = value
self.assert_equal(value, checkbox.value)
self.assert_equal(previous_count + 1, len(changed_checkbox))
self.assert_equal(checkbox, changed_checkbox[-1])
| <commit_before>
from scriptcore.testing.testcase import TestCase
from scriptcore.console.asciimatics.widgets.checkbox import CheckBox
from asciimatics.widgets import CheckBox as ACheckBox
class TestCheckBox(TestCase):
def test_checkbox(self):
"""
Test the checkbox
:return: void
"""
changed_checkbox = []
def change_handler(checkbox):
changed_checkbox.append(checkbox)
checkbox = CheckBox(self.rand_str(), on_change=change_handler)
self.assert_is_instance(checkbox, ACheckBox)
for value in [True, False, True]:
previous_count = len(changed_checkbox)
checkbox.value = value
self.assert_equal(previous_count + 1, len(changed_checkbox))
self.assert_equal(checkbox, changed_checkbox[-1])
<commit_msg>Check if checkbox value has updated.<commit_after>
from scriptcore.testing.testcase import TestCase
from scriptcore.console.asciimatics.widgets.checkbox import CheckBox
from asciimatics.widgets import CheckBox as ACheckBox
class TestCheckBox(TestCase):
def test_checkbox(self):
"""
Test the checkbox
:return: void
"""
changed_checkbox = []
def change_handler(checkbox):
changed_checkbox.append(checkbox)
checkbox = CheckBox(self.rand_str(), on_change=change_handler)
self.assert_is_instance(checkbox, ACheckBox)
for value in [True, False, True]:
previous_count = len(changed_checkbox)
checkbox.value = value
self.assert_equal(value, checkbox.value)
self.assert_equal(previous_count + 1, len(changed_checkbox))
self.assert_equal(checkbox, changed_checkbox[-1])
|
d6b4024d502e189e67d9027a50e472b7c295a83f | misc/migrate_miro_vhs.py | misc/migrate_miro_vhs.py | #!/usr/bin/env python
# -*- encoding: utf-8
import boto3
def get_existing_records(dynamodb_client):
"""
Generates existing Miro records from the SourceData table.
"""
paginator = dynamodb_client.get_paginator('scan')
for page in paginator.paginate(TableName='SourceData'):
for item in page['Items']:
yield item
if __name__ == '__main__':
dynamodb_client = boto3.client('dynamodb')
for item in get_existing_records(dynamodb_client):
print(item)
break
| #!/usr/bin/env python
# -*- encoding: utf-8
import boto3
OLD_TABLE = 'SourceData'
OLD_BUCKET = 'wellcomecollection-vhs-sourcedata'
NEW_TABLE = 'wellcomecollection-vhs-sourcedata-miro'
NEW_BUCKET = 'wellcomecollection-vhs-sourcedata-miro'
def get_existing_records(dynamodb_client):
"""
Generates existing Miro records from the SourceData table.
"""
paginator = dynamodb_client.get_paginator('scan')
for page in paginator.paginate(TableName=OLD_TABLE):
for item in page['Items']:
if 'reindexShard' not in item:
print(item)
if item['sourceName'] != {'S': 'miro'}:
continue
yield item
if __name__ == '__main__':
dynamodb_client = boto3.client('dynamodb')
s3_client = boto3.client('s3')
for item in get_existing_records(dynamodb_client):
del item['sourceName']
s3_client.copy_object(
Bucket=NEW_BUCKET,
Key=item['s3key']['S'].replace('miro/', ''),
CopySource={
'Bucket': OLD_BUCKET,
'Key': item['s3key']['S']
}
)
print(item)
break
| Copy the S3 object into the new bucket | Copy the S3 object into the new bucket
| Python | mit | wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api | #!/usr/bin/env python
# -*- encoding: utf-8
import boto3
def get_existing_records(dynamodb_client):
"""
Generates existing Miro records from the SourceData table.
"""
paginator = dynamodb_client.get_paginator('scan')
for page in paginator.paginate(TableName='SourceData'):
for item in page['Items']:
yield item
if __name__ == '__main__':
dynamodb_client = boto3.client('dynamodb')
for item in get_existing_records(dynamodb_client):
print(item)
break
Copy the S3 object into the new bucket | #!/usr/bin/env python
# -*- encoding: utf-8
import boto3
OLD_TABLE = 'SourceData'
OLD_BUCKET = 'wellcomecollection-vhs-sourcedata'
NEW_TABLE = 'wellcomecollection-vhs-sourcedata-miro'
NEW_BUCKET = 'wellcomecollection-vhs-sourcedata-miro'
def get_existing_records(dynamodb_client):
"""
Generates existing Miro records from the SourceData table.
"""
paginator = dynamodb_client.get_paginator('scan')
for page in paginator.paginate(TableName=OLD_TABLE):
for item in page['Items']:
if 'reindexShard' not in item:
print(item)
if item['sourceName'] != {'S': 'miro'}:
continue
yield item
if __name__ == '__main__':
dynamodb_client = boto3.client('dynamodb')
s3_client = boto3.client('s3')
for item in get_existing_records(dynamodb_client):
del item['sourceName']
s3_client.copy_object(
Bucket=NEW_BUCKET,
Key=item['s3key']['S'].replace('miro/', ''),
CopySource={
'Bucket': OLD_BUCKET,
'Key': item['s3key']['S']
}
)
print(item)
break
| <commit_before>#!/usr/bin/env python
# -*- encoding: utf-8
import boto3
def get_existing_records(dynamodb_client):
"""
Generates existing Miro records from the SourceData table.
"""
paginator = dynamodb_client.get_paginator('scan')
for page in paginator.paginate(TableName='SourceData'):
for item in page['Items']:
yield item
if __name__ == '__main__':
dynamodb_client = boto3.client('dynamodb')
for item in get_existing_records(dynamodb_client):
print(item)
break
<commit_msg>Copy the S3 object into the new bucket<commit_after> | #!/usr/bin/env python
# -*- encoding: utf-8
import boto3
OLD_TABLE = 'SourceData'
OLD_BUCKET = 'wellcomecollection-vhs-sourcedata'
NEW_TABLE = 'wellcomecollection-vhs-sourcedata-miro'
NEW_BUCKET = 'wellcomecollection-vhs-sourcedata-miro'
def get_existing_records(dynamodb_client):
"""
Generates existing Miro records from the SourceData table.
"""
paginator = dynamodb_client.get_paginator('scan')
for page in paginator.paginate(TableName=OLD_TABLE):
for item in page['Items']:
if 'reindexShard' not in item:
print(item)
if item['sourceName'] != {'S': 'miro'}:
continue
yield item
if __name__ == '__main__':
dynamodb_client = boto3.client('dynamodb')
s3_client = boto3.client('s3')
for item in get_existing_records(dynamodb_client):
del item['sourceName']
s3_client.copy_object(
Bucket=NEW_BUCKET,
Key=item['s3key']['S'].replace('miro/', ''),
CopySource={
'Bucket': OLD_BUCKET,
'Key': item['s3key']['S']
}
)
print(item)
break
| #!/usr/bin/env python
# -*- encoding: utf-8
import boto3
def get_existing_records(dynamodb_client):
"""
Generates existing Miro records from the SourceData table.
"""
paginator = dynamodb_client.get_paginator('scan')
for page in paginator.paginate(TableName='SourceData'):
for item in page['Items']:
yield item
if __name__ == '__main__':
dynamodb_client = boto3.client('dynamodb')
for item in get_existing_records(dynamodb_client):
print(item)
break
Copy the S3 object into the new bucket#!/usr/bin/env python
# -*- encoding: utf-8
import boto3
OLD_TABLE = 'SourceData'
OLD_BUCKET = 'wellcomecollection-vhs-sourcedata'
NEW_TABLE = 'wellcomecollection-vhs-sourcedata-miro'
NEW_BUCKET = 'wellcomecollection-vhs-sourcedata-miro'
def get_existing_records(dynamodb_client):
"""
Generates existing Miro records from the SourceData table.
"""
paginator = dynamodb_client.get_paginator('scan')
for page in paginator.paginate(TableName=OLD_TABLE):
for item in page['Items']:
if 'reindexShard' not in item:
print(item)
if item['sourceName'] != {'S': 'miro'}:
continue
yield item
if __name__ == '__main__':
dynamodb_client = boto3.client('dynamodb')
s3_client = boto3.client('s3')
for item in get_existing_records(dynamodb_client):
del item['sourceName']
s3_client.copy_object(
Bucket=NEW_BUCKET,
Key=item['s3key']['S'].replace('miro/', ''),
CopySource={
'Bucket': OLD_BUCKET,
'Key': item['s3key']['S']
}
)
print(item)
break
| <commit_before>#!/usr/bin/env python
# -*- encoding: utf-8
import boto3
def get_existing_records(dynamodb_client):
"""
Generates existing Miro records from the SourceData table.
"""
paginator = dynamodb_client.get_paginator('scan')
for page in paginator.paginate(TableName='SourceData'):
for item in page['Items']:
yield item
if __name__ == '__main__':
dynamodb_client = boto3.client('dynamodb')
for item in get_existing_records(dynamodb_client):
print(item)
break
<commit_msg>Copy the S3 object into the new bucket<commit_after>#!/usr/bin/env python
# -*- encoding: utf-8
import boto3
OLD_TABLE = 'SourceData'
OLD_BUCKET = 'wellcomecollection-vhs-sourcedata'
NEW_TABLE = 'wellcomecollection-vhs-sourcedata-miro'
NEW_BUCKET = 'wellcomecollection-vhs-sourcedata-miro'
def get_existing_records(dynamodb_client):
"""
Generates existing Miro records from the SourceData table.
"""
paginator = dynamodb_client.get_paginator('scan')
for page in paginator.paginate(TableName=OLD_TABLE):
for item in page['Items']:
if 'reindexShard' not in item:
print(item)
if item['sourceName'] != {'S': 'miro'}:
continue
yield item
if __name__ == '__main__':
dynamodb_client = boto3.client('dynamodb')
s3_client = boto3.client('s3')
for item in get_existing_records(dynamodb_client):
del item['sourceName']
s3_client.copy_object(
Bucket=NEW_BUCKET,
Key=item['s3key']['S'].replace('miro/', ''),
CopySource={
'Bucket': OLD_BUCKET,
'Key': item['s3key']['S']
}
)
print(item)
break
|
89ccd37938fc4ca98b0683ecd8c93e48eef3bf35 | forms.py | forms.py | from django import forms
class RemoteSubscribeForm(forms.Form):
username = forms.CharField(max_length=64, label="Username")
profile_url = forms.URLField(label="OMB Compatable Profile URL")
AUTHORIZE_CHOICES = (
('on', 'Accept'),
('off', 'Reject')
)
class AuthorizeForm(forms.Form):
token = forms.CharField(widget=forms.HiddenInput)
authorize_access = forms.ChoiceField(choices=AUTHORIZE_CHOICES,widget=forms.RadioSelect, required=False)
| from django import forms
class RemoteSubscribeForm(forms.Form):
username = forms.CharField(max_length=64, label="Username")
profile_url = forms.URLField(label="OMB Compatable Profile URL")
AUTHORIZE_CHOICES = (
(1, 'Accept'),
(0, 'Reject')
)
class AuthorizeForm(forms.Form):
token = forms.CharField(widget=forms.HiddenInput)
authorize_access = forms.ChoiceField(choices=AUTHORIZE_CHOICES,widget=forms.RadioSelect, required=False)
| Use integer values for the remote subscribe form to comply with the django oauth_provider. | Use integer values for the remote subscribe form to comply with the django oauth_provider.
| Python | mit | skabber/django-omb | from django import forms
class RemoteSubscribeForm(forms.Form):
username = forms.CharField(max_length=64, label="Username")
profile_url = forms.URLField(label="OMB Compatable Profile URL")
AUTHORIZE_CHOICES = (
('on', 'Accept'),
('off', 'Reject')
)
class AuthorizeForm(forms.Form):
token = forms.CharField(widget=forms.HiddenInput)
authorize_access = forms.ChoiceField(choices=AUTHORIZE_CHOICES,widget=forms.RadioSelect, required=False)
Use integer values for the remote subscribe form to comply with the django oauth_provider. | from django import forms
class RemoteSubscribeForm(forms.Form):
username = forms.CharField(max_length=64, label="Username")
profile_url = forms.URLField(label="OMB Compatable Profile URL")
AUTHORIZE_CHOICES = (
(1, 'Accept'),
(0, 'Reject')
)
class AuthorizeForm(forms.Form):
token = forms.CharField(widget=forms.HiddenInput)
authorize_access = forms.ChoiceField(choices=AUTHORIZE_CHOICES,widget=forms.RadioSelect, required=False)
| <commit_before>from django import forms
class RemoteSubscribeForm(forms.Form):
username = forms.CharField(max_length=64, label="Username")
profile_url = forms.URLField(label="OMB Compatable Profile URL")
AUTHORIZE_CHOICES = (
('on', 'Accept'),
('off', 'Reject')
)
class AuthorizeForm(forms.Form):
token = forms.CharField(widget=forms.HiddenInput)
authorize_access = forms.ChoiceField(choices=AUTHORIZE_CHOICES,widget=forms.RadioSelect, required=False)
<commit_msg>Use integer values for the remote subscribe form to comply with the django oauth_provider.<commit_after> | from django import forms
class RemoteSubscribeForm(forms.Form):
username = forms.CharField(max_length=64, label="Username")
profile_url = forms.URLField(label="OMB Compatable Profile URL")
AUTHORIZE_CHOICES = (
(1, 'Accept'),
(0, 'Reject')
)
class AuthorizeForm(forms.Form):
token = forms.CharField(widget=forms.HiddenInput)
authorize_access = forms.ChoiceField(choices=AUTHORIZE_CHOICES,widget=forms.RadioSelect, required=False)
| from django import forms
class RemoteSubscribeForm(forms.Form):
username = forms.CharField(max_length=64, label="Username")
profile_url = forms.URLField(label="OMB Compatable Profile URL")
AUTHORIZE_CHOICES = (
('on', 'Accept'),
('off', 'Reject')
)
class AuthorizeForm(forms.Form):
token = forms.CharField(widget=forms.HiddenInput)
authorize_access = forms.ChoiceField(choices=AUTHORIZE_CHOICES,widget=forms.RadioSelect, required=False)
Use integer values for the remote subscribe form to comply with the django oauth_provider.from django import forms
class RemoteSubscribeForm(forms.Form):
username = forms.CharField(max_length=64, label="Username")
profile_url = forms.URLField(label="OMB Compatable Profile URL")
AUTHORIZE_CHOICES = (
(1, 'Accept'),
(0, 'Reject')
)
class AuthorizeForm(forms.Form):
token = forms.CharField(widget=forms.HiddenInput)
authorize_access = forms.ChoiceField(choices=AUTHORIZE_CHOICES,widget=forms.RadioSelect, required=False)
| <commit_before>from django import forms
class RemoteSubscribeForm(forms.Form):
username = forms.CharField(max_length=64, label="Username")
profile_url = forms.URLField(label="OMB Compatable Profile URL")
AUTHORIZE_CHOICES = (
('on', 'Accept'),
('off', 'Reject')
)
class AuthorizeForm(forms.Form):
token = forms.CharField(widget=forms.HiddenInput)
authorize_access = forms.ChoiceField(choices=AUTHORIZE_CHOICES,widget=forms.RadioSelect, required=False)
<commit_msg>Use integer values for the remote subscribe form to comply with the django oauth_provider.<commit_after>from django import forms
class RemoteSubscribeForm(forms.Form):
username = forms.CharField(max_length=64, label="Username")
profile_url = forms.URLField(label="OMB Compatable Profile URL")
AUTHORIZE_CHOICES = (
(1, 'Accept'),
(0, 'Reject')
)
class AuthorizeForm(forms.Form):
token = forms.CharField(widget=forms.HiddenInput)
authorize_access = forms.ChoiceField(choices=AUTHORIZE_CHOICES,widget=forms.RadioSelect, required=False)
|
df0d950747d80024f962216f17f2d3f967e4363b | source/hostel_huptainer/environment.py | source/hostel_huptainer/environment.py | """Inspects dictionary for desired keys and stores for later usage."""
from hostel_huptainer.errors import InputError
class Environment(object):
"""Searches ``environment`` for expected variables and stores them."""
def __init__(self, environment):
self.hostname = environment.get('CERTBOT_HOSTNAME')
if not self.hostname:
raise InputError('CERTBOT_HOSTNAME environment variable is missing.')
| """Inspects dictionary for desired keys and stores for later usage."""
from hostel_huptainer.errors import InputError
class Environment(object):
"""Searches ``environment`` for expected variables and stores them."""
def __init__(self, environment):
self.hostname = environment.get('CERTBOT_HOSTNAME')
if not self.hostname:
raise InputError(
'CERTBOT_HOSTNAME environment variable is missing.')
| Decrease line length for pycodestyle compliance | Decrease line length for pycodestyle compliance
A line exceeded 75 characters in length.
| Python | apache-2.0 | Jitsusama/hostel-huptainer | """Inspects dictionary for desired keys and stores for later usage."""
from hostel_huptainer.errors import InputError
class Environment(object):
"""Searches ``environment`` for expected variables and stores them."""
def __init__(self, environment):
self.hostname = environment.get('CERTBOT_HOSTNAME')
if not self.hostname:
raise InputError('CERTBOT_HOSTNAME environment variable is missing.')
Decrease line length for pycodestyle compliance
A line exceeded 75 characters in length. | """Inspects dictionary for desired keys and stores for later usage."""
from hostel_huptainer.errors import InputError
class Environment(object):
"""Searches ``environment`` for expected variables and stores them."""
def __init__(self, environment):
self.hostname = environment.get('CERTBOT_HOSTNAME')
if not self.hostname:
raise InputError(
'CERTBOT_HOSTNAME environment variable is missing.')
| <commit_before>"""Inspects dictionary for desired keys and stores for later usage."""
from hostel_huptainer.errors import InputError
class Environment(object):
"""Searches ``environment`` for expected variables and stores them."""
def __init__(self, environment):
self.hostname = environment.get('CERTBOT_HOSTNAME')
if not self.hostname:
raise InputError('CERTBOT_HOSTNAME environment variable is missing.')
<commit_msg>Decrease line length for pycodestyle compliance
A line exceeded 75 characters in length.<commit_after> | """Inspects dictionary for desired keys and stores for later usage."""
from hostel_huptainer.errors import InputError
class Environment(object):
"""Searches ``environment`` for expected variables and stores them."""
def __init__(self, environment):
self.hostname = environment.get('CERTBOT_HOSTNAME')
if not self.hostname:
raise InputError(
'CERTBOT_HOSTNAME environment variable is missing.')
| """Inspects dictionary for desired keys and stores for later usage."""
from hostel_huptainer.errors import InputError
class Environment(object):
"""Searches ``environment`` for expected variables and stores them."""
def __init__(self, environment):
self.hostname = environment.get('CERTBOT_HOSTNAME')
if not self.hostname:
raise InputError('CERTBOT_HOSTNAME environment variable is missing.')
Decrease line length for pycodestyle compliance
A line exceeded 75 characters in length."""Inspects dictionary for desired keys and stores for later usage."""
from hostel_huptainer.errors import InputError
class Environment(object):
"""Searches ``environment`` for expected variables and stores them."""
def __init__(self, environment):
self.hostname = environment.get('CERTBOT_HOSTNAME')
if not self.hostname:
raise InputError(
'CERTBOT_HOSTNAME environment variable is missing.')
| <commit_before>"""Inspects dictionary for desired keys and stores for later usage."""
from hostel_huptainer.errors import InputError
class Environment(object):
"""Searches ``environment`` for expected variables and stores them."""
def __init__(self, environment):
self.hostname = environment.get('CERTBOT_HOSTNAME')
if not self.hostname:
raise InputError('CERTBOT_HOSTNAME environment variable is missing.')
<commit_msg>Decrease line length for pycodestyle compliance
A line exceeded 75 characters in length.<commit_after>"""Inspects dictionary for desired keys and stores for later usage."""
from hostel_huptainer.errors import InputError
class Environment(object):
"""Searches ``environment`` for expected variables and stores them."""
def __init__(self, environment):
self.hostname = environment.get('CERTBOT_HOSTNAME')
if not self.hostname:
raise InputError(
'CERTBOT_HOSTNAME environment variable is missing.')
|
7a7de7b7a44180f4ea3b6d5b3334ce406eb72b38 | discussion/migrations/0002_discussionthread_updated.py | discussion/migrations/0002_discussionthread_updated.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('discussion', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='discussionthread',
name='updated',
field=models.DateTimeField(default=datetime.datetime(2015, 7, 31, 15, 51, 36, 361733, tzinfo=utc), auto_now=True),
preserve_default=False,
),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.utils import timezone
class Migration(migrations.Migration):
dependencies = [
('discussion', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='discussionthread',
name='updated',
field=models.DateTimeField(default=timezone.now(), auto_now=True),
preserve_default=False,
),
]
| Fix because we're not timezone aware. | Fix because we're not timezone aware.
| Python | mit | btomaszewski/webdoctor-server | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('discussion', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='discussionthread',
name='updated',
field=models.DateTimeField(default=datetime.datetime(2015, 7, 31, 15, 51, 36, 361733, tzinfo=utc), auto_now=True),
preserve_default=False,
),
]
Fix because we're not timezone aware. | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.utils import timezone
class Migration(migrations.Migration):
dependencies = [
('discussion', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='discussionthread',
name='updated',
field=models.DateTimeField(default=timezone.now(), auto_now=True),
preserve_default=False,
),
]
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('discussion', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='discussionthread',
name='updated',
field=models.DateTimeField(default=datetime.datetime(2015, 7, 31, 15, 51, 36, 361733, tzinfo=utc), auto_now=True),
preserve_default=False,
),
]
<commit_msg>Fix because we're not timezone aware.<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.utils import timezone
class Migration(migrations.Migration):
dependencies = [
('discussion', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='discussionthread',
name='updated',
field=models.DateTimeField(default=timezone.now(), auto_now=True),
preserve_default=False,
),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('discussion', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='discussionthread',
name='updated',
field=models.DateTimeField(default=datetime.datetime(2015, 7, 31, 15, 51, 36, 361733, tzinfo=utc), auto_now=True),
preserve_default=False,
),
]
Fix because we're not timezone aware.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.utils import timezone
class Migration(migrations.Migration):
dependencies = [
('discussion', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='discussionthread',
name='updated',
field=models.DateTimeField(default=timezone.now(), auto_now=True),
preserve_default=False,
),
]
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('discussion', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='discussionthread',
name='updated',
field=models.DateTimeField(default=datetime.datetime(2015, 7, 31, 15, 51, 36, 361733, tzinfo=utc), auto_now=True),
preserve_default=False,
),
]
<commit_msg>Fix because we're not timezone aware.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.utils import timezone
class Migration(migrations.Migration):
dependencies = [
('discussion', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='discussionthread',
name='updated',
field=models.DateTimeField(default=timezone.now(), auto_now=True),
preserve_default=False,
),
]
|
bab10c6a1e9c8548fe778817595aa18baa5e3cdb | account_fiscal_position_no_source_tax/account.py | account_fiscal_position_no_source_tax/account.py | from openerp import models, api, fields
class account_fiscal_position(models.Model):
_inherit = 'account.fiscal.position'
@api.v7
def map_tax(self, cr, uid, fposition_id, taxes, context=None):
result = super(account_fiscal_position, self).map_tax(
cr, uid, fposition_id, taxes, context=context)
taxes_without_src_ids = [
x.tax_dest_id.id for x in fposition_id.tax_ids if not x.tax_src_id]
result = set(result) | set(taxes_without_src_ids)
return list(result)
@api.v8 # noqa
def map_tax(self, taxes):
result = super(account_fiscal_position, self).map_tax(taxes)
taxes_without_src_ids = [
x.tax_dest_id.id for x in self.tax_ids if not x.tax_src_id]
result += result.browse(taxes_without_src_ids)
return result
class account_fiscal_position_tax(models.Model):
_inherit = 'account.fiscal.position.tax'
tax_src_id = fields.Many2one(required=False)
| from openerp import models, api, fields
class account_fiscal_position(models.Model):
_inherit = 'account.fiscal.position'
@api.v7
def map_tax(self, cr, uid, fposition_id, taxes, context=None):
result = super(account_fiscal_position, self).map_tax(
cr, uid, fposition_id, taxes, context=context)
print 'fposition_id', fposition_id
if fposition_id:
taxes_without_src_ids = [
x.tax_dest_id.id for x in fposition_id.tax_ids if not x.tax_src_id]
result = set(result) | set(taxes_without_src_ids)
return list(result)
@api.v8 # noqa
def map_tax(self, taxes):
result = super(account_fiscal_position, self).map_tax(taxes)
taxes_without_src_ids = [
x.tax_dest_id.id for x in self.tax_ids if not x.tax_src_id]
result += result.browse(taxes_without_src_ids)
return result
class account_fiscal_position_tax(models.Model):
_inherit = 'account.fiscal.position.tax'
tax_src_id = fields.Many2one(required=False)
| FIX fiscal position no source tax | FIX fiscal position no source tax
| Python | agpl-3.0 | csrocha/account_journal_payment_subtype,csrocha/account_voucher_payline | from openerp import models, api, fields
class account_fiscal_position(models.Model):
_inherit = 'account.fiscal.position'
@api.v7
def map_tax(self, cr, uid, fposition_id, taxes, context=None):
result = super(account_fiscal_position, self).map_tax(
cr, uid, fposition_id, taxes, context=context)
taxes_without_src_ids = [
x.tax_dest_id.id for x in fposition_id.tax_ids if not x.tax_src_id]
result = set(result) | set(taxes_without_src_ids)
return list(result)
@api.v8 # noqa
def map_tax(self, taxes):
result = super(account_fiscal_position, self).map_tax(taxes)
taxes_without_src_ids = [
x.tax_dest_id.id for x in self.tax_ids if not x.tax_src_id]
result += result.browse(taxes_without_src_ids)
return result
class account_fiscal_position_tax(models.Model):
_inherit = 'account.fiscal.position.tax'
tax_src_id = fields.Many2one(required=False)
FIX fiscal position no source tax | from openerp import models, api, fields
class account_fiscal_position(models.Model):
_inherit = 'account.fiscal.position'
@api.v7
def map_tax(self, cr, uid, fposition_id, taxes, context=None):
result = super(account_fiscal_position, self).map_tax(
cr, uid, fposition_id, taxes, context=context)
print 'fposition_id', fposition_id
if fposition_id:
taxes_without_src_ids = [
x.tax_dest_id.id for x in fposition_id.tax_ids if not x.tax_src_id]
result = set(result) | set(taxes_without_src_ids)
return list(result)
@api.v8 # noqa
def map_tax(self, taxes):
result = super(account_fiscal_position, self).map_tax(taxes)
taxes_without_src_ids = [
x.tax_dest_id.id for x in self.tax_ids if not x.tax_src_id]
result += result.browse(taxes_without_src_ids)
return result
class account_fiscal_position_tax(models.Model):
_inherit = 'account.fiscal.position.tax'
tax_src_id = fields.Many2one(required=False)
| <commit_before>from openerp import models, api, fields
class account_fiscal_position(models.Model):
_inherit = 'account.fiscal.position'
@api.v7
def map_tax(self, cr, uid, fposition_id, taxes, context=None):
result = super(account_fiscal_position, self).map_tax(
cr, uid, fposition_id, taxes, context=context)
taxes_without_src_ids = [
x.tax_dest_id.id for x in fposition_id.tax_ids if not x.tax_src_id]
result = set(result) | set(taxes_without_src_ids)
return list(result)
@api.v8 # noqa
def map_tax(self, taxes):
result = super(account_fiscal_position, self).map_tax(taxes)
taxes_without_src_ids = [
x.tax_dest_id.id for x in self.tax_ids if not x.tax_src_id]
result += result.browse(taxes_without_src_ids)
return result
class account_fiscal_position_tax(models.Model):
_inherit = 'account.fiscal.position.tax'
tax_src_id = fields.Many2one(required=False)
<commit_msg>FIX fiscal position no source tax<commit_after> | from openerp import models, api, fields
class account_fiscal_position(models.Model):
_inherit = 'account.fiscal.position'
@api.v7
def map_tax(self, cr, uid, fposition_id, taxes, context=None):
result = super(account_fiscal_position, self).map_tax(
cr, uid, fposition_id, taxes, context=context)
print 'fposition_id', fposition_id
if fposition_id:
taxes_without_src_ids = [
x.tax_dest_id.id for x in fposition_id.tax_ids if not x.tax_src_id]
result = set(result) | set(taxes_without_src_ids)
return list(result)
@api.v8 # noqa
def map_tax(self, taxes):
result = super(account_fiscal_position, self).map_tax(taxes)
taxes_without_src_ids = [
x.tax_dest_id.id for x in self.tax_ids if not x.tax_src_id]
result += result.browse(taxes_without_src_ids)
return result
class account_fiscal_position_tax(models.Model):
_inherit = 'account.fiscal.position.tax'
tax_src_id = fields.Many2one(required=False)
| from openerp import models, api, fields
class account_fiscal_position(models.Model):
_inherit = 'account.fiscal.position'
@api.v7
def map_tax(self, cr, uid, fposition_id, taxes, context=None):
result = super(account_fiscal_position, self).map_tax(
cr, uid, fposition_id, taxes, context=context)
taxes_without_src_ids = [
x.tax_dest_id.id for x in fposition_id.tax_ids if not x.tax_src_id]
result = set(result) | set(taxes_without_src_ids)
return list(result)
@api.v8 # noqa
def map_tax(self, taxes):
result = super(account_fiscal_position, self).map_tax(taxes)
taxes_without_src_ids = [
x.tax_dest_id.id for x in self.tax_ids if not x.tax_src_id]
result += result.browse(taxes_without_src_ids)
return result
class account_fiscal_position_tax(models.Model):
_inherit = 'account.fiscal.position.tax'
tax_src_id = fields.Many2one(required=False)
FIX fiscal position no source taxfrom openerp import models, api, fields
class account_fiscal_position(models.Model):
_inherit = 'account.fiscal.position'
@api.v7
def map_tax(self, cr, uid, fposition_id, taxes, context=None):
result = super(account_fiscal_position, self).map_tax(
cr, uid, fposition_id, taxes, context=context)
print 'fposition_id', fposition_id
if fposition_id:
taxes_without_src_ids = [
x.tax_dest_id.id for x in fposition_id.tax_ids if not x.tax_src_id]
result = set(result) | set(taxes_without_src_ids)
return list(result)
@api.v8 # noqa
def map_tax(self, taxes):
result = super(account_fiscal_position, self).map_tax(taxes)
taxes_without_src_ids = [
x.tax_dest_id.id for x in self.tax_ids if not x.tax_src_id]
result += result.browse(taxes_without_src_ids)
return result
class account_fiscal_position_tax(models.Model):
_inherit = 'account.fiscal.position.tax'
tax_src_id = fields.Many2one(required=False)
| <commit_before>from openerp import models, api, fields
class account_fiscal_position(models.Model):
_inherit = 'account.fiscal.position'
@api.v7
def map_tax(self, cr, uid, fposition_id, taxes, context=None):
result = super(account_fiscal_position, self).map_tax(
cr, uid, fposition_id, taxes, context=context)
taxes_without_src_ids = [
x.tax_dest_id.id for x in fposition_id.tax_ids if not x.tax_src_id]
result = set(result) | set(taxes_without_src_ids)
return list(result)
@api.v8 # noqa
def map_tax(self, taxes):
result = super(account_fiscal_position, self).map_tax(taxes)
taxes_without_src_ids = [
x.tax_dest_id.id for x in self.tax_ids if not x.tax_src_id]
result += result.browse(taxes_without_src_ids)
return result
class account_fiscal_position_tax(models.Model):
_inherit = 'account.fiscal.position.tax'
tax_src_id = fields.Many2one(required=False)
<commit_msg>FIX fiscal position no source tax<commit_after>from openerp import models, api, fields
class account_fiscal_position(models.Model):
_inherit = 'account.fiscal.position'
@api.v7
def map_tax(self, cr, uid, fposition_id, taxes, context=None):
result = super(account_fiscal_position, self).map_tax(
cr, uid, fposition_id, taxes, context=context)
print 'fposition_id', fposition_id
if fposition_id:
taxes_without_src_ids = [
x.tax_dest_id.id for x in fposition_id.tax_ids if not x.tax_src_id]
result = set(result) | set(taxes_without_src_ids)
return list(result)
@api.v8 # noqa
def map_tax(self, taxes):
result = super(account_fiscal_position, self).map_tax(taxes)
taxes_without_src_ids = [
x.tax_dest_id.id for x in self.tax_ids if not x.tax_src_id]
result += result.browse(taxes_without_src_ids)
return result
class account_fiscal_position_tax(models.Model):
_inherit = 'account.fiscal.position.tax'
tax_src_id = fields.Many2one(required=False)
|
5dec1db567ef7c2b6ea1cca3ddd02612cb9f7d8a | Lib/encodings/bz2_codec.py | Lib/encodings/bz2_codec.py | """ Python 'bz2_codec' Codec - bz2 compression encoding
Unlike most of the other codecs which target Unicode, this codec
will return Python string objects for both encode and decode.
Adapted by Raymond Hettinger from zlib_codec.py which was written
by Marc-Andre Lemburg (mal@lemburg.com).
"""
import codecs
import bz2
def encode(input, errors='strict'):
assert errors == 'strict'
output = bz2.compress(input)
return (output, len(input))
def decode(input, errors='strict'):
assert errors == 'strict'
output = bz2.decompress(input)
return (output, len(input))
### encodings module API
def getregentry():
return (encode, decode, codecs.StreamReader, codecs.StreamWriter)
| """ Python 'bz2_codec' Codec - bz2 compression encoding
Unlike most of the other codecs which target Unicode, this codec
will return Python string objects for both encode and decode.
Adapted by Raymond Hettinger from zlib_codec.py which was written
by Marc-Andre Lemburg (mal@lemburg.com).
"""
import codecs
import bz2 # this codec needs the optional bz2 module !
### Codec APIs
def bz2_encode(input,errors='strict'):
""" Encodes the object input and returns a tuple (output
object, length consumed).
errors defines the error handling to apply. It defaults to
'strict' handling which is the only currently supported
error handling for this codec.
"""
assert errors == 'strict'
output = bz2.compress(input)
return (output, len(input))
def bz2_decode(input,errors='strict'):
""" Decodes the object input and returns a tuple (output
object, length consumed).
input must be an object which provides the bf_getreadbuf
buffer slot. Python strings, buffer objects and memory
mapped files are examples of objects providing this slot.
errors defines the error handling to apply. It defaults to
'strict' handling which is the only currently supported
error handling for this codec.
"""
assert errors == 'strict'
output = bz2.decompress(input)
return (output, len(input))
class Codec(codecs.Codec):
def encode(self, input, errors='strict'):
return bz2_encode(input, errors)
def decode(self, input, errors='strict'):
return bz2_decode(input, errors)
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return (bz2_encode,bz2_decode,StreamReader,StreamWriter)
| Revert previous change. MAL preferred the old version. | Revert previous change. MAL preferred the old version.
| Python | mit | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | """ Python 'bz2_codec' Codec - bz2 compression encoding
Unlike most of the other codecs which target Unicode, this codec
will return Python string objects for both encode and decode.
Adapted by Raymond Hettinger from zlib_codec.py which was written
by Marc-Andre Lemburg (mal@lemburg.com).
"""
import codecs
import bz2
def encode(input, errors='strict'):
assert errors == 'strict'
output = bz2.compress(input)
return (output, len(input))
def decode(input, errors='strict'):
assert errors == 'strict'
output = bz2.decompress(input)
return (output, len(input))
### encodings module API
def getregentry():
return (encode, decode, codecs.StreamReader, codecs.StreamWriter)
Revert previous change. MAL preferred the old version. | """ Python 'bz2_codec' Codec - bz2 compression encoding
Unlike most of the other codecs which target Unicode, this codec
will return Python string objects for both encode and decode.
Adapted by Raymond Hettinger from zlib_codec.py which was written
by Marc-Andre Lemburg (mal@lemburg.com).
"""
import codecs
import bz2 # this codec needs the optional bz2 module !
### Codec APIs
def bz2_encode(input,errors='strict'):
""" Encodes the object input and returns a tuple (output
object, length consumed).
errors defines the error handling to apply. It defaults to
'strict' handling which is the only currently supported
error handling for this codec.
"""
assert errors == 'strict'
output = bz2.compress(input)
return (output, len(input))
def bz2_decode(input,errors='strict'):
""" Decodes the object input and returns a tuple (output
object, length consumed).
input must be an object which provides the bf_getreadbuf
buffer slot. Python strings, buffer objects and memory
mapped files are examples of objects providing this slot.
errors defines the error handling to apply. It defaults to
'strict' handling which is the only currently supported
error handling for this codec.
"""
assert errors == 'strict'
output = bz2.decompress(input)
return (output, len(input))
class Codec(codecs.Codec):
def encode(self, input, errors='strict'):
return bz2_encode(input, errors)
def decode(self, input, errors='strict'):
return bz2_decode(input, errors)
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return (bz2_encode,bz2_decode,StreamReader,StreamWriter)
| <commit_before>""" Python 'bz2_codec' Codec - bz2 compression encoding
Unlike most of the other codecs which target Unicode, this codec
will return Python string objects for both encode and decode.
Adapted by Raymond Hettinger from zlib_codec.py which was written
by Marc-Andre Lemburg (mal@lemburg.com).
"""
import codecs
import bz2
def encode(input, errors='strict'):
assert errors == 'strict'
output = bz2.compress(input)
return (output, len(input))
def decode(input, errors='strict'):
assert errors == 'strict'
output = bz2.decompress(input)
return (output, len(input))
### encodings module API
def getregentry():
return (encode, decode, codecs.StreamReader, codecs.StreamWriter)
<commit_msg>Revert previous change. MAL preferred the old version.<commit_after> | """ Python 'bz2_codec' Codec - bz2 compression encoding
Unlike most of the other codecs which target Unicode, this codec
will return Python string objects for both encode and decode.
Adapted by Raymond Hettinger from zlib_codec.py which was written
by Marc-Andre Lemburg (mal@lemburg.com).
"""
import codecs
import bz2 # this codec needs the optional bz2 module !
### Codec APIs
def bz2_encode(input,errors='strict'):
""" Encodes the object input and returns a tuple (output
object, length consumed).
errors defines the error handling to apply. It defaults to
'strict' handling which is the only currently supported
error handling for this codec.
"""
assert errors == 'strict'
output = bz2.compress(input)
return (output, len(input))
def bz2_decode(input,errors='strict'):
""" Decodes the object input and returns a tuple (output
object, length consumed).
input must be an object which provides the bf_getreadbuf
buffer slot. Python strings, buffer objects and memory
mapped files are examples of objects providing this slot.
errors defines the error handling to apply. It defaults to
'strict' handling which is the only currently supported
error handling for this codec.
"""
assert errors == 'strict'
output = bz2.decompress(input)
return (output, len(input))
class Codec(codecs.Codec):
def encode(self, input, errors='strict'):
return bz2_encode(input, errors)
def decode(self, input, errors='strict'):
return bz2_decode(input, errors)
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return (bz2_encode,bz2_decode,StreamReader,StreamWriter)
| """ Python 'bz2_codec' Codec - bz2 compression encoding
Unlike most of the other codecs which target Unicode, this codec
will return Python string objects for both encode and decode.
Adapted by Raymond Hettinger from zlib_codec.py which was written
by Marc-Andre Lemburg (mal@lemburg.com).
"""
import codecs
import bz2
def encode(input, errors='strict'):
assert errors == 'strict'
output = bz2.compress(input)
return (output, len(input))
def decode(input, errors='strict'):
assert errors == 'strict'
output = bz2.decompress(input)
return (output, len(input))
### encodings module API
def getregentry():
return (encode, decode, codecs.StreamReader, codecs.StreamWriter)
Revert previous change. MAL preferred the old version.""" Python 'bz2_codec' Codec - bz2 compression encoding
Unlike most of the other codecs which target Unicode, this codec
will return Python string objects for both encode and decode.
Adapted by Raymond Hettinger from zlib_codec.py which was written
by Marc-Andre Lemburg (mal@lemburg.com).
"""
import codecs
import bz2 # this codec needs the optional bz2 module !
### Codec APIs
def bz2_encode(input,errors='strict'):
""" Encodes the object input and returns a tuple (output
object, length consumed).
errors defines the error handling to apply. It defaults to
'strict' handling which is the only currently supported
error handling for this codec.
"""
assert errors == 'strict'
output = bz2.compress(input)
return (output, len(input))
def bz2_decode(input,errors='strict'):
""" Decodes the object input and returns a tuple (output
object, length consumed).
input must be an object which provides the bf_getreadbuf
buffer slot. Python strings, buffer objects and memory
mapped files are examples of objects providing this slot.
errors defines the error handling to apply. It defaults to
'strict' handling which is the only currently supported
error handling for this codec.
"""
assert errors == 'strict'
output = bz2.decompress(input)
return (output, len(input))
class Codec(codecs.Codec):
def encode(self, input, errors='strict'):
return bz2_encode(input, errors)
def decode(self, input, errors='strict'):
return bz2_decode(input, errors)
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return (bz2_encode,bz2_decode,StreamReader,StreamWriter)
| <commit_before>""" Python 'bz2_codec' Codec - bz2 compression encoding
Unlike most of the other codecs which target Unicode, this codec
will return Python string objects for both encode and decode.
Adapted by Raymond Hettinger from zlib_codec.py which was written
by Marc-Andre Lemburg (mal@lemburg.com).
"""
import codecs
import bz2
def encode(input, errors='strict'):
assert errors == 'strict'
output = bz2.compress(input)
return (output, len(input))
def decode(input, errors='strict'):
assert errors == 'strict'
output = bz2.decompress(input)
return (output, len(input))
### encodings module API
def getregentry():
return (encode, decode, codecs.StreamReader, codecs.StreamWriter)
<commit_msg>Revert previous change. MAL preferred the old version.<commit_after>""" Python 'bz2_codec' Codec - bz2 compression encoding
Unlike most of the other codecs which target Unicode, this codec
will return Python string objects for both encode and decode.
Adapted by Raymond Hettinger from zlib_codec.py which was written
by Marc-Andre Lemburg (mal@lemburg.com).
"""
import codecs
import bz2 # this codec needs the optional bz2 module !
### Codec APIs
def bz2_encode(input,errors='strict'):
""" Encodes the object input and returns a tuple (output
object, length consumed).
errors defines the error handling to apply. It defaults to
'strict' handling which is the only currently supported
error handling for this codec.
"""
assert errors == 'strict'
output = bz2.compress(input)
return (output, len(input))
def bz2_decode(input,errors='strict'):
""" Decodes the object input and returns a tuple (output
object, length consumed).
input must be an object which provides the bf_getreadbuf
buffer slot. Python strings, buffer objects and memory
mapped files are examples of objects providing this slot.
errors defines the error handling to apply. It defaults to
'strict' handling which is the only currently supported
error handling for this codec.
"""
assert errors == 'strict'
output = bz2.decompress(input)
return (output, len(input))
class Codec(codecs.Codec):
def encode(self, input, errors='strict'):
return bz2_encode(input, errors)
def decode(self, input, errors='strict'):
return bz2_decode(input, errors)
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return (bz2_encode,bz2_decode,StreamReader,StreamWriter)
|
090c485a6d6e0eb9b078ef5401ad0364f8c494d7 | StatCache.py | StatCache.py | import numpy as np
def circular_store(redis, stat_name, stat_value):
stat_value = np.average(np.append(np.asfarray(redis.lrange(stat_name, 0, 2)), stat_value))
redis.lpush(stat_name, stat_value)
redis.ltrim(stat_name, 0, 100)
def circular_all(redis, stat_name):
return np.asfarray(redis.lrange(stat_name, 0, 100), float).tolist() | import numpy as np
def circular_store(redis, stat_name, stat_value):
stat_value = np.average(np.append(np.asfarray(redis.lrange(stat_name, 0, 1)), stat_value))
redis.lpush(stat_name, stat_value)
redis.ltrim(stat_name, 0, 100)
def circular_all(redis, stat_name):
return np.asfarray(redis.lrange(stat_name, 0, 100), float).tolist() | Use moving average n=1 instead of n=2 | Use moving average n=1 instead of n=2
| Python | mit | nettube/mbtapuller,nettube/mbtapuller,nettube/mbtapuller | import numpy as np
def circular_store(redis, stat_name, stat_value):
stat_value = np.average(np.append(np.asfarray(redis.lrange(stat_name, 0, 2)), stat_value))
redis.lpush(stat_name, stat_value)
redis.ltrim(stat_name, 0, 100)
def circular_all(redis, stat_name):
return np.asfarray(redis.lrange(stat_name, 0, 100), float).tolist()Use moving average n=1 instead of n=2 | import numpy as np
def circular_store(redis, stat_name, stat_value):
stat_value = np.average(np.append(np.asfarray(redis.lrange(stat_name, 0, 1)), stat_value))
redis.lpush(stat_name, stat_value)
redis.ltrim(stat_name, 0, 100)
def circular_all(redis, stat_name):
return np.asfarray(redis.lrange(stat_name, 0, 100), float).tolist() | <commit_before>import numpy as np
def circular_store(redis, stat_name, stat_value):
stat_value = np.average(np.append(np.asfarray(redis.lrange(stat_name, 0, 2)), stat_value))
redis.lpush(stat_name, stat_value)
redis.ltrim(stat_name, 0, 100)
def circular_all(redis, stat_name):
return np.asfarray(redis.lrange(stat_name, 0, 100), float).tolist()<commit_msg>Use moving average n=1 instead of n=2<commit_after> | import numpy as np
def circular_store(redis, stat_name, stat_value):
stat_value = np.average(np.append(np.asfarray(redis.lrange(stat_name, 0, 1)), stat_value))
redis.lpush(stat_name, stat_value)
redis.ltrim(stat_name, 0, 100)
def circular_all(redis, stat_name):
return np.asfarray(redis.lrange(stat_name, 0, 100), float).tolist() | import numpy as np
def circular_store(redis, stat_name, stat_value):
stat_value = np.average(np.append(np.asfarray(redis.lrange(stat_name, 0, 2)), stat_value))
redis.lpush(stat_name, stat_value)
redis.ltrim(stat_name, 0, 100)
def circular_all(redis, stat_name):
return np.asfarray(redis.lrange(stat_name, 0, 100), float).tolist()Use moving average n=1 instead of n=2import numpy as np
def circular_store(redis, stat_name, stat_value):
stat_value = np.average(np.append(np.asfarray(redis.lrange(stat_name, 0, 1)), stat_value))
redis.lpush(stat_name, stat_value)
redis.ltrim(stat_name, 0, 100)
def circular_all(redis, stat_name):
return np.asfarray(redis.lrange(stat_name, 0, 100), float).tolist() | <commit_before>import numpy as np
def circular_store(redis, stat_name, stat_value):
stat_value = np.average(np.append(np.asfarray(redis.lrange(stat_name, 0, 2)), stat_value))
redis.lpush(stat_name, stat_value)
redis.ltrim(stat_name, 0, 100)
def circular_all(redis, stat_name):
return np.asfarray(redis.lrange(stat_name, 0, 100), float).tolist()<commit_msg>Use moving average n=1 instead of n=2<commit_after>import numpy as np
def circular_store(redis, stat_name, stat_value):
stat_value = np.average(np.append(np.asfarray(redis.lrange(stat_name, 0, 1)), stat_value))
redis.lpush(stat_name, stat_value)
redis.ltrim(stat_name, 0, 100)
def circular_all(redis, stat_name):
return np.asfarray(redis.lrange(stat_name, 0, 100), float).tolist() |
8e308f1928fed8713c5b18ada2032b92f4dceeff | blocker.py | blocker.py | import os
import sys
import json
import utils
from datetime import datetime
from experiments import *
RESULTS_DIR = "results"
DATA_DIR = "data"
EXPERIMENTS = {
"http_request" : http_request.HTTPRequestExperiment,
"tcp_connect" : tcp_connect.TCPConnectExperiment,
"turkey" : turkey.TurkeyExperiment
}
def get_result_file():
result_file = "result-%s.txt" % (datetime.now().isoformat())
return os.path.join(RESULTS_DIR, result_file)
def get_input_file(experiment_name):
input_file = "%s.txt" % (experiment_name)
return os.path.join(DATA_DIR, input_file)
def run():
result_file = get_result_file()
result_file = open(result_file, "w")
results = {}
for name, exp in EXPERIMENTS.items():
input_file = get_input_file(name)
if not os.path.isfile(input_file):
print "Input file for %s does not exist!" % name
return
input_file = open(input_file)
exp = exp(input_file)
exp.run()
input_file.close()
results[name] = exp.results
json.dump(results, result_file)
result_file.close()
if __name__ == "__main__":
run()
| import os
import sys
import json
import utils
from datetime import datetime
from experiments import *
RESULTS_DIR = "results"
DATA_DIR = "data"
EXPERIMENTS = {
"http_request" : http_request.HTTPRequestExperiment,
"tcp_connect" : tcp_connect.TCPConnectExperiment,
"turkey" : turkey.TurkeyExperiment
}
def get_result_file():
result_file = "result-%s.json" % (datetime.now().isoformat())
return os.path.join(RESULTS_DIR, result_file)
def get_input_file(experiment_name):
input_file = "%s.txt" % (experiment_name)
return os.path.join(DATA_DIR, input_file)
def run():
result_file = get_result_file()
result_file = open(result_file, "w")
results = {}
for name, exp in EXPERIMENTS.items():
input_file = get_input_file(name)
if not os.path.isfile(input_file):
print "Input file for %s does not exist!" % name
return
input_file = open(input_file)
exp = exp(input_file)
exp.run()
input_file.close()
results[name] = exp.results
json.dump(results, result_file)
result_file.close()
if __name__ == "__main__":
run()
| Store json file as .json | Store json file as .json
| Python | mit | rpanah/centinel,rpanah/centinel,ben-jones/centinel,iclab/centinel,lianke123321/centinel,gsathya/centinel-client,iclab/centinel,gsathya/blocker,JASONews/centinel,Ashish1805/centinel,rpanah/centinel,lianke123321/centinel,gsathya/centinel-client,iclab/centinel,lianke123321/centinel | import os
import sys
import json
import utils
from datetime import datetime
from experiments import *
RESULTS_DIR = "results"
DATA_DIR = "data"
EXPERIMENTS = {
"http_request" : http_request.HTTPRequestExperiment,
"tcp_connect" : tcp_connect.TCPConnectExperiment,
"turkey" : turkey.TurkeyExperiment
}
def get_result_file():
result_file = "result-%s.txt" % (datetime.now().isoformat())
return os.path.join(RESULTS_DIR, result_file)
def get_input_file(experiment_name):
input_file = "%s.txt" % (experiment_name)
return os.path.join(DATA_DIR, input_file)
def run():
result_file = get_result_file()
result_file = open(result_file, "w")
results = {}
for name, exp in EXPERIMENTS.items():
input_file = get_input_file(name)
if not os.path.isfile(input_file):
print "Input file for %s does not exist!" % name
return
input_file = open(input_file)
exp = exp(input_file)
exp.run()
input_file.close()
results[name] = exp.results
json.dump(results, result_file)
result_file.close()
if __name__ == "__main__":
run()
Store json file as .json | import os
import sys
import json
import utils
from datetime import datetime
from experiments import *
RESULTS_DIR = "results"
DATA_DIR = "data"
EXPERIMENTS = {
"http_request" : http_request.HTTPRequestExperiment,
"tcp_connect" : tcp_connect.TCPConnectExperiment,
"turkey" : turkey.TurkeyExperiment
}
def get_result_file():
result_file = "result-%s.json" % (datetime.now().isoformat())
return os.path.join(RESULTS_DIR, result_file)
def get_input_file(experiment_name):
input_file = "%s.txt" % (experiment_name)
return os.path.join(DATA_DIR, input_file)
def run():
result_file = get_result_file()
result_file = open(result_file, "w")
results = {}
for name, exp in EXPERIMENTS.items():
input_file = get_input_file(name)
if not os.path.isfile(input_file):
print "Input file for %s does not exist!" % name
return
input_file = open(input_file)
exp = exp(input_file)
exp.run()
input_file.close()
results[name] = exp.results
json.dump(results, result_file)
result_file.close()
if __name__ == "__main__":
run()
| <commit_before>import os
import sys
import json
import utils
from datetime import datetime
from experiments import *
RESULTS_DIR = "results"
DATA_DIR = "data"
EXPERIMENTS = {
"http_request" : http_request.HTTPRequestExperiment,
"tcp_connect" : tcp_connect.TCPConnectExperiment,
"turkey" : turkey.TurkeyExperiment
}
def get_result_file():
result_file = "result-%s.txt" % (datetime.now().isoformat())
return os.path.join(RESULTS_DIR, result_file)
def get_input_file(experiment_name):
input_file = "%s.txt" % (experiment_name)
return os.path.join(DATA_DIR, input_file)
def run():
result_file = get_result_file()
result_file = open(result_file, "w")
results = {}
for name, exp in EXPERIMENTS.items():
input_file = get_input_file(name)
if not os.path.isfile(input_file):
print "Input file for %s does not exist!" % name
return
input_file = open(input_file)
exp = exp(input_file)
exp.run()
input_file.close()
results[name] = exp.results
json.dump(results, result_file)
result_file.close()
if __name__ == "__main__":
run()
<commit_msg>Store json file as .json<commit_after> | import os
import sys
import json
import utils
from datetime import datetime
from experiments import *
RESULTS_DIR = "results"
DATA_DIR = "data"
EXPERIMENTS = {
"http_request" : http_request.HTTPRequestExperiment,
"tcp_connect" : tcp_connect.TCPConnectExperiment,
"turkey" : turkey.TurkeyExperiment
}
def get_result_file():
result_file = "result-%s.json" % (datetime.now().isoformat())
return os.path.join(RESULTS_DIR, result_file)
def get_input_file(experiment_name):
input_file = "%s.txt" % (experiment_name)
return os.path.join(DATA_DIR, input_file)
def run():
result_file = get_result_file()
result_file = open(result_file, "w")
results = {}
for name, exp in EXPERIMENTS.items():
input_file = get_input_file(name)
if not os.path.isfile(input_file):
print "Input file for %s does not exist!" % name
return
input_file = open(input_file)
exp = exp(input_file)
exp.run()
input_file.close()
results[name] = exp.results
json.dump(results, result_file)
result_file.close()
if __name__ == "__main__":
run()
| import os
import sys
import json
import utils
from datetime import datetime
from experiments import *
RESULTS_DIR = "results"
DATA_DIR = "data"
EXPERIMENTS = {
"http_request" : http_request.HTTPRequestExperiment,
"tcp_connect" : tcp_connect.TCPConnectExperiment,
"turkey" : turkey.TurkeyExperiment
}
def get_result_file():
result_file = "result-%s.txt" % (datetime.now().isoformat())
return os.path.join(RESULTS_DIR, result_file)
def get_input_file(experiment_name):
input_file = "%s.txt" % (experiment_name)
return os.path.join(DATA_DIR, input_file)
def run():
result_file = get_result_file()
result_file = open(result_file, "w")
results = {}
for name, exp in EXPERIMENTS.items():
input_file = get_input_file(name)
if not os.path.isfile(input_file):
print "Input file for %s does not exist!" % name
return
input_file = open(input_file)
exp = exp(input_file)
exp.run()
input_file.close()
results[name] = exp.results
json.dump(results, result_file)
result_file.close()
if __name__ == "__main__":
run()
Store json file as .jsonimport os
import sys
import json
import utils
from datetime import datetime
from experiments import *
RESULTS_DIR = "results"
DATA_DIR = "data"
EXPERIMENTS = {
"http_request" : http_request.HTTPRequestExperiment,
"tcp_connect" : tcp_connect.TCPConnectExperiment,
"turkey" : turkey.TurkeyExperiment
}
def get_result_file():
result_file = "result-%s.json" % (datetime.now().isoformat())
return os.path.join(RESULTS_DIR, result_file)
def get_input_file(experiment_name):
input_file = "%s.txt" % (experiment_name)
return os.path.join(DATA_DIR, input_file)
def run():
result_file = get_result_file()
result_file = open(result_file, "w")
results = {}
for name, exp in EXPERIMENTS.items():
input_file = get_input_file(name)
if not os.path.isfile(input_file):
print "Input file for %s does not exist!" % name
return
input_file = open(input_file)
exp = exp(input_file)
exp.run()
input_file.close()
results[name] = exp.results
json.dump(results, result_file)
result_file.close()
if __name__ == "__main__":
run()
| <commit_before>import os
import sys
import json
import utils
from datetime import datetime
from experiments import *
RESULTS_DIR = "results"
DATA_DIR = "data"
EXPERIMENTS = {
"http_request" : http_request.HTTPRequestExperiment,
"tcp_connect" : tcp_connect.TCPConnectExperiment,
"turkey" : turkey.TurkeyExperiment
}
def get_result_file():
result_file = "result-%s.txt" % (datetime.now().isoformat())
return os.path.join(RESULTS_DIR, result_file)
def get_input_file(experiment_name):
input_file = "%s.txt" % (experiment_name)
return os.path.join(DATA_DIR, input_file)
def run():
result_file = get_result_file()
result_file = open(result_file, "w")
results = {}
for name, exp in EXPERIMENTS.items():
input_file = get_input_file(name)
if not os.path.isfile(input_file):
print "Input file for %s does not exist!" % name
return
input_file = open(input_file)
exp = exp(input_file)
exp.run()
input_file.close()
results[name] = exp.results
json.dump(results, result_file)
result_file.close()
if __name__ == "__main__":
run()
<commit_msg>Store json file as .json<commit_after>import os
import sys
import json
import utils
from datetime import datetime
from experiments import *
RESULTS_DIR = "results"
DATA_DIR = "data"
EXPERIMENTS = {
"http_request" : http_request.HTTPRequestExperiment,
"tcp_connect" : tcp_connect.TCPConnectExperiment,
"turkey" : turkey.TurkeyExperiment
}
def get_result_file():
result_file = "result-%s.json" % (datetime.now().isoformat())
return os.path.join(RESULTS_DIR, result_file)
def get_input_file(experiment_name):
input_file = "%s.txt" % (experiment_name)
return os.path.join(DATA_DIR, input_file)
def run():
result_file = get_result_file()
result_file = open(result_file, "w")
results = {}
for name, exp in EXPERIMENTS.items():
input_file = get_input_file(name)
if not os.path.isfile(input_file):
print "Input file for %s does not exist!" % name
return
input_file = open(input_file)
exp = exp(input_file)
exp.run()
input_file.close()
results[name] = exp.results
json.dump(results, result_file)
result_file.close()
if __name__ == "__main__":
run()
|
cc29f43a351f1e0418edaceb830e5b189d31b3ad | script/lib/config.py | script/lib/config.py | #!/usr/bin/env python
import platform
import sys
NODE_VERSION = 'v0.11.13'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '5d5539f8232bb4d0253438216de11a99159b3c4d'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
| #!/usr/bin/env python
import platform
import sys
NODE_VERSION = 'v0.11.13'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '99d263cbd842ba57331ddb975aad742470a4cff4'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
| Update libchromiumcontent to fix shared workers. | win: Update libchromiumcontent to fix shared workers.
| Python | mit | tinydew4/electron,mattotodd/electron,fritx/electron,kcrt/electron,RobertJGabriel/electron,lzpfmh/electron,sshiting/electron,leethomas/electron,fomojola/electron,Gerhut/electron,subblue/electron,dahal/electron,posix4e/electron,jsutcodes/electron,thingsinjars/electron,subblue/electron,Jacobichou/electron,aichingm/electron,mrwizard82d1/electron,fritx/electron,kostia/electron,systembugtj/electron,davazp/electron,chrisswk/electron,nicobot/electron,meowlab/electron,aichingm/electron,simongregory/electron,rajatsingla28/electron,shiftkey/electron,voidbridge/electron,mhkeller/electron,stevekinney/electron,jhen0409/electron,rreimann/electron,fritx/electron,tomashanacek/electron,icattlecoder/electron,systembugtj/electron,aecca/electron,coderhaoxin/electron,fabien-d/electron,pandoraui/electron,jhen0409/electron,rprichard/electron,John-Lin/electron,preco21/electron,digideskio/electron,jonatasfreitasv/electron,gbn972/electron,setzer777/electron,mirrh/electron,shockone/electron,jjz/electron,LadyNaggaga/electron,tinydew4/electron,simongregory/electron,dkfiresky/electron,Rokt33r/electron,LadyNaggaga/electron,sshiting/electron,JussMee15/electron,lrlna/electron,mjaniszew/electron,arusakov/electron,icattlecoder/electron,farmisen/electron,ianscrivener/electron,thingsinjars/electron,eriser/electron,JussMee15/electron,beni55/electron,yan-foto/electron,jaanus/electron,Ivshti/electron,bpasero/electron,bobwol/electron,oiledCode/electron,GoooIce/electron,ervinb/electron,Neron-X5/electron,destan/electron,jannishuebl/electron,benweissmann/electron,jlhbaseball15/electron,rsvip/electron,kazupon/electron,iftekeriba/electron,nicholasess/electron,edulan/electron,LadyNaggaga/electron,darwin/electron,wan-qy/electron,MaxWhere/electron,carsonmcdonald/electron,tomashanacek/electron,mattdesl/electron,arusakov/electron,icattlecoder/electron,yan-foto/electron,mhkeller/electron,felixrieseberg/electron,biblerule/UMCTelnetHub,tincan24/electron,meowlab/electron,fireball-x/atom-shell,IonicaBizau
Kitchen/electron,Ivshti/electron,rhencke/electron,kenmozi/electron,kokdemo/electron,SufianHassan/electron,sshiting/electron,greyhwndz/electron,setzer777/electron,GoooIce/electron,setzer777/electron,destan/electron,d-salas/electron,bitemyapp/electron,yan-foto/electron,gerhardberger/electron,aaron-goshine/electron,aliib/electron,Floato/electron,Zagorakiss/electron,meowlab/electron,SufianHassan/electron,evgenyzinoviev/electron,beni55/electron,rajatsingla28/electron,tincan24/electron,JussMee15/electron,cqqccqc/electron,dahal/electron,nicobot/electron,jannishuebl/electron,simongregory/electron,pirafrank/electron,bbondy/electron,fffej/electron,vaginessa/electron,MaxWhere/electron,pirafrank/electron,bpasero/electron,greyhwndz/electron,aichingm/electron,Floato/electron,synaptek/electron,ankitaggarwal011/electron,simonfork/electron,eriser/electron,jlhbaseball15/electron,cos2004/electron,rajatsingla28/electron,vipulroxx/electron,kenmozi/electron,iftekeriba/electron,brave/electron,micalan/electron,aliib/electron,rreimann/electron,bbondy/electron,chriskdon/electron,benweissmann/electron,sircharleswatson/electron,cqqccqc/electron,trankmichael/electron,d-salas/electron,iftekeriba/electron,seanchas116/electron,cos2004/electron,synaptek/electron,saronwei/electron,saronwei/electron,smczk/electron,trigrass2/electron,biblerule/UMCTelnetHub,mattotodd/electron,baiwyc119/electron,shennushi/electron,wolfflow/electron,deepak1556/atom-shell,jonatasfreitasv/electron,fomojola/electron,aecca/electron,adamjgray/electron,Evercoder/electron,astoilkov/electron,shennushi/electron,leethomas/electron,aecca/electron,xfstudio/electron,electron/electron,tincan24/electron,soulteary/electron,shockone/electron,kazupon/electron,jlhbaseball15/electron,RIAEvangelist/electron,gerhardberger/electron,JesselJohn/electron,edulan/electron,eriser/electron,noikiy/electron,fffej/electron,ervinb/electron,bpasero/electron,edulan/electron,d-salas/electron,felixrieseberg/electron,jjz/electron,minggo/electron,takashi/elect
ron,Andrey-Pavlov/electron,deed02392/electron,farmisen/electron,ankitaggarwal011/electron,stevemao/electron,natgolov/electron,egoist/electron,abhishekgahlot/electron,fomojola/electron,John-Lin/electron,shiftkey/electron,rhencke/electron,kazupon/electron,medixdev/electron,Faiz7412/electron,gstack/infinium-shell,chrisswk/electron,gbn972/electron,fffej/electron,rsvip/electron,wolfflow/electron,kokdemo/electron,bruce/electron,rhencke/electron,the-ress/electron,pandoraui/electron,bruce/electron,ervinb/electron,biblerule/UMCTelnetHub,miniak/electron,seanchas116/electron,smczk/electron,howmuchcomputer/electron,fomojola/electron,joaomoreno/atom-shell,aaron-goshine/electron,John-Lin/electron,xiruibing/electron,takashi/electron,greyhwndz/electron,oiledCode/electron,leftstick/electron,brave/muon,bpasero/electron,simongregory/electron,mubassirhayat/electron,John-Lin/electron,RobertJGabriel/electron,jhen0409/electron,evgenyzinoviev/electron,bbondy/electron,John-Lin/electron,brave/electron,mirrh/electron,setzer777/electron,fabien-d/electron,the-ress/electron,gabriel/electron,trankmichael/electron,ianscrivener/electron,yan-foto/electron,renaesop/electron,webmechanicx/electron,lzpfmh/electron,leethomas/electron,nicobot/electron,timruffles/electron,gbn972/electron,trigrass2/electron,simonfork/electron,Rokt33r/electron,chriskdon/electron,sircharleswatson/electron,gbn972/electron,Faiz7412/electron,BionicClick/electron,tomashanacek/electron,ianscrivener/electron,dkfiresky/electron,kikong/electron,Floato/electron,roadev/electron,gerhardberger/electron,arturts/electron,smczk/electron,cos2004/electron,benweissmann/electron,evgenyzinoviev/electron,simongregory/electron,d-salas/electron,faizalpribadi/electron,bwiggs/electron,kenmozi/electron,jtburke/electron,brave/muon,the-ress/electron,Andrey-Pavlov/electron,leftstick/electron,gabrielPeart/electron,coderhaoxin/electron,takashi/electron,chrisswk/electron,pombredanne/electron,adamjgray/electron,davazp/electron,roadev/electron,vipulroxx/elect
ron,farmisen/electron,MaxGraey/electron,LadyNaggaga/electron,bwiggs/electron,joneit/electron,electron/electron,beni55/electron,renaesop/electron,eric-seekas/electron,zhakui/electron,edulan/electron,mubassirhayat/electron,fabien-d/electron,JesselJohn/electron,RIAEvangelist/electron,tomashanacek/electron,aliib/electron,adcentury/electron,natgolov/electron,fireball-x/atom-shell,jacksondc/electron,deed02392/electron,kcrt/electron,meowlab/electron,shaundunne/electron,etiktin/electron,brave/muon,noikiy/electron,RobertJGabriel/electron,aliib/electron,farmisen/electron,felixrieseberg/electron,thomsonreuters/electron,zhakui/electron,natgolov/electron,Rokt33r/electron,etiktin/electron,mubassirhayat/electron,jiaz/electron,farmisen/electron,baiwyc119/electron,the-ress/electron,evgenyzinoviev/electron,twolfson/electron,simonfork/electron,adcentury/electron,tonyganch/electron,twolfson/electron,yalexx/electron,joaomoreno/atom-shell,destan/electron,Gerhut/electron,jannishuebl/electron,LadyNaggaga/electron,micalan/electron,voidbridge/electron,faizalpribadi/electron,subblue/electron,anko/electron,aichingm/electron,eric-seekas/electron,rajatsingla28/electron,jiaz/electron,Evercoder/electron,vHanda/electron,shaundunne/electron,chriskdon/electron,shockone/electron,arturts/electron,kostia/electron,miniak/electron,astoilkov/electron,systembugtj/electron,minggo/electron,xiruibing/electron,benweissmann/electron,JussMee15/electron,ianscrivener/electron,jiaz/electron,coderhaoxin/electron,rsvip/electron,jannishuebl/electron,stevekinney/electron,roadev/electron,bruce/electron,felixrieseberg/electron,Zagorakiss/electron,preco21/electron,dkfiresky/electron,pombredanne/electron,carsonmcdonald/electron,jiaz/electron,kostia/electron,sircharleswatson/electron,trankmichael/electron,takashi/electron,seanchas116/electron,timruffles/electron,adcentury/electron,setzer777/electron,digideskio/electron,Neron-X5/electron,JesselJohn/electron,wan-qy/electron,matiasinsaurralde/electron,bright-sparks/electron,sar
onwei/electron,pandoraui/electron,jjz/electron,LadyNaggaga/electron,medixdev/electron,noikiy/electron,pandoraui/electron,Zagorakiss/electron,jannishuebl/electron,howmuchcomputer/electron,deed02392/electron,thomsonreuters/electron,Jonekee/electron,bright-sparks/electron,zhakui/electron,twolfson/electron,egoist/electron,takashi/electron,tylergibson/electron,zhakui/electron,tylergibson/electron,bright-sparks/electron,fireball-x/atom-shell,gabriel/electron,electron/electron,jsutcodes/electron,gstack/infinium-shell,dongjoon-hyun/electron,iftekeriba/electron,BionicClick/electron,thomsonreuters/electron,fabien-d/electron,the-ress/electron,gabriel/electron,neutrous/electron,medixdev/electron,Floato/electron,jlhbaseball15/electron,GoooIce/electron,Evercoder/electron,renaesop/electron,jonatasfreitasv/electron,shiftkey/electron,beni55/electron,gabriel/electron,tincan24/electron,brave/muon,hokein/atom-shell,Gerhut/electron,tylergibson/electron,systembugtj/electron,gstack/infinium-shell,aaron-goshine/electron,MaxGraey/electron,tylergibson/electron,leolujuyi/electron,vaginessa/electron,kenmozi/electron,d-salas/electron,nekuz0r/electron,carsonmcdonald/electron,egoist/electron,vaginessa/electron,voidbridge/electron,oiledCode/electron,RIAEvangelist/electron,dkfiresky/electron,jjz/electron,roadev/electron,farmisen/electron,bobwol/electron,rhencke/electron,xfstudio/electron,rsvip/electron,trigrass2/electron,Neron-X5/electron,howmuchcomputer/electron,mrwizard82d1/electron,wolfflow/electron,meowlab/electron,joneit/electron,yan-foto/electron,gabrielPeart/electron,jiaz/electron,aichingm/electron,DivyaKMenon/electron,abhishekgahlot/electron,nekuz0r/electron,renaesop/electron,jlord/electron,micalan/electron,thompsonemerson/electron,shennushi/electron,setzer777/electron,gstack/infinium-shell,shaundunne/electron,baiwyc119/electron,joneit/electron,jtburke/electron,noikiy/electron,davazp/electron,MaxGraey/electron,d-salas/electron,anko/electron,bwiggs/electron,IonicaBizauKitchen/electron,leoluj
uyi/electron,kostia/electron,soulteary/electron,tylergibson/electron,wan-qy/electron,chriskdon/electron,bitemyapp/electron,bobwol/electron,darwin/electron,Jacobichou/electron,gamedevsam/electron,leolujuyi/electron,Zagorakiss/electron,SufianHassan/electron,tinydew4/electron,cqqccqc/electron,shaundunne/electron,jhen0409/electron,saronwei/electron,mattotodd/electron,preco21/electron,matiasinsaurralde/electron,Rokt33r/electron,wan-qy/electron,etiktin/electron,oiledCode/electron,jaanus/electron,ervinb/electron,Andrey-Pavlov/electron,abhishekgahlot/electron,systembugtj/electron,sky7sea/electron,sircharleswatson/electron,medixdev/electron,Andrey-Pavlov/electron,BionicClick/electron,Floato/electron,GoooIce/electron,biblerule/UMCTelnetHub,dongjoon-hyun/electron,nicobot/electron,IonicaBizauKitchen/electron,davazp/electron,carsonmcdonald/electron,arusakov/electron,anko/electron,cos2004/electron,bbondy/electron,tinydew4/electron,destan/electron,kikong/electron,pirafrank/electron,synaptek/electron,eric-seekas/electron,jiaz/electron,preco21/electron,jsutcodes/electron,kikong/electron,deepak1556/atom-shell,fomojola/electron,arturts/electron,jannishuebl/electron,mhkeller/electron,aliib/electron,michaelchiche/electron,deed02392/electron,thompsonemerson/electron,fffej/electron,abhishekgahlot/electron,etiktin/electron,Faiz7412/electron,adamjgray/electron,pirafrank/electron,jonatasfreitasv/electron,shennushi/electron,abhishekgahlot/electron,trankmichael/electron,howmuchcomputer/electron,matiasinsaurralde/electron,webmechanicx/electron,arusakov/electron,brenca/electron,mjaniszew/electron,michaelchiche/electron,noikiy/electron,anko/electron,the-ress/electron,digideskio/electron,jlhbaseball15/electron,stevekinney/electron,simonfork/electron,bpasero/electron,MaxWhere/electron,soulteary/electron,baiwyc119/electron,sircharleswatson/electron,stevekinney/electron,tonyganch/electron,bpasero/electron,deepak1556/atom-shell,adcentury/electron,faizalpribadi/electron,jhen0409/electron,mirrh/electron
,oiledCode/electron,gstack/infinium-shell,lrlna/electron,eric-seekas/electron,carsonmcdonald/electron,hokein/atom-shell,nicholasess/electron,DivyaKMenon/electron,JussMee15/electron,thingsinjars/electron,micalan/electron,Evercoder/electron,rsvip/electron,electron/electron,deepak1556/atom-shell,Zagorakiss/electron,tinydew4/electron,jhen0409/electron,shaundunne/electron,zhakui/electron,stevekinney/electron,shockone/electron,thomsonreuters/electron,jaanus/electron,jlhbaseball15/electron,biblerule/UMCTelnetHub,saronwei/electron,dkfiresky/electron,fomojola/electron,gabrielPeart/electron,smczk/electron,rprichard/electron,kcrt/electron,RIAEvangelist/electron,deed02392/electron,robinvandernoord/electron,vipulroxx/electron,natgolov/electron,xiruibing/electron,MaxWhere/electron,fireball-x/atom-shell,Zagorakiss/electron,neutrous/electron,mhkeller/electron,rajatsingla28/electron,posix4e/electron,vHanda/electron,ianscrivener/electron,bitemyapp/electron,webmechanicx/electron,tomashanacek/electron,joneit/electron,IonicaBizauKitchen/electron,pombredanne/electron,John-Lin/electron,nekuz0r/electron,shiftkey/electron,shockone/electron,leftstick/electron,jsutcodes/electron,cqqccqc/electron,yalexx/electron,sky7sea/electron,roadev/electron,saronwei/electron,posix4e/electron,benweissmann/electron,subblue/electron,eric-seekas/electron,Evercoder/electron,baiwyc119/electron,shiftkey/electron,jaanus/electron,jacksondc/electron,bruce/electron,timruffles/electron,leethomas/electron,bobwol/electron,seanchas116/electron,lzpfmh/electron,mattotodd/electron,edulan/electron,Ivshti/electron,gamedevsam/electron,tonyganch/electron,fritx/electron,adamjgray/electron,hokein/atom-shell,arturts/electron,ankitaggarwal011/electron,rreimann/electron,synaptek/electron,yalexx/electron,miniak/electron,tomashanacek/electron,gabrielPeart/electron,sky7sea/electron,jtburke/electron,mattdesl/electron,Neron-X5/electron,astoilkov/electron,tinydew4/electron,DivyaKMenon/electron,leftstick/electron,icattlecoder/electron,mich
aelchiche/electron,RIAEvangelist/electron,carsonmcdonald/electron,sky7sea/electron,RobertJGabriel/electron,joneit/electron,brave/electron,seanchas116/electron,subblue/electron,tonyganch/electron,MaxGraey/electron,mattotodd/electron,rreimann/electron,bobwol/electron,webmechanicx/electron,nekuz0r/electron,robinvandernoord/electron,lzpfmh/electron,voidbridge/electron,trigrass2/electron,vipulroxx/electron,christian-bromann/electron,mrwizard82d1/electron,dongjoon-hyun/electron,GoooIce/electron,kazupon/electron,electron/electron,sshiting/electron,wolfflow/electron,tylergibson/electron,destan/electron,jjz/electron,anko/electron,bwiggs/electron,jacksondc/electron,dkfiresky/electron,greyhwndz/electron,stevemao/electron,bbondy/electron,bitemyapp/electron,Gerhut/electron,michaelchiche/electron,jlord/electron,nicholasess/electron,leolujuyi/electron,gbn972/electron,nagyistoce/electron-atom-shell,sshiting/electron,pandoraui/electron,mattdesl/electron,natgolov/electron,ervinb/electron,icattlecoder/electron,sky7sea/electron,xiruibing/electron,ankitaggarwal011/electron,tincan24/electron,Ivshti/electron,webmechanicx/electron,felixrieseberg/electron,maxogden/atom-shell,twolfson/electron,seanchas116/electron,biblerule/UMCTelnetHub,leethomas/electron,mjaniszew/electron,vaginessa/electron,adamjgray/electron,nekuz0r/electron,rajatsingla28/electron,vHanda/electron,SufianHassan/electron,mattdesl/electron,nagyistoce/electron-atom-shell,gamedevsam/electron,deepak1556/atom-shell,soulteary/electron,bruce/electron,eriser/electron,dongjoon-hyun/electron,synaptek/electron,coderhaoxin/electron,brave/electron,twolfson/electron,Andrey-Pavlov/electron,lrlna/electron,fffej/electron,wolfflow/electron,mhkeller/electron,thingsinjars/electron,beni55/electron,rhencke/electron,aaron-goshine/electron,Faiz7412/electron,baiwyc119/electron,darwin/electron,thompsonemerson/electron,twolfson/electron,fritx/electron,pombredanne/electron,rreimann/electron,bitemyapp/electron,gabrielPeart/electron,miniak/electron,kosti
a/electron,Rokt33r/electron,dahal/electron,brave/muon,brave/muon,bwiggs/electron,evgenyzinoviev/electron,miniak/electron,vipulroxx/electron,ervinb/electron,dongjoon-hyun/electron,christian-bromann/electron,joneit/electron,xiruibing/electron,maxogden/atom-shell,ianscrivener/electron,rreimann/electron,jaanus/electron,aecca/electron,smczk/electron,IonicaBizauKitchen/electron,BionicClick/electron,DivyaKMenon/electron,edulan/electron,kikong/electron,mjaniszew/electron,nicholasess/electron,neutrous/electron,eriser/electron,leftstick/electron,jacksondc/electron,lrlna/electron,zhakui/electron,gerhardberger/electron,posix4e/electron,pombredanne/electron,DivyaKMenon/electron,evgenyzinoviev/electron,xfstudio/electron,kazupon/electron,BionicClick/electron,sky7sea/electron,trankmichael/electron,minggo/electron,arusakov/electron,BionicClick/electron,yalexx/electron,cqqccqc/electron,kokdemo/electron,gabriel/electron,neutrous/electron,Jonekee/electron,leethomas/electron,chriskdon/electron,darwin/electron,jacksondc/electron,minggo/electron,soulteary/electron,stevemao/electron,mirrh/electron,cos2004/electron,mrwizard82d1/electron,howmuchcomputer/electron,jcblw/electron,simongregory/electron,bruce/electron,christian-bromann/electron,icattlecoder/electron,chriskdon/electron,maxogden/atom-shell,mirrh/electron,gamedevsam/electron,JussMee15/electron,SufianHassan/electron,takashi/electron,Jacobichou/electron,robinvandernoord/electron,shockone/electron,SufianHassan/electron,pirafrank/electron,ankitaggarwal011/electron,fritx/electron,leolujuyi/electron,bright-sparks/electron,stevekinney/electron,oiledCode/electron,greyhwndz/electron,bwiggs/electron,trigrass2/electron,digideskio/electron,Jacobichou/electron,gerhardberger/electron,coderhaoxin/electron,etiktin/electron,the-ress/electron,yalexx/electron,trigrass2/electron,leolujuyi/electron,michaelchiche/electron,MaxGraey/electron,maxogden/atom-shell,jlord/electron,wan-qy/electron,jsutcodes/electron,thomsonreuters/electron,minggo/electron,jcblw/
electron,roadev/electron,jlord/electron,hokein/atom-shell,aaron-goshine/electron,coderhaoxin/electron,joaomoreno/atom-shell,voidbridge/electron,posix4e/electron,shiftkey/electron,thompsonemerson/electron,darwin/electron,joaomoreno/atom-shell,gabriel/electron,chrisswk/electron,stevemao/electron,christian-bromann/electron,miniak/electron,christian-bromann/electron,jcblw/electron,thompsonemerson/electron,astoilkov/electron,xfstudio/electron,eriser/electron,gerhardberger/electron,RobertJGabriel/electron,anko/electron,kikong/electron,mattdesl/electron,kenmozi/electron,Gerhut/electron,ankitaggarwal011/electron,davazp/electron,trankmichael/electron,adamjgray/electron,thingsinjars/electron,astoilkov/electron,thompsonemerson/electron,michaelchiche/electron,kenmozi/electron,electron/electron,tonyganch/electron,brenca/electron,pirafrank/electron,Evercoder/electron,matiasinsaurralde/electron,brenca/electron,JesselJohn/electron,matiasinsaurralde/electron,Floato/electron,dahal/electron,bright-sparks/electron,kokdemo/electron,yalexx/electron,gamedevsam/electron,medixdev/electron,joaomoreno/atom-shell,pombredanne/electron,nicholasess/electron,thomsonreuters/electron,neutrous/electron,micalan/electron,joaomoreno/atom-shell,timruffles/electron,Andrey-Pavlov/electron,Jonekee/electron,lrlna/electron,arturts/electron,egoist/electron,aichingm/electron,mattdesl/electron,Jacobichou/electron,deed02392/electron,Jonekee/electron,rprichard/electron,subblue/electron,preco21/electron,thingsinjars/electron,dongjoon-hyun/electron,RIAEvangelist/electron,faizalpribadi/electron,stevemao/electron,Ivshti/electron,hokein/atom-shell,kcrt/electron,kostia/electron,mubassirhayat/electron,vHanda/electron,fffej/electron,chrisswk/electron,bbondy/electron,dahal/electron,Jonekee/electron,egoist/electron,kcrt/electron,IonicaBizauKitchen/electron,jacksondc/electron,faizalpribadi/electron,mjaniszew/electron,renaesop/electron,simonfork/electron,jlord/electron,eric-seekas/electron,greyhwndz/electron,medixdev/electron
,soulteary/electron,meowlab/electron,brenca/electron,astoilkov/electron,systembugtj/electron,jcblw/electron,MaxWhere/electron,Neron-X5/electron,nicobot/electron,yan-foto/electron,adcentury/electron,fireball-x/atom-shell,davazp/electron,tonyganch/electron,gabrielPeart/electron,Jacobichou/electron,adcentury/electron,brave/electron,jtburke/electron,neutrous/electron,arusakov/electron,micalan/electron,vipulroxx/electron,electron/electron,jonatasfreitasv/electron,jcblw/electron,RobertJGabriel/electron,rprichard/electron,jtburke/electron,robinvandernoord/electron,aecca/electron,mjaniszew/electron,mhkeller/electron,jjz/electron,robinvandernoord/electron,arturts/electron,brenca/electron,shaundunne/electron,iftekeriba/electron,benweissmann/electron,xfstudio/electron,vaginessa/electron,jtburke/electron,mrwizard82d1/electron,timruffles/electron,smczk/electron,JesselJohn/electron,kokdemo/electron,xiruibing/electron,abhishekgahlot/electron,kazupon/electron,lrlna/electron,Faiz7412/electron,voidbridge/electron,webmechanicx/electron,vHanda/electron,maxogden/atom-shell,cos2004/electron,vaginessa/electron,xfstudio/electron,aaron-goshine/electron,wolfflow/electron,faizalpribadi/electron,felixrieseberg/electron,mattotodd/electron,natgolov/electron,bobwol/electron,kokdemo/electron,destan/electron,Jonekee/electron,sshiting/electron,gbn972/electron,mrwizard82d1/electron,jaanus/electron,bpasero/electron,fabien-d/electron,kcrt/electron,MaxWhere/electron,robinvandernoord/electron,posix4e/electron,simonfork/electron,nicobot/electron,digideskio/electron,JesselJohn/electron,bright-sparks/electron,aliib/electron,brenca/electron,leftstick/electron,nicholasess/electron,matiasinsaurralde/electron,digideskio/electron,christian-bromann/electron,shennushi/electron,renaesop/electron,nagyistoce/electron-atom-shell,mirrh/electron,vHanda/electron,GoooIce/electron,cqqccqc/electron,preco21/electron,iftekeriba/electron,jsutcodes/electron,lzpfmh/electron,lzpfmh/electron,Neron-X5/electron,tincan24/electron,syn
aptek/electron,beni55/electron,jonatasfreitasv/electron,mubassirhayat/electron,bitemyapp/electron,shennushi/electron,pandoraui/electron,minggo/electron,sircharleswatson/electron,egoist/electron,wan-qy/electron,noikiy/electron,rhencke/electron,jcblw/electron,aecca/electron,Rokt33r/electron,nagyistoce/electron-atom-shell,Gerhut/electron,nagyistoce/electron-atom-shell,gerhardberger/electron,nekuz0r/electron,brave/electron,dahal/electron,howmuchcomputer/electron,DivyaKMenon/electron,gamedevsam/electron,stevemao/electron,etiktin/electron | #!/usr/bin/env python
import platform
import sys
NODE_VERSION = 'v0.11.13'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '5d5539f8232bb4d0253438216de11a99159b3c4d'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
win: Update libchromiumcontent to fix shared workers. | #!/usr/bin/env python
import platform
import sys
NODE_VERSION = 'v0.11.13'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '99d263cbd842ba57331ddb975aad742470a4cff4'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
| <commit_before>#!/usr/bin/env python
import platform
import sys
NODE_VERSION = 'v0.11.13'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '5d5539f8232bb4d0253438216de11a99159b3c4d'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
<commit_msg>win: Update libchromiumcontent to fix shared workers.<commit_after> | #!/usr/bin/env python
import platform
import sys
NODE_VERSION = 'v0.11.13'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '99d263cbd842ba57331ddb975aad742470a4cff4'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
| #!/usr/bin/env python
import platform
import sys
NODE_VERSION = 'v0.11.13'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '5d5539f8232bb4d0253438216de11a99159b3c4d'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
win: Update libchromiumcontent to fix shared workers.#!/usr/bin/env python
import platform
import sys
NODE_VERSION = 'v0.11.13'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '99d263cbd842ba57331ddb975aad742470a4cff4'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
| <commit_before>#!/usr/bin/env python
import platform
import sys
NODE_VERSION = 'v0.11.13'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '5d5539f8232bb4d0253438216de11a99159b3c4d'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
<commit_msg>win: Update libchromiumcontent to fix shared workers.<commit_after>#!/usr/bin/env python
import platform
import sys
NODE_VERSION = 'v0.11.13'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '99d263cbd842ba57331ddb975aad742470a4cff4'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
|
e0a34d86837b6d1e1a9d740fbc5f0b8e2a2ee4b1 | Lib/email/__init__.py | Lib/email/__init__.py | # Copyright (C) 2001 Python Software Foundation
# Author: barry@zope.com (Barry Warsaw)
"""A package for parsing, handling, and generating email messages.
"""
__version__ = '1.0'
__all__ = ['Encoders',
'Errors',
'Generator',
'Image',
'Iterators',
'MIMEBase',
'Message',
'MessageRFC822',
'Parser',
'Text',
'Utils',
'message_from_string',
'message_from_file',
]
# Some convenience routines
from Parser import Parser as _Parser
from Message import Message as _Message
def message_from_string(s, _class=_Message):
    """Parse the string *s* into an email message object of type *_class*."""
    return _Parser(_class).parsestr(s)
def message_from_file(fp, _class=_Message):
    """Parse the open file object *fp* into an email message of type *_class*."""
    return _Parser(_class).parse(fp)
| # Copyright (C) 2001 Python Software Foundation
# Author: barry@zope.com (Barry Warsaw)
"""A package for parsing, handling, and generating email messages.
"""
__version__ = '1.0'
__all__ = ['Encoders',
'Errors',
'Generator',
'Iterators',
'MIMEAudio',
'MIMEBase',
'MIMEImage',
'MIMEMessage',
'MIMEText',
'Message',
'Parser',
'Utils',
'message_from_string',
'message_from_file',
]
# Some convenience routines
from Parser import Parser as _Parser
from Message import Message as _Message
def message_from_string(s, _class=_Message):
return _Parser(_class).parsestr(s)
def message_from_file(fp, _class=_Message):
return _Parser(_class).parse(fp)
| Fix __all__ to the current list of exported modules (must pass the tests in test_email.py). | Fix __all__ to the current list of exported modules (must pass the
tests in test_email.py).
| Python | mit | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | # Copyright (C) 2001 Python Software Foundation
# Author: barry@zope.com (Barry Warsaw)
"""A package for parsing, handling, and generating email messages.
"""
__version__ = '1.0'
__all__ = ['Encoders',
'Errors',
'Generator',
'Image',
'Iterators',
'MIMEBase',
'Message',
'MessageRFC822',
'Parser',
'Text',
'Utils',
'message_from_string',
'message_from_file',
]
# Some convenience routines
from Parser import Parser as _Parser
from Message import Message as _Message
def message_from_string(s, _class=_Message):
return _Parser(_class).parsestr(s)
def message_from_file(fp, _class=_Message):
return _Parser(_class).parse(fp)
Fix __all__ to the current list of exported modules (must pass the
tests in test_email.py). | # Copyright (C) 2001 Python Software Foundation
# Author: barry@zope.com (Barry Warsaw)
"""A package for parsing, handling, and generating email messages.
"""
__version__ = '1.0'
__all__ = ['Encoders',
'Errors',
'Generator',
'Iterators',
'MIMEAudio',
'MIMEBase',
'MIMEImage',
'MIMEMessage',
'MIMEText',
'Message',
'Parser',
'Utils',
'message_from_string',
'message_from_file',
]
# Some convenience routines
from Parser import Parser as _Parser
from Message import Message as _Message
def message_from_string(s, _class=_Message):
return _Parser(_class).parsestr(s)
def message_from_file(fp, _class=_Message):
return _Parser(_class).parse(fp)
| <commit_before># Copyright (C) 2001 Python Software Foundation
# Author: barry@zope.com (Barry Warsaw)
"""A package for parsing, handling, and generating email messages.
"""
__version__ = '1.0'
__all__ = ['Encoders',
'Errors',
'Generator',
'Image',
'Iterators',
'MIMEBase',
'Message',
'MessageRFC822',
'Parser',
'Text',
'Utils',
'message_from_string',
'message_from_file',
]
# Some convenience routines
from Parser import Parser as _Parser
from Message import Message as _Message
def message_from_string(s, _class=_Message):
return _Parser(_class).parsestr(s)
def message_from_file(fp, _class=_Message):
return _Parser(_class).parse(fp)
<commit_msg>Fix __all__ to the current list of exported modules (must pass the
tests in test_email.py).<commit_after> | # Copyright (C) 2001 Python Software Foundation
# Author: barry@zope.com (Barry Warsaw)
"""A package for parsing, handling, and generating email messages.
"""
__version__ = '1.0'
__all__ = ['Encoders',
'Errors',
'Generator',
'Iterators',
'MIMEAudio',
'MIMEBase',
'MIMEImage',
'MIMEMessage',
'MIMEText',
'Message',
'Parser',
'Utils',
'message_from_string',
'message_from_file',
]
# Some convenience routines
from Parser import Parser as _Parser
from Message import Message as _Message
def message_from_string(s, _class=_Message):
return _Parser(_class).parsestr(s)
def message_from_file(fp, _class=_Message):
return _Parser(_class).parse(fp)
| # Copyright (C) 2001 Python Software Foundation
# Author: barry@zope.com (Barry Warsaw)
"""A package for parsing, handling, and generating email messages.
"""
__version__ = '1.0'
__all__ = ['Encoders',
'Errors',
'Generator',
'Image',
'Iterators',
'MIMEBase',
'Message',
'MessageRFC822',
'Parser',
'Text',
'Utils',
'message_from_string',
'message_from_file',
]
# Some convenience routines
from Parser import Parser as _Parser
from Message import Message as _Message
def message_from_string(s, _class=_Message):
return _Parser(_class).parsestr(s)
def message_from_file(fp, _class=_Message):
return _Parser(_class).parse(fp)
Fix __all__ to the current list of exported modules (must pass the
tests in test_email.py).# Copyright (C) 2001 Python Software Foundation
# Author: barry@zope.com (Barry Warsaw)
"""A package for parsing, handling, and generating email messages.
"""
__version__ = '1.0'
__all__ = ['Encoders',
'Errors',
'Generator',
'Iterators',
'MIMEAudio',
'MIMEBase',
'MIMEImage',
'MIMEMessage',
'MIMEText',
'Message',
'Parser',
'Utils',
'message_from_string',
'message_from_file',
]
# Some convenience routines
from Parser import Parser as _Parser
from Message import Message as _Message
def message_from_string(s, _class=_Message):
return _Parser(_class).parsestr(s)
def message_from_file(fp, _class=_Message):
return _Parser(_class).parse(fp)
| <commit_before># Copyright (C) 2001 Python Software Foundation
# Author: barry@zope.com (Barry Warsaw)
"""A package for parsing, handling, and generating email messages.
"""
__version__ = '1.0'
__all__ = ['Encoders',
'Errors',
'Generator',
'Image',
'Iterators',
'MIMEBase',
'Message',
'MessageRFC822',
'Parser',
'Text',
'Utils',
'message_from_string',
'message_from_file',
]
# Some convenience routines
from Parser import Parser as _Parser
from Message import Message as _Message
def message_from_string(s, _class=_Message):
return _Parser(_class).parsestr(s)
def message_from_file(fp, _class=_Message):
return _Parser(_class).parse(fp)
<commit_msg>Fix __all__ to the current list of exported modules (must pass the
tests in test_email.py).<commit_after># Copyright (C) 2001 Python Software Foundation
# Author: barry@zope.com (Barry Warsaw)
"""A package for parsing, handling, and generating email messages.
"""
__version__ = '1.0'
__all__ = ['Encoders',
'Errors',
'Generator',
'Iterators',
'MIMEAudio',
'MIMEBase',
'MIMEImage',
'MIMEMessage',
'MIMEText',
'Message',
'Parser',
'Utils',
'message_from_string',
'message_from_file',
]
# Some convenience routines
from Parser import Parser as _Parser
from Message import Message as _Message
def message_from_string(s, _class=_Message):
return _Parser(_class).parsestr(s)
def message_from_file(fp, _class=_Message):
return _Parser(_class).parse(fp)
|
36ce36541bbd6512feaaf8e385bcfa7e11251281 | infcommon/yaml_reader/factory.py | infcommon/yaml_reader/factory.py | # -*- coding: utf-8 -*-
from infcommon.factory import Factory
from infcommon.yaml_reader.yaml_reader import YamlReader
def yaml_reader(path=None):
return Factory.instance('yaml_reader', lambda: YamlReader(path))
| # -*- coding: utf-8 -*-
import os
from infcommon.factory import Factory
from infcommon.yaml_reader.yaml_reader import YamlReader
DEFAULT_PATH_ENVIRONMENT_VARIABLE_NAME = 'CONF_FILE'
def yaml_reader(path=None):
    """Return the process-wide singleton YamlReader.

    When *path* is None the configuration path is taken from the
    CONF_FILE environment variable; os.environ raises KeyError if that
    variable is unset.  Note the Factory caches the first instance, so
    later calls with a different *path* still return the original reader.
    """
    path = path or os.environ[DEFAULT_PATH_ENVIRONMENT_VARIABLE_NAME]
    return Factory.instance('yaml_reader',
                            lambda: YamlReader(path))
 | Use environment variable with default path when no path given | [FEATURE] Use environment variable with default path when no path given
| Python | mit | aleasoluciones/infcommon,aleasoluciones/infcommon | # -*- coding: utf-8 -*-
from infcommon.factory import Factory
from infcommon.yaml_reader.yaml_reader import YamlReader
def yaml_reader(path=None):
return Factory.instance('yaml_reader', lambda: YamlReader(path))
[FEATURE] Use environment variable with default path when no path given
import os
from infcommon.factory import Factory
from infcommon.yaml_reader.yaml_reader import YamlReader
DEFAULT_PATH_ENVIRONMENT_VARIABLE_NAME = 'CONF_FILE'
def yaml_reader(path=None):
path = path or os.environ[DEFAULT_PATH_ENVIRONMENT_VARIABLE_NAME]
return Factory.instance('yaml_reader',
lambda: YamlReader(path))
| <commit_before># -*- coding: utf-8 -*-
from infcommon.factory import Factory
from infcommon.yaml_reader.yaml_reader import YamlReader
def yaml_reader(path=None):
return Factory.instance('yaml_reader', lambda: YamlReader(path))
<commit_msg>[FEATURE] Use environment variable with deafult path when no path given<commit_after> | # -*- coding: utf-8 -*-
import os
from infcommon.factory import Factory
from infcommon.yaml_reader.yaml_reader import YamlReader
DEFAULT_PATH_ENVIRONMENT_VARIABLE_NAME = 'CONF_FILE'
def yaml_reader(path=None):
path = path or os.environ[DEFAULT_PATH_ENVIRONMENT_VARIABLE_NAME]
return Factory.instance('yaml_reader',
lambda: YamlReader(path))
| # -*- coding: utf-8 -*-
from infcommon.factory import Factory
from infcommon.yaml_reader.yaml_reader import YamlReader
def yaml_reader(path=None):
return Factory.instance('yaml_reader', lambda: YamlReader(path))
[FEATURE] Use environment variable with deafult path when no path given# -*- coding: utf-8 -*-
import os
from infcommon.factory import Factory
from infcommon.yaml_reader.yaml_reader import YamlReader
DEFAULT_PATH_ENVIRONMENT_VARIABLE_NAME = 'CONF_FILE'
def yaml_reader(path=None):
path = path or os.environ[DEFAULT_PATH_ENVIRONMENT_VARIABLE_NAME]
return Factory.instance('yaml_reader',
lambda: YamlReader(path))
| <commit_before># -*- coding: utf-8 -*-
from infcommon.factory import Factory
from infcommon.yaml_reader.yaml_reader import YamlReader
def yaml_reader(path=None):
return Factory.instance('yaml_reader', lambda: YamlReader(path))
<commit_msg>[FEATURE] Use environment variable with deafult path when no path given<commit_after># -*- coding: utf-8 -*-
import os
from infcommon.factory import Factory
from infcommon.yaml_reader.yaml_reader import YamlReader
DEFAULT_PATH_ENVIRONMENT_VARIABLE_NAME = 'CONF_FILE'
def yaml_reader(path=None):
path = path or os.environ[DEFAULT_PATH_ENVIRONMENT_VARIABLE_NAME]
return Factory.instance('yaml_reader',
lambda: YamlReader(path))
|
1c05f5f88d84d90ca949bab6e3d48c5f1e35a909 | keystone/server/flask/request_processing/req_logging.py | keystone/server/flask/request_processing/req_logging.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# LOG some debug output about the request. This was originally in the
# dispatch middleware
import flask
from oslo_log import log
LOG = log.getLogger(__name__)
_ENVIRON_KEYS = ('SCRIPT_NAME', 'PATH_INFO')
def log_request_info():
    """Debug-log selected WSGI environ keys for the current flask request.

    Logs SCRIPT_NAME and PATH_INFO (the _ENVIRON_KEYS tuple), emitting
    '<<NOT SET>>' for any key missing from the environ.
    """
    # Add in any extra debug logging about the request that is desired
    # note that this is executed prior to routing the request to a resource
    # so the data is somewhat raw.
    for element in _ENVIRON_KEYS:
        LOG.debug("environ['%(key)s']: %(value)s",
                  {'key': element,
                   'value': flask.request.environ.get(element, '<<NOT SET>>')})
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# LOG some debug output about the request. This was originally in the
# dispatch middleware
import flask
from oslo_log import log
LOG = log.getLogger(__name__)
def log_request_info():
# Add in any extra debug logging about the request that is desired
# note that this is executed prior to routing the request to a resource
# so the data is somewhat raw.
LOG.debug('REQUEST_METHOD: `%s`', flask.request.method)
LOG.debug('SCRIPT_NAME: `%s`', flask.request.script_root)
LOG.debug('PATH_INFO: `%s`', flask.request.path)
| Make Request Logging a little better | Make Request Logging a little better
Use the flask.request properties instead of direct environ lookups,
as this is more representative of what is happening in the application.
Change-Id: Ic16c5ea26b2f526b51ef167e6f6977c72df1d06a
Partial-Bug: #1776504
| Python | apache-2.0 | openstack/keystone,mahak/keystone,mahak/keystone,openstack/keystone,mahak/keystone,openstack/keystone | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# LOG some debug output about the request. This was originally in the
# dispatch middleware
import flask
from oslo_log import log
LOG = log.getLogger(__name__)
_ENVIRON_KEYS = ('SCRIPT_NAME', 'PATH_INFO')
def log_request_info():
# Add in any extra debug logging about the request that is desired
# note that this is executed prior to routing the request to a resource
# so the data is somewhat raw.
for element in _ENVIRON_KEYS:
LOG.debug("environ['%(key)s']: %(value)s",
{'key': element,
'value': flask.request.environ.get(element, '<<NOT SET>>')})
Make Request Logging a little better
Use the flask.request properties instead of direct environ lookups,
as this is more representative of what is happening in the application.
Change-Id: Ic16c5ea26b2f526b51ef167e6f6977c72df1d06a
Partial-Bug: #1776504 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# LOG some debug output about the request. This was originally in the
# dispatch middleware
import flask
from oslo_log import log
LOG = log.getLogger(__name__)
def log_request_info():
# Add in any extra debug logging about the request that is desired
# note that this is executed prior to routing the request to a resource
# so the data is somewhat raw.
LOG.debug('REQUEST_METHOD: `%s`', flask.request.method)
LOG.debug('SCRIPT_NAME: `%s`', flask.request.script_root)
LOG.debug('PATH_INFO: `%s`', flask.request.path)
| <commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# LOG some debug output about the request. This was originally in the
# dispatch middleware
import flask
from oslo_log import log
LOG = log.getLogger(__name__)
_ENVIRON_KEYS = ('SCRIPT_NAME', 'PATH_INFO')
def log_request_info():
# Add in any extra debug logging about the request that is desired
# note that this is executed prior to routing the request to a resource
# so the data is somewhat raw.
for element in _ENVIRON_KEYS:
LOG.debug("environ['%(key)s']: %(value)s",
{'key': element,
'value': flask.request.environ.get(element, '<<NOT SET>>')})
<commit_msg>Make Request Logging a little better
Use the flask.request properties instead of direct environ lookups,
as this is more representative of what is happening in the application.
Change-Id: Ic16c5ea26b2f526b51ef167e6f6977c72df1d06a
Partial-Bug: #1776504<commit_after> | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# LOG some debug output about the request. This was originally in the
# dispatch middleware
import flask
from oslo_log import log
LOG = log.getLogger(__name__)
def log_request_info():
# Add in any extra debug logging about the request that is desired
# note that this is executed prior to routing the request to a resource
# so the data is somewhat raw.
LOG.debug('REQUEST_METHOD: `%s`', flask.request.method)
LOG.debug('SCRIPT_NAME: `%s`', flask.request.script_root)
LOG.debug('PATH_INFO: `%s`', flask.request.path)
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# LOG some debug output about the request. This was originally in the
# dispatch middleware
import flask
from oslo_log import log
LOG = log.getLogger(__name__)
_ENVIRON_KEYS = ('SCRIPT_NAME', 'PATH_INFO')
def log_request_info():
# Add in any extra debug logging about the request that is desired
# note that this is executed prior to routing the request to a resource
# so the data is somewhat raw.
for element in _ENVIRON_KEYS:
LOG.debug("environ['%(key)s']: %(value)s",
{'key': element,
'value': flask.request.environ.get(element, '<<NOT SET>>')})
Make Request Logging a little better
Use the flask.request properties instead of direct environ lookups,
as this is more representative of what is happening in the application.
Change-Id: Ic16c5ea26b2f526b51ef167e6f6977c72df1d06a
Partial-Bug: #1776504# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# LOG some debug output about the request. This was originally in the
# dispatch middleware
import flask
from oslo_log import log
LOG = log.getLogger(__name__)
def log_request_info():
# Add in any extra debug logging about the request that is desired
# note that this is executed prior to routing the request to a resource
# so the data is somewhat raw.
LOG.debug('REQUEST_METHOD: `%s`', flask.request.method)
LOG.debug('SCRIPT_NAME: `%s`', flask.request.script_root)
LOG.debug('PATH_INFO: `%s`', flask.request.path)
| <commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# LOG some debug output about the request. This was originally in the
# dispatch middleware
import flask
from oslo_log import log
LOG = log.getLogger(__name__)
_ENVIRON_KEYS = ('SCRIPT_NAME', 'PATH_INFO')
def log_request_info():
# Add in any extra debug logging about the request that is desired
# note that this is executed prior to routing the request to a resource
# so the data is somewhat raw.
for element in _ENVIRON_KEYS:
LOG.debug("environ['%(key)s']: %(value)s",
{'key': element,
'value': flask.request.environ.get(element, '<<NOT SET>>')})
<commit_msg>Make Request Logging a little better
Use the flask.request properties instead of direct environ lookups,
as this is more representative of what is happening in the application.
Change-Id: Ic16c5ea26b2f526b51ef167e6f6977c72df1d06a
Partial-Bug: #1776504<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# LOG some debug output about the request. This was originally in the
# dispatch middleware
import flask
from oslo_log import log
LOG = log.getLogger(__name__)
def log_request_info():
# Add in any extra debug logging about the request that is desired
# note that this is executed prior to routing the request to a resource
# so the data is somewhat raw.
LOG.debug('REQUEST_METHOD: `%s`', flask.request.method)
LOG.debug('SCRIPT_NAME: `%s`', flask.request.script_root)
LOG.debug('PATH_INFO: `%s`', flask.request.path)
|
ebc5831cf8cd3a87c6d663c28afb94a952f4e42f | mint/scripts/db2db/migrate.py | mint/scripts/db2db/migrate.py | #!/usr/bin/python
#
# Copyright (c) 2009 rPath, Inc.
#
# All rights reserved.
#
import logging
from conary import dbstore
from mint.scripts.db2db import db2db
log = logging.getLogger(__name__)
def switchToPostgres(cfg):
    """Move the mint database onto PostgreSQL (fronted by pgpool).

    No-op when the configured driver is already postgres-based.  Otherwise
    the data is copied to the local postgres instance and the generated
    rbuilder config is rewritten to point at pgpool.
    """
    # Nothing to do if we are already on postgres, directly or via pgpool.
    if cfg.dbDriver in ('postgresql', 'pgpool'):
        return

    source = (cfg.dbDriver, cfg.dbPath)
    migration_target = ('postgresql', 'rbuilder@localhost:5439/mint')
    final_target = ('pgpool', 'rbuilder@localhost.localdomain:6432/mint')

    log.info("Migrating mint database from %s::%s to %s::%s",
             *(source + migration_target))
    db2db.move_database(source, migration_target)

    # Persist the new connection settings in rbuilder-generated.conf.
    log.info("Changing configured mint database to %s::%s", *final_target)
    cfg.dbDriver, cfg.dbPath = final_target
    cfg.writeGeneratedConfig()
| #!/usr/bin/python
#
# Copyright (c) 2009 rPath, Inc.
#
# All rights reserved.
#
import logging
from conary import dbstore
from mint.scripts.db2db import db2db
log = logging.getLogger(__name__)
def switchToPostgres(cfg):
if cfg.dbDriver in ('postgresql', 'pgpool'):
return
sourceTuple = (cfg.dbDriver, cfg.dbPath)
destTuple = ('postgresql', 'postgres@localhost:5439/mint')
finalTuple = ('pgpool', 'postgres@localhost.localdomain:6432/mint')
log.info("Migrating mint database from %s::%s to %s::%s",
*(sourceTuple + destTuple))
db2db.move_database(sourceTuple, destTuple)
# Update rbuilder-generated.conf
log.info("Changing configured mint database to %s::%s", *finalTuple)
cfg.dbDriver = finalTuple[0]
cfg.dbPath = finalTuple[1]
cfg.writeGeneratedConfig()
| Fix another use of the rbuilder postgres user | Fix another use of the rbuilder postgres user
| Python | apache-2.0 | sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint | #!/usr/bin/python
#
# Copyright (c) 2009 rPath, Inc.
#
# All rights reserved.
#
import logging
from conary import dbstore
from mint.scripts.db2db import db2db
log = logging.getLogger(__name__)
def switchToPostgres(cfg):
if cfg.dbDriver in ('postgresql', 'pgpool'):
return
sourceTuple = (cfg.dbDriver, cfg.dbPath)
destTuple = ('postgresql', 'rbuilder@localhost:5439/mint')
finalTuple = ('pgpool', 'rbuilder@localhost.localdomain:6432/mint')
log.info("Migrating mint database from %s::%s to %s::%s",
*(sourceTuple + destTuple))
db2db.move_database(sourceTuple, destTuple)
# Update rbuilder-generated.conf
log.info("Changing configured mint database to %s::%s", *finalTuple)
cfg.dbDriver = finalTuple[0]
cfg.dbPath = finalTuple[1]
cfg.writeGeneratedConfig()
Fix another use of the rbuilder postgres user | #!/usr/bin/python
#
# Copyright (c) 2009 rPath, Inc.
#
# All rights reserved.
#
import logging
from conary import dbstore
from mint.scripts.db2db import db2db
log = logging.getLogger(__name__)
def switchToPostgres(cfg):
if cfg.dbDriver in ('postgresql', 'pgpool'):
return
sourceTuple = (cfg.dbDriver, cfg.dbPath)
destTuple = ('postgresql', 'postgres@localhost:5439/mint')
finalTuple = ('pgpool', 'postgres@localhost.localdomain:6432/mint')
log.info("Migrating mint database from %s::%s to %s::%s",
*(sourceTuple + destTuple))
db2db.move_database(sourceTuple, destTuple)
# Update rbuilder-generated.conf
log.info("Changing configured mint database to %s::%s", *finalTuple)
cfg.dbDriver = finalTuple[0]
cfg.dbPath = finalTuple[1]
cfg.writeGeneratedConfig()
| <commit_before>#!/usr/bin/python
#
# Copyright (c) 2009 rPath, Inc.
#
# All rights reserved.
#
import logging
from conary import dbstore
from mint.scripts.db2db import db2db
log = logging.getLogger(__name__)
def switchToPostgres(cfg):
if cfg.dbDriver in ('postgresql', 'pgpool'):
return
sourceTuple = (cfg.dbDriver, cfg.dbPath)
destTuple = ('postgresql', 'rbuilder@localhost:5439/mint')
finalTuple = ('pgpool', 'rbuilder@localhost.localdomain:6432/mint')
log.info("Migrating mint database from %s::%s to %s::%s",
*(sourceTuple + destTuple))
db2db.move_database(sourceTuple, destTuple)
# Update rbuilder-generated.conf
log.info("Changing configured mint database to %s::%s", *finalTuple)
cfg.dbDriver = finalTuple[0]
cfg.dbPath = finalTuple[1]
cfg.writeGeneratedConfig()
<commit_msg>Fix another use of the rbuilder postgres user<commit_after> | #!/usr/bin/python
#
# Copyright (c) 2009 rPath, Inc.
#
# All rights reserved.
#
import logging
from conary import dbstore
from mint.scripts.db2db import db2db
log = logging.getLogger(__name__)
def switchToPostgres(cfg):
if cfg.dbDriver in ('postgresql', 'pgpool'):
return
sourceTuple = (cfg.dbDriver, cfg.dbPath)
destTuple = ('postgresql', 'postgres@localhost:5439/mint')
finalTuple = ('pgpool', 'postgres@localhost.localdomain:6432/mint')
log.info("Migrating mint database from %s::%s to %s::%s",
*(sourceTuple + destTuple))
db2db.move_database(sourceTuple, destTuple)
# Update rbuilder-generated.conf
log.info("Changing configured mint database to %s::%s", *finalTuple)
cfg.dbDriver = finalTuple[0]
cfg.dbPath = finalTuple[1]
cfg.writeGeneratedConfig()
| #!/usr/bin/python
#
# Copyright (c) 2009 rPath, Inc.
#
# All rights reserved.
#
import logging
from conary import dbstore
from mint.scripts.db2db import db2db
log = logging.getLogger(__name__)
def switchToPostgres(cfg):
if cfg.dbDriver in ('postgresql', 'pgpool'):
return
sourceTuple = (cfg.dbDriver, cfg.dbPath)
destTuple = ('postgresql', 'rbuilder@localhost:5439/mint')
finalTuple = ('pgpool', 'rbuilder@localhost.localdomain:6432/mint')
log.info("Migrating mint database from %s::%s to %s::%s",
*(sourceTuple + destTuple))
db2db.move_database(sourceTuple, destTuple)
# Update rbuilder-generated.conf
log.info("Changing configured mint database to %s::%s", *finalTuple)
cfg.dbDriver = finalTuple[0]
cfg.dbPath = finalTuple[1]
cfg.writeGeneratedConfig()
Fix another use of the rbuilder postgres user#!/usr/bin/python
#
# Copyright (c) 2009 rPath, Inc.
#
# All rights reserved.
#
import logging
from conary import dbstore
from mint.scripts.db2db import db2db
log = logging.getLogger(__name__)
def switchToPostgres(cfg):
if cfg.dbDriver in ('postgresql', 'pgpool'):
return
sourceTuple = (cfg.dbDriver, cfg.dbPath)
destTuple = ('postgresql', 'postgres@localhost:5439/mint')
finalTuple = ('pgpool', 'postgres@localhost.localdomain:6432/mint')
log.info("Migrating mint database from %s::%s to %s::%s",
*(sourceTuple + destTuple))
db2db.move_database(sourceTuple, destTuple)
# Update rbuilder-generated.conf
log.info("Changing configured mint database to %s::%s", *finalTuple)
cfg.dbDriver = finalTuple[0]
cfg.dbPath = finalTuple[1]
cfg.writeGeneratedConfig()
| <commit_before>#!/usr/bin/python
#
# Copyright (c) 2009 rPath, Inc.
#
# All rights reserved.
#
import logging
from conary import dbstore
from mint.scripts.db2db import db2db
log = logging.getLogger(__name__)
def switchToPostgres(cfg):
if cfg.dbDriver in ('postgresql', 'pgpool'):
return
sourceTuple = (cfg.dbDriver, cfg.dbPath)
destTuple = ('postgresql', 'rbuilder@localhost:5439/mint')
finalTuple = ('pgpool', 'rbuilder@localhost.localdomain:6432/mint')
log.info("Migrating mint database from %s::%s to %s::%s",
*(sourceTuple + destTuple))
db2db.move_database(sourceTuple, destTuple)
# Update rbuilder-generated.conf
log.info("Changing configured mint database to %s::%s", *finalTuple)
cfg.dbDriver = finalTuple[0]
cfg.dbPath = finalTuple[1]
cfg.writeGeneratedConfig()
<commit_msg>Fix another use of the rbuilder postgres user<commit_after>#!/usr/bin/python
#
# Copyright (c) 2009 rPath, Inc.
#
# All rights reserved.
#
import logging
from conary import dbstore
from mint.scripts.db2db import db2db
log = logging.getLogger(__name__)
def switchToPostgres(cfg):
if cfg.dbDriver in ('postgresql', 'pgpool'):
return
sourceTuple = (cfg.dbDriver, cfg.dbPath)
destTuple = ('postgresql', 'postgres@localhost:5439/mint')
finalTuple = ('pgpool', 'postgres@localhost.localdomain:6432/mint')
log.info("Migrating mint database from %s::%s to %s::%s",
*(sourceTuple + destTuple))
db2db.move_database(sourceTuple, destTuple)
# Update rbuilder-generated.conf
log.info("Changing configured mint database to %s::%s", *finalTuple)
cfg.dbDriver = finalTuple[0]
cfg.dbPath = finalTuple[1]
cfg.writeGeneratedConfig()
|
9a3d8a96ed9cf0d1d1f002bf324d57b099ddde0f | gateway/utils/testfinder.py | gateway/utils/testfinder.py | import os
import unittest
from gateway.utils.resourcelocator import ResourceLocator
from unittest import TestLoader
TEST_PATH = "tests"
verbosity = 1
test_loader = unittest.defaultTestLoader
def find_test_modules(test_modules=None):
test_locator = ResourceLocator.get_locator(TEST_PATH)
test_suite = test_loader.discover(test_locator.ROOT_PATH)
return test_suite
def run_tests(test_classes=None):
test_runner = unittest.TextTestRunner(verbosity=verbosity)
if test_classes:
suite = load_test_from_classes(test_classes)
if not suite.countTestCases():
return -1
else:
test_runner.run(suite)
return 0
tests = find_test_modules(test_modules)
test_runner.run(tests)
return 0
def load_test_from_classes(class_names):
test_locator = ResourceLocator.get_locator(TEST_PATH)
test_suite = test_loader.discover(test_locator.ROOT_PATH)
temp_ts = unittest.TestSuite()
for test in test_suite:
suite = test.__dict__["_tests"]
if len(suite):
for case in suite:
if case.__dict__["_tests"][0].__class__.__name__ in class_names:
temp_ts.addTest(case)
return temp_ts
def load_module_test_case(module_name):
return test_loader.loadTestsFromName(module_name)
| import os
import unittest
from gateway.utils.resourcelocator import ResourceLocator
from unittest import TestLoader
TEST_PATH = "tests"
verbosity = 1
test_loader = unittest.defaultTestLoader
def find_test_modules(test_modules=None):
test_locator = ResourceLocator.get_locator(TEST_PATH)
test_suite = test_loader.discover(test_locator.ROOT_PATH)
return test_suite
def run_tests(test_classes=None):
"""
run_tests - runs a test suite with specified paramters
:param test_classes: list of tests classnames to only test
:return int: -1 for failure or 0 for success
"""
test_runner = unittest.TextTestRunner(verbosity=verbosity)
if test_classes:
suite = load_test_from_classes(test_classes)
if not suite.countTestCases():
return -1
else:
test_runner.run(suite)
return 0
tests = find_test_modules(test_modules)
test_runner.run(tests)
return 0
def load_test_from_classes(class_names):
"""
load_test_from_classes - returns a suite with specified class_names
:param class_names: list of tests classnames to add to the suite
"""
test_suite = find_test_modules()
temp_ts = unittest.TestSuite()
for test in test_suite:
suite = test.__dict__["_tests"]
if len(suite):
for case in suite:
if case.__dict__["_tests"][0].__class__.__name__ in class_names:
temp_ts.addTest(case)
return temp_ts
| Add documentation Clean duplicate code remove unused code | Add documentation
Clean duplicate code
remove unused code
| Python | mit | aceofwings/Evt-Gateway,aceofwings/Evt-Gateway | import os
import unittest
from gateway.utils.resourcelocator import ResourceLocator
from unittest import TestLoader
TEST_PATH = "tests"
verbosity = 1
test_loader = unittest.defaultTestLoader
def find_test_modules(test_modules=None):
test_locator = ResourceLocator.get_locator(TEST_PATH)
test_suite = test_loader.discover(test_locator.ROOT_PATH)
return test_suite
def run_tests(test_classes=None):
test_runner = unittest.TextTestRunner(verbosity=verbosity)
if test_classes:
suite = load_test_from_classes(test_classes)
if not suite.countTestCases():
return -1
else:
test_runner.run(suite)
return 0
tests = find_test_modules(test_modules)
test_runner.run(tests)
return 0
def load_test_from_classes(class_names):
test_locator = ResourceLocator.get_locator(TEST_PATH)
test_suite = test_loader.discover(test_locator.ROOT_PATH)
temp_ts = unittest.TestSuite()
for test in test_suite:
suite = test.__dict__["_tests"]
if len(suite):
for case in suite:
if case.__dict__["_tests"][0].__class__.__name__ in class_names:
temp_ts.addTest(case)
return temp_ts
def load_module_test_case(module_name):
return test_loader.loadTestsFromName(module_name)
Add documentation
Clean duplicate code
remove unused code | import os
import unittest
from gateway.utils.resourcelocator import ResourceLocator
from unittest import TestLoader
TEST_PATH = "tests"
verbosity = 1
test_loader = unittest.defaultTestLoader
def find_test_modules(test_modules=None):
test_locator = ResourceLocator.get_locator(TEST_PATH)
test_suite = test_loader.discover(test_locator.ROOT_PATH)
return test_suite
def run_tests(test_classes=None):
"""
run_tests - runs a test suite with specified paramters
:param test_classes: list of tests classnames to only test
:return int: -1 for failure or 0 for success
"""
test_runner = unittest.TextTestRunner(verbosity=verbosity)
if test_classes:
suite = load_test_from_classes(test_classes)
if not suite.countTestCases():
return -1
else:
test_runner.run(suite)
return 0
tests = find_test_modules(test_modules)
test_runner.run(tests)
return 0
def load_test_from_classes(class_names):
"""
load_test_from_classes - returns a suite with specified class_names
:param class_names: list of tests classnames to add to the suite
"""
test_suite = find_test_modules()
temp_ts = unittest.TestSuite()
for test in test_suite:
suite = test.__dict__["_tests"]
if len(suite):
for case in suite:
if case.__dict__["_tests"][0].__class__.__name__ in class_names:
temp_ts.addTest(case)
return temp_ts
| <commit_before>import os
import unittest
from gateway.utils.resourcelocator import ResourceLocator
from unittest import TestLoader
TEST_PATH = "tests"
verbosity = 1
test_loader = unittest.defaultTestLoader
def find_test_modules(test_modules=None):
test_locator = ResourceLocator.get_locator(TEST_PATH)
test_suite = test_loader.discover(test_locator.ROOT_PATH)
return test_suite
def run_tests(test_classes=None):
test_runner = unittest.TextTestRunner(verbosity=verbosity)
if test_classes:
suite = load_test_from_classes(test_classes)
if not suite.countTestCases():
return -1
else:
test_runner.run(suite)
return 0
tests = find_test_modules(test_modules)
test_runner.run(tests)
return 0
def load_test_from_classes(class_names):
test_locator = ResourceLocator.get_locator(TEST_PATH)
test_suite = test_loader.discover(test_locator.ROOT_PATH)
temp_ts = unittest.TestSuite()
for test in test_suite:
suite = test.__dict__["_tests"]
if len(suite):
for case in suite:
if case.__dict__["_tests"][0].__class__.__name__ in class_names:
temp_ts.addTest(case)
return temp_ts
def load_module_test_case(module_name):
return test_loader.loadTestsFromName(module_name)
<commit_msg>Add documentation
Clean duplicate code
remove unused code<commit_after> | import os
import unittest
from gateway.utils.resourcelocator import ResourceLocator
from unittest import TestLoader
TEST_PATH = "tests"
verbosity = 1
test_loader = unittest.defaultTestLoader
def find_test_modules(test_modules=None):
test_locator = ResourceLocator.get_locator(TEST_PATH)
test_suite = test_loader.discover(test_locator.ROOT_PATH)
return test_suite
def run_tests(test_classes=None):
"""
run_tests - runs a test suite with specified paramters
:param test_classes: list of tests classnames to only test
:return int: -1 for failure or 0 for success
"""
test_runner = unittest.TextTestRunner(verbosity=verbosity)
if test_classes:
suite = load_test_from_classes(test_classes)
if not suite.countTestCases():
return -1
else:
test_runner.run(suite)
return 0
tests = find_test_modules(test_modules)
test_runner.run(tests)
return 0
def load_test_from_classes(class_names):
"""
load_test_from_classes - returns a suite with specified class_names
:param class_names: list of tests classnames to add to the suite
"""
test_suite = find_test_modules()
temp_ts = unittest.TestSuite()
for test in test_suite:
suite = test.__dict__["_tests"]
if len(suite):
for case in suite:
if case.__dict__["_tests"][0].__class__.__name__ in class_names:
temp_ts.addTest(case)
return temp_ts
| import os
import unittest
from gateway.utils.resourcelocator import ResourceLocator
from unittest import TestLoader
TEST_PATH = "tests"
verbosity = 1
test_loader = unittest.defaultTestLoader
def find_test_modules(test_modules=None):
test_locator = ResourceLocator.get_locator(TEST_PATH)
test_suite = test_loader.discover(test_locator.ROOT_PATH)
return test_suite
def run_tests(test_classes=None):
test_runner = unittest.TextTestRunner(verbosity=verbosity)
if test_classes:
suite = load_test_from_classes(test_classes)
if not suite.countTestCases():
return -1
else:
test_runner.run(suite)
return 0
tests = find_test_modules(test_modules)
test_runner.run(tests)
return 0
def load_test_from_classes(class_names):
test_locator = ResourceLocator.get_locator(TEST_PATH)
test_suite = test_loader.discover(test_locator.ROOT_PATH)
temp_ts = unittest.TestSuite()
for test in test_suite:
suite = test.__dict__["_tests"]
if len(suite):
for case in suite:
if case.__dict__["_tests"][0].__class__.__name__ in class_names:
temp_ts.addTest(case)
return temp_ts
def load_module_test_case(module_name):
return test_loader.loadTestsFromName(module_name)
Add documentation
Clean duplicate code
remove unused codeimport os
import unittest
from gateway.utils.resourcelocator import ResourceLocator
from unittest import TestLoader
TEST_PATH = "tests"
verbosity = 1
test_loader = unittest.defaultTestLoader
def find_test_modules(test_modules=None):
test_locator = ResourceLocator.get_locator(TEST_PATH)
test_suite = test_loader.discover(test_locator.ROOT_PATH)
return test_suite
def run_tests(test_classes=None):
"""
run_tests - runs a test suite with specified paramters
:param test_classes: list of tests classnames to only test
:return int: -1 for failure or 0 for success
"""
test_runner = unittest.TextTestRunner(verbosity=verbosity)
if test_classes:
suite = load_test_from_classes(test_classes)
if not suite.countTestCases():
return -1
else:
test_runner.run(suite)
return 0
tests = find_test_modules(test_modules)
test_runner.run(tests)
return 0
def load_test_from_classes(class_names):
"""
load_test_from_classes - returns a suite with specified class_names
:param class_names: list of tests classnames to add to the suite
"""
test_suite = find_test_modules()
temp_ts = unittest.TestSuite()
for test in test_suite:
suite = test.__dict__["_tests"]
if len(suite):
for case in suite:
if case.__dict__["_tests"][0].__class__.__name__ in class_names:
temp_ts.addTest(case)
return temp_ts
| <commit_before>import os
import unittest
from gateway.utils.resourcelocator import ResourceLocator
from unittest import TestLoader
TEST_PATH = "tests"
verbosity = 1
test_loader = unittest.defaultTestLoader
def find_test_modules(test_modules=None):
test_locator = ResourceLocator.get_locator(TEST_PATH)
test_suite = test_loader.discover(test_locator.ROOT_PATH)
return test_suite
def run_tests(test_classes=None):
test_runner = unittest.TextTestRunner(verbosity=verbosity)
if test_classes:
suite = load_test_from_classes(test_classes)
if not suite.countTestCases():
return -1
else:
test_runner.run(suite)
return 0
tests = find_test_modules(test_modules)
test_runner.run(tests)
return 0
def load_test_from_classes(class_names):
test_locator = ResourceLocator.get_locator(TEST_PATH)
test_suite = test_loader.discover(test_locator.ROOT_PATH)
temp_ts = unittest.TestSuite()
for test in test_suite:
suite = test.__dict__["_tests"]
if len(suite):
for case in suite:
if case.__dict__["_tests"][0].__class__.__name__ in class_names:
temp_ts.addTest(case)
return temp_ts
def load_module_test_case(module_name):
return test_loader.loadTestsFromName(module_name)
<commit_msg>Add documentation
Clean duplicate code
remove unused code<commit_after>import os
import unittest
from gateway.utils.resourcelocator import ResourceLocator
from unittest import TestLoader
TEST_PATH = "tests"
verbosity = 1
test_loader = unittest.defaultTestLoader
def find_test_modules(test_modules=None):
test_locator = ResourceLocator.get_locator(TEST_PATH)
test_suite = test_loader.discover(test_locator.ROOT_PATH)
return test_suite
def run_tests(test_classes=None):
"""
run_tests - runs a test suite with specified paramters
:param test_classes: list of tests classnames to only test
:return int: -1 for failure or 0 for success
"""
test_runner = unittest.TextTestRunner(verbosity=verbosity)
if test_classes:
suite = load_test_from_classes(test_classes)
if not suite.countTestCases():
return -1
else:
test_runner.run(suite)
return 0
tests = find_test_modules(test_modules)
test_runner.run(tests)
return 0
def load_test_from_classes(class_names):
"""
load_test_from_classes - returns a suite with specified class_names
:param class_names: list of tests classnames to add to the suite
"""
test_suite = find_test_modules()
temp_ts = unittest.TestSuite()
for test in test_suite:
suite = test.__dict__["_tests"]
if len(suite):
for case in suite:
if case.__dict__["_tests"][0].__class__.__name__ in class_names:
temp_ts.addTest(case)
return temp_ts
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.