column           type            min    max
commit           stringlengths   40     40
old_file         stringlengths   4      118
new_file         stringlengths   4      118
old_contents     stringlengths   0      2.94k
new_contents     stringlengths   1      4.43k
subject          stringlengths   15     444
message          stringlengths   16     3.45k
lang             stringclasses   1 value
license          stringclasses   13 values
repos            stringlengths   5      43.2k
prompt           stringlengths   17     4.58k
response         stringlengths   1      4.43k
prompt_tagged    stringlengths   58     4.62k
response_tagged  stringlengths   1      4.43k
text             stringlengths   132    7.29k
text_tagged      stringlengths   173    7.33k
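Each row below fills these sixteen fields in order, one per line, with empty fields (such as old_contents for newly added files) omitted. As a minimal sketch of how such a dataset could be loaded and inspected, assuming it were published on the Hugging Face Hub — the id "user/python-commits" is a hypothetical placeholder, not the real dataset name:

from datasets import load_dataset  # requires the `datasets` package

# Hypothetical loading sketch; swap in the actual dataset id.
ds = load_dataset("user/python-commits", split="train")
row = ds[0]
for col in ("commit", "old_file", "new_file", "subject", "lang", "license"):
    print(col, "->", row[col])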
764958cd46ecc0f40e609eab2deb1e7be5696617
tests/formatter/test_csver.py
tests/formatter/test_csver.py
import unittest, argparse

from echolalia.formatter.csver import Formatter

class CsverTestCase(unittest.TestCase):

    def setUp(self):
        self.parser = argparse.ArgumentParser()
        self.data = [{'char': chr(i), 'order': i - 96} for i in xrange(97, 100)]
        self.formatter = Formatter()

    def test_add_args(self):
        new_parser = self.formatter.add_args(self.parser)
        self.assertEqual(new_parser, self.parser)
        args = new_parser.parse_args(['--with_header'])
        self.assertTrue(args.with_header)
        args = new_parser.parse_args([])
        self.assertFalse(args.with_header)

    def test_marshall_no_header(self):
        new_parser = self.formatter.add_args(self.parser)
        args = new_parser.parse_args([])
        result = self.formatter.marshall(args, self.data)
        expect = "a,1\r\nb,2\r\nc,3\r\n"

    def test_marshall_with_header(self):
        new_parser = self.formatter.add_args(self.parser)
        args = new_parser.parse_args(['--with_header'])
        result = self.formatter.marshall(args, self.data)
        expect = "char,order\r\na,1\r\nb,2\r\nc,3\r\n"
        self.assertEqual(result, expect)
Add tests for formatter csv
Add tests for formatter csv
Python
mit
eiri/echolalia-prototype
Add tests for formatter csv
import unittest, argparse

from echolalia.formatter.csver import Formatter

class CsverTestCase(unittest.TestCase):

    def setUp(self):
        self.parser = argparse.ArgumentParser()
        self.data = [{'char': chr(i), 'order': i - 96} for i in xrange(97, 100)]
        self.formatter = Formatter()

    def test_add_args(self):
        new_parser = self.formatter.add_args(self.parser)
        self.assertEqual(new_parser, self.parser)
        args = new_parser.parse_args(['--with_header'])
        self.assertTrue(args.with_header)
        args = new_parser.parse_args([])
        self.assertFalse(args.with_header)

    def test_marshall_no_header(self):
        new_parser = self.formatter.add_args(self.parser)
        args = new_parser.parse_args([])
        result = self.formatter.marshall(args, self.data)
        expect = "a,1\r\nb,2\r\nc,3\r\n"

    def test_marshall_with_header(self):
        new_parser = self.formatter.add_args(self.parser)
        args = new_parser.parse_args(['--with_header'])
        result = self.formatter.marshall(args, self.data)
        expect = "char,order\r\na,1\r\nb,2\r\nc,3\r\n"
        self.assertEqual(result, expect)
<commit_before><commit_msg>Add tests for formatter csv<commit_after>
import unittest, argparse

from echolalia.formatter.csver import Formatter

class CsverTestCase(unittest.TestCase):

    def setUp(self):
        self.parser = argparse.ArgumentParser()
        self.data = [{'char': chr(i), 'order': i - 96} for i in xrange(97, 100)]
        self.formatter = Formatter()

    def test_add_args(self):
        new_parser = self.formatter.add_args(self.parser)
        self.assertEqual(new_parser, self.parser)
        args = new_parser.parse_args(['--with_header'])
        self.assertTrue(args.with_header)
        args = new_parser.parse_args([])
        self.assertFalse(args.with_header)

    def test_marshall_no_header(self):
        new_parser = self.formatter.add_args(self.parser)
        args = new_parser.parse_args([])
        result = self.formatter.marshall(args, self.data)
        expect = "a,1\r\nb,2\r\nc,3\r\n"

    def test_marshall_with_header(self):
        new_parser = self.formatter.add_args(self.parser)
        args = new_parser.parse_args(['--with_header'])
        result = self.formatter.marshall(args, self.data)
        expect = "char,order\r\na,1\r\nb,2\r\nc,3\r\n"
        self.assertEqual(result, expect)
Add tests for formatter csv

import unittest, argparse

from echolalia.formatter.csver import Formatter

class CsverTestCase(unittest.TestCase):

    def setUp(self):
        self.parser = argparse.ArgumentParser()
        self.data = [{'char': chr(i), 'order': i - 96} for i in xrange(97, 100)]
        self.formatter = Formatter()

    def test_add_args(self):
        new_parser = self.formatter.add_args(self.parser)
        self.assertEqual(new_parser, self.parser)
        args = new_parser.parse_args(['--with_header'])
        self.assertTrue(args.with_header)
        args = new_parser.parse_args([])
        self.assertFalse(args.with_header)

    def test_marshall_no_header(self):
        new_parser = self.formatter.add_args(self.parser)
        args = new_parser.parse_args([])
        result = self.formatter.marshall(args, self.data)
        expect = "a,1\r\nb,2\r\nc,3\r\n"

    def test_marshall_with_header(self):
        new_parser = self.formatter.add_args(self.parser)
        args = new_parser.parse_args(['--with_header'])
        result = self.formatter.marshall(args, self.data)
        expect = "char,order\r\na,1\r\nb,2\r\nc,3\r\n"
        self.assertEqual(result, expect)
<commit_before><commit_msg>Add tests for formatter csv<commit_after>
import unittest, argparse

from echolalia.formatter.csver import Formatter

class CsverTestCase(unittest.TestCase):

    def setUp(self):
        self.parser = argparse.ArgumentParser()
        self.data = [{'char': chr(i), 'order': i - 96} for i in xrange(97, 100)]
        self.formatter = Formatter()

    def test_add_args(self):
        new_parser = self.formatter.add_args(self.parser)
        self.assertEqual(new_parser, self.parser)
        args = new_parser.parse_args(['--with_header'])
        self.assertTrue(args.with_header)
        args = new_parser.parse_args([])
        self.assertFalse(args.with_header)

    def test_marshall_no_header(self):
        new_parser = self.formatter.add_args(self.parser)
        args = new_parser.parse_args([])
        result = self.formatter.marshall(args, self.data)
        expect = "a,1\r\nb,2\r\nc,3\r\n"

    def test_marshall_with_header(self):
        new_parser = self.formatter.add_args(self.parser)
        args = new_parser.parse_args(['--with_header'])
        result = self.formatter.marshall(args, self.data)
        expect = "char,order\r\na,1\r\nb,2\r\nc,3\r\n"
        self.assertEqual(result, expect)
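The tagged fields in the row above follow a fixed template: prompt_tagged wraps the pre-commit file contents and the commit message in <commit_before>, <commit_msg>, and <commit_after> tokens, and text_tagged appends the post-commit contents. A minimal sketch of that templating, inferred from the visible fields (the helper name is ours, not part of the dataset):

def build_tagged(old_contents, message, new_contents):
    # Template visible in the prompt_tagged / text_tagged fields above.
    prompt_tagged = "<commit_before>{}<commit_msg>{}<commit_after>".format(
        old_contents, message)
    return prompt_tagged, prompt_tagged + new_contents

# For a brand-new file, old_contents is empty, as in the row above.
prompt, text = build_tagged("", "Add tests for formatter csv", "import unittest, argparse\n...")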
e1d09aad308aabd76f2be808806c5ed024f31d14
dartcms/apps/modules/migrations/0004_insert_modules.py
dartcms/apps/modules/migrations/0004_insert_modules.py
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-05-20 02:26
from __future__ import unicode_literals

from django.db import migrations

from dartcms.apps.modules.models import Module, ModuleGroup

MODULE_GROUPS = [
    {
        'sort': 4,
        'description': '',
        'fa': 'fa-rocket',
        'slug': 'ads',
        'name': 'Advertising',
        'modules': [
            {'sort': 1, 'is_enabled': True, 'name': 'Ad banners', 'slug': 'ad', 'description': ''},
            {'sort': 2, 'is_enabled': True, 'name': 'Ad places', 'slug': 'adplace', 'description': ''},
            {'sort': 3, 'is_enabled': True, 'name': 'Ad sections', 'slug': 'adsection', 'description': ''}
        ]
    }
]

def insert_modules(apps, schema):
    for group in MODULE_GROUPS:
        group_modules = group.pop('modules', [])
        group = ModuleGroup.objects.create(**group)
        for module in group_modules:
            module['group'] = group
            Module.objects.create(**module)

def delete_modules(apps, schema):
    for group in MODULE_GROUPS:
        ModuleGroup.objects.get(slug=group['slug']).delete()

class Migration(migrations.Migration):

    dependencies = [
        ('modules', '0003_insert_modules'),
    ]

    operations = [
        migrations.RunPython(insert_modules, delete_modules)
    ]
Add migration for Ads modules
Add migration for Ads modules
Python
mit
astrikov-d/dartcms,astrikov-d/dartcms,astrikov-d/dartcms
Add migration for Ads modules
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-05-20 02:26
from __future__ import unicode_literals

from django.db import migrations

from dartcms.apps.modules.models import Module, ModuleGroup

MODULE_GROUPS = [
    {
        'sort': 4,
        'description': '',
        'fa': 'fa-rocket',
        'slug': 'ads',
        'name': 'Advertising',
        'modules': [
            {'sort': 1, 'is_enabled': True, 'name': 'Ad banners', 'slug': 'ad', 'description': ''},
            {'sort': 2, 'is_enabled': True, 'name': 'Ad places', 'slug': 'adplace', 'description': ''},
            {'sort': 3, 'is_enabled': True, 'name': 'Ad sections', 'slug': 'adsection', 'description': ''}
        ]
    }
]

def insert_modules(apps, schema):
    for group in MODULE_GROUPS:
        group_modules = group.pop('modules', [])
        group = ModuleGroup.objects.create(**group)
        for module in group_modules:
            module['group'] = group
            Module.objects.create(**module)

def delete_modules(apps, schema):
    for group in MODULE_GROUPS:
        ModuleGroup.objects.get(slug=group['slug']).delete()

class Migration(migrations.Migration):

    dependencies = [
        ('modules', '0003_insert_modules'),
    ]

    operations = [
        migrations.RunPython(insert_modules, delete_modules)
    ]
<commit_before><commit_msg>Add migration for Ads modules<commit_after>
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-05-20 02:26
from __future__ import unicode_literals

from django.db import migrations

from dartcms.apps.modules.models import Module, ModuleGroup

MODULE_GROUPS = [
    {
        'sort': 4,
        'description': '',
        'fa': 'fa-rocket',
        'slug': 'ads',
        'name': 'Advertising',
        'modules': [
            {'sort': 1, 'is_enabled': True, 'name': 'Ad banners', 'slug': 'ad', 'description': ''},
            {'sort': 2, 'is_enabled': True, 'name': 'Ad places', 'slug': 'adplace', 'description': ''},
            {'sort': 3, 'is_enabled': True, 'name': 'Ad sections', 'slug': 'adsection', 'description': ''}
        ]
    }
]

def insert_modules(apps, schema):
    for group in MODULE_GROUPS:
        group_modules = group.pop('modules', [])
        group = ModuleGroup.objects.create(**group)
        for module in group_modules:
            module['group'] = group
            Module.objects.create(**module)

def delete_modules(apps, schema):
    for group in MODULE_GROUPS:
        ModuleGroup.objects.get(slug=group['slug']).delete()

class Migration(migrations.Migration):

    dependencies = [
        ('modules', '0003_insert_modules'),
    ]

    operations = [
        migrations.RunPython(insert_modules, delete_modules)
    ]
Add migration for Ads modules

# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-05-20 02:26
from __future__ import unicode_literals

from django.db import migrations

from dartcms.apps.modules.models import Module, ModuleGroup

MODULE_GROUPS = [
    {
        'sort': 4,
        'description': '',
        'fa': 'fa-rocket',
        'slug': 'ads',
        'name': 'Advertising',
        'modules': [
            {'sort': 1, 'is_enabled': True, 'name': 'Ad banners', 'slug': 'ad', 'description': ''},
            {'sort': 2, 'is_enabled': True, 'name': 'Ad places', 'slug': 'adplace', 'description': ''},
            {'sort': 3, 'is_enabled': True, 'name': 'Ad sections', 'slug': 'adsection', 'description': ''}
        ]
    }
]

def insert_modules(apps, schema):
    for group in MODULE_GROUPS:
        group_modules = group.pop('modules', [])
        group = ModuleGroup.objects.create(**group)
        for module in group_modules:
            module['group'] = group
            Module.objects.create(**module)

def delete_modules(apps, schema):
    for group in MODULE_GROUPS:
        ModuleGroup.objects.get(slug=group['slug']).delete()

class Migration(migrations.Migration):

    dependencies = [
        ('modules', '0003_insert_modules'),
    ]

    operations = [
        migrations.RunPython(insert_modules, delete_modules)
    ]
<commit_before><commit_msg>Add migration for Ads modules<commit_after>
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-05-20 02:26
from __future__ import unicode_literals

from django.db import migrations

from dartcms.apps.modules.models import Module, ModuleGroup

MODULE_GROUPS = [
    {
        'sort': 4,
        'description': '',
        'fa': 'fa-rocket',
        'slug': 'ads',
        'name': 'Advertising',
        'modules': [
            {'sort': 1, 'is_enabled': True, 'name': 'Ad banners', 'slug': 'ad', 'description': ''},
            {'sort': 2, 'is_enabled': True, 'name': 'Ad places', 'slug': 'adplace', 'description': ''},
            {'sort': 3, 'is_enabled': True, 'name': 'Ad sections', 'slug': 'adsection', 'description': ''}
        ]
    }
]

def insert_modules(apps, schema):
    for group in MODULE_GROUPS:
        group_modules = group.pop('modules', [])
        group = ModuleGroup.objects.create(**group)
        for module in group_modules:
            module['group'] = group
            Module.objects.create(**module)

def delete_modules(apps, schema):
    for group in MODULE_GROUPS:
        ModuleGroup.objects.get(slug=group['slug']).delete()

class Migration(migrations.Migration):

    dependencies = [
        ('modules', '0003_insert_modules'),
    ]

    operations = [
        migrations.RunPython(insert_modules, delete_modules)
    ]
f1c482f659311b5471dfde17a7b61251f33bf1e4
examples/demoproject/demoapp/views.py
examples/demoproject/demoapp/views.py
# Create your views here.
from tasks import add
from django.http import HttpResponse

def foo(request):
    r = add.delay(2, 2)
    return HttpResponse(r.task_id)
# Create your views here.
from demoapp import tasks
from django.http import HttpResponse

def foo(request):
    r = tasks.add.delay(2, 2)
    return HttpResponse(r.task_id)
Use from demoapp import tasks instead
Use from demoapp import tasks instead
Python
bsd-3-clause
digimarc/django-celery,nadios/django-celery,nadios/django-celery,tkanemoto/django-celery,celery/django-celery,Amanit/django-celery,CloudNcodeInc/django-celery,georgewhewell/django-celery,kanemra/django-celery,ask/django-celery,planorama/django-celery,axiom-data-science/django-celery,alexhayes/django-celery,georgewhewell/django-celery,axiom-data-science/django-celery,iris-edu-int/django-celery,ask/django-celery,digimarc/django-celery,kanemra/django-celery,alexhayes/django-celery,tkanemoto/django-celery,kanemra/django-celery,celery/django-celery,tkanemoto/django-celery,iris-edu-int/django-celery,CloudNcodeInc/django-celery,celery/django-celery,planorama/django-celery,Amanit/django-celery,iris-edu-int/django-celery,CloudNcodeInc/django-celery,axiom-data-science/django-celery,georgewhewell/django-celery,Amanit/django-celery,digimarc/django-celery
# Create your views here.
from tasks import add
from django.http import HttpResponse

def foo(request):
    r = add.delay(2, 2)
    return HttpResponse(r.task_id)

Use from demoapp import tasks instead
# Create your views here.
from demoapp import tasks
from django.http import HttpResponse

def foo(request):
    r = tasks.add.delay(2, 2)
    return HttpResponse(r.task_id)
<commit_before>
# Create your views here.
from tasks import add
from django.http import HttpResponse

def foo(request):
    r = add.delay(2, 2)
    return HttpResponse(r.task_id)
<commit_msg>Use from demoapp import tasks instead<commit_after>
# Create your views here.
from demoapp import tasks
from django.http import HttpResponse

def foo(request):
    r = tasks.add.delay(2, 2)
    return HttpResponse(r.task_id)
# Create your views here.
from tasks import add
from django.http import HttpResponse

def foo(request):
    r = add.delay(2, 2)
    return HttpResponse(r.task_id)

Use from demoapp import tasks instead

# Create your views here.
from demoapp import tasks
from django.http import HttpResponse

def foo(request):
    r = tasks.add.delay(2, 2)
    return HttpResponse(r.task_id)
<commit_before>
# Create your views here.
from tasks import add
from django.http import HttpResponse

def foo(request):
    r = add.delay(2, 2)
    return HttpResponse(r.task_id)
<commit_msg>Use from demoapp import tasks instead<commit_after>
# Create your views here.
from demoapp import tasks
from django.http import HttpResponse

def foo(request):
    r = tasks.add.delay(2, 2)
    return HttpResponse(r.task_id)
7af4ec5f01af042b1a98401837a50b97af33a6f3
accounting/apps/books/managers.py
accounting/apps/books/managers.py
from datetime import date

from django.db import models
from django.db.models import Sum

class TotalQuerySetMixin(object):

    def _get_total(self, prop):
        return self.aggregate(sum=Sum(prop))["sum"]

    def total_paid(self):
        return self._get_total('payments__amount')

class InvoiceQuerySetMixin(object):

    def dued(self):
        return self.filter(date_issued__lte=date.today())

class EstimateQuerySet(TotalQuerySetMixin, models.QuerySet):
    pass

class InvoiceQuerySet(TotalQuerySetMixin, InvoiceQuerySetMixin, models.QuerySet):

    def turnover_excl_tax(self):
        return self._get_total('total_excl_tax')

    def turnover_incl_tax(self):
        return self._get_total('total_incl_tax')

class BillQuerySet(TotalQuerySetMixin, InvoiceQuerySetMixin, models.QuerySet):

    def debts_excl_tax(self):
        return self._get_total('total_excl_tax')

    def debts_incl_tax(self):
        return self._get_total('total_incl_tax')
from datetime import date

from django.db import models
from django.db.models import Sum

class TotalQuerySetMixin(object):

    def _get_total(self, prop):
        return self.aggregate(sum=Sum(prop))["sum"]

    def total_paid(self):
        return self._get_total('payments__amount')

class InvoiceQuerySetMixin(object):

    def dued(self):
        return self.filter(date_dued__lte=date.today())

class EstimateQuerySet(TotalQuerySetMixin, models.QuerySet):
    pass

class InvoiceQuerySet(TotalQuerySetMixin, InvoiceQuerySetMixin, models.QuerySet):

    def turnover_excl_tax(self):
        return self._get_total('total_excl_tax')

    def turnover_incl_tax(self):
        return self._get_total('total_incl_tax')

class BillQuerySet(TotalQuerySetMixin, InvoiceQuerySetMixin, models.QuerySet):

    def debts_excl_tax(self):
        return self._get_total('total_excl_tax')

    def debts_incl_tax(self):
        return self._get_total('total_incl_tax')
Fix the dued queryset filter
Fix the dued queryset filter
Python
mit
dulaccc/django-accounting,dulaccc/django-accounting,dulaccc/django-accounting,kenjhim/django-accounting,dulaccc/django-accounting,kenjhim/django-accounting,kenjhim/django-accounting,kenjhim/django-accounting
from datetime import date

from django.db import models
from django.db.models import Sum

class TotalQuerySetMixin(object):

    def _get_total(self, prop):
        return self.aggregate(sum=Sum(prop))["sum"]

    def total_paid(self):
        return self._get_total('payments__amount')

class InvoiceQuerySetMixin(object):

    def dued(self):
        return self.filter(date_issued__lte=date.today())

class EstimateQuerySet(TotalQuerySetMixin, models.QuerySet):
    pass

class InvoiceQuerySet(TotalQuerySetMixin, InvoiceQuerySetMixin, models.QuerySet):

    def turnover_excl_tax(self):
        return self._get_total('total_excl_tax')

    def turnover_incl_tax(self):
        return self._get_total('total_incl_tax')

class BillQuerySet(TotalQuerySetMixin, InvoiceQuerySetMixin, models.QuerySet):

    def debts_excl_tax(self):
        return self._get_total('total_excl_tax')

    def debts_incl_tax(self):
        return self._get_total('total_incl_tax')

Fix the dued queryset filter
from datetime import date

from django.db import models
from django.db.models import Sum

class TotalQuerySetMixin(object):

    def _get_total(self, prop):
        return self.aggregate(sum=Sum(prop))["sum"]

    def total_paid(self):
        return self._get_total('payments__amount')

class InvoiceQuerySetMixin(object):

    def dued(self):
        return self.filter(date_dued__lte=date.today())

class EstimateQuerySet(TotalQuerySetMixin, models.QuerySet):
    pass

class InvoiceQuerySet(TotalQuerySetMixin, InvoiceQuerySetMixin, models.QuerySet):

    def turnover_excl_tax(self):
        return self._get_total('total_excl_tax')

    def turnover_incl_tax(self):
        return self._get_total('total_incl_tax')

class BillQuerySet(TotalQuerySetMixin, InvoiceQuerySetMixin, models.QuerySet):

    def debts_excl_tax(self):
        return self._get_total('total_excl_tax')

    def debts_incl_tax(self):
        return self._get_total('total_incl_tax')
<commit_before>
from datetime import date

from django.db import models
from django.db.models import Sum

class TotalQuerySetMixin(object):

    def _get_total(self, prop):
        return self.aggregate(sum=Sum(prop))["sum"]

    def total_paid(self):
        return self._get_total('payments__amount')

class InvoiceQuerySetMixin(object):

    def dued(self):
        return self.filter(date_issued__lte=date.today())

class EstimateQuerySet(TotalQuerySetMixin, models.QuerySet):
    pass

class InvoiceQuerySet(TotalQuerySetMixin, InvoiceQuerySetMixin, models.QuerySet):

    def turnover_excl_tax(self):
        return self._get_total('total_excl_tax')

    def turnover_incl_tax(self):
        return self._get_total('total_incl_tax')

class BillQuerySet(TotalQuerySetMixin, InvoiceQuerySetMixin, models.QuerySet):

    def debts_excl_tax(self):
        return self._get_total('total_excl_tax')

    def debts_incl_tax(self):
        return self._get_total('total_incl_tax')
<commit_msg>Fix the dued queryset filter<commit_after>
from datetime import date

from django.db import models
from django.db.models import Sum

class TotalQuerySetMixin(object):

    def _get_total(self, prop):
        return self.aggregate(sum=Sum(prop))["sum"]

    def total_paid(self):
        return self._get_total('payments__amount')

class InvoiceQuerySetMixin(object):

    def dued(self):
        return self.filter(date_dued__lte=date.today())

class EstimateQuerySet(TotalQuerySetMixin, models.QuerySet):
    pass

class InvoiceQuerySet(TotalQuerySetMixin, InvoiceQuerySetMixin, models.QuerySet):

    def turnover_excl_tax(self):
        return self._get_total('total_excl_tax')

    def turnover_incl_tax(self):
        return self._get_total('total_incl_tax')

class BillQuerySet(TotalQuerySetMixin, InvoiceQuerySetMixin, models.QuerySet):

    def debts_excl_tax(self):
        return self._get_total('total_excl_tax')

    def debts_incl_tax(self):
        return self._get_total('total_incl_tax')
from datetime import date

from django.db import models
from django.db.models import Sum

class TotalQuerySetMixin(object):

    def _get_total(self, prop):
        return self.aggregate(sum=Sum(prop))["sum"]

    def total_paid(self):
        return self._get_total('payments__amount')

class InvoiceQuerySetMixin(object):

    def dued(self):
        return self.filter(date_issued__lte=date.today())

class EstimateQuerySet(TotalQuerySetMixin, models.QuerySet):
    pass

class InvoiceQuerySet(TotalQuerySetMixin, InvoiceQuerySetMixin, models.QuerySet):

    def turnover_excl_tax(self):
        return self._get_total('total_excl_tax')

    def turnover_incl_tax(self):
        return self._get_total('total_incl_tax')

class BillQuerySet(TotalQuerySetMixin, InvoiceQuerySetMixin, models.QuerySet):

    def debts_excl_tax(self):
        return self._get_total('total_excl_tax')

    def debts_incl_tax(self):
        return self._get_total('total_incl_tax')

Fix the dued queryset filter

from datetime import date

from django.db import models
from django.db.models import Sum

class TotalQuerySetMixin(object):

    def _get_total(self, prop):
        return self.aggregate(sum=Sum(prop))["sum"]

    def total_paid(self):
        return self._get_total('payments__amount')

class InvoiceQuerySetMixin(object):

    def dued(self):
        return self.filter(date_dued__lte=date.today())

class EstimateQuerySet(TotalQuerySetMixin, models.QuerySet):
    pass

class InvoiceQuerySet(TotalQuerySetMixin, InvoiceQuerySetMixin, models.QuerySet):

    def turnover_excl_tax(self):
        return self._get_total('total_excl_tax')

    def turnover_incl_tax(self):
        return self._get_total('total_incl_tax')

class BillQuerySet(TotalQuerySetMixin, InvoiceQuerySetMixin, models.QuerySet):

    def debts_excl_tax(self):
        return self._get_total('total_excl_tax')

    def debts_incl_tax(self):
        return self._get_total('total_incl_tax')
<commit_before>
from datetime import date

from django.db import models
from django.db.models import Sum

class TotalQuerySetMixin(object):

    def _get_total(self, prop):
        return self.aggregate(sum=Sum(prop))["sum"]

    def total_paid(self):
        return self._get_total('payments__amount')

class InvoiceQuerySetMixin(object):

    def dued(self):
        return self.filter(date_issued__lte=date.today())

class EstimateQuerySet(TotalQuerySetMixin, models.QuerySet):
    pass

class InvoiceQuerySet(TotalQuerySetMixin, InvoiceQuerySetMixin, models.QuerySet):

    def turnover_excl_tax(self):
        return self._get_total('total_excl_tax')

    def turnover_incl_tax(self):
        return self._get_total('total_incl_tax')

class BillQuerySet(TotalQuerySetMixin, InvoiceQuerySetMixin, models.QuerySet):

    def debts_excl_tax(self):
        return self._get_total('total_excl_tax')

    def debts_incl_tax(self):
        return self._get_total('total_incl_tax')
<commit_msg>Fix the dued queryset filter<commit_after>
from datetime import date

from django.db import models
from django.db.models import Sum

class TotalQuerySetMixin(object):

    def _get_total(self, prop):
        return self.aggregate(sum=Sum(prop))["sum"]

    def total_paid(self):
        return self._get_total('payments__amount')

class InvoiceQuerySetMixin(object):

    def dued(self):
        return self.filter(date_dued__lte=date.today())

class EstimateQuerySet(TotalQuerySetMixin, models.QuerySet):
    pass

class InvoiceQuerySet(TotalQuerySetMixin, InvoiceQuerySetMixin, models.QuerySet):

    def turnover_excl_tax(self):
        return self._get_total('total_excl_tax')

    def turnover_incl_tax(self):
        return self._get_total('total_incl_tax')

class BillQuerySet(TotalQuerySetMixin, InvoiceQuerySetMixin, models.QuerySet):

    def debts_excl_tax(self):
        return self._get_total('total_excl_tax')

    def debts_incl_tax(self):
        return self._get_total('total_incl_tax')
e807b17fe1c981f7ff50926358cdfebd563758b2
jsk_arc2017_common/node_scripts/and_scale_rosserial.py
jsk_arc2017_common/node_scripts/and_scale_rosserial.py
#!/usr/bin/env python

import serial

import rospy
from std_msgs.msg import Float32

class ANDScaleRosserial(object):

    """Read data from AND scale.

    Data Sheet: https://www.aandd.co.jp/adhome/pdf/manual/balance/ekew-i.pdf
    """

    def __init__(self):
        super(ANDScaleRosserial, self).__init__()
        port = rospy.get_param('~port', '/dev/ttyUSB0')
        rospy.loginfo('port=%s', port)
        # EK-i/EW-i series default settings
        self.ser = serial.Serial(
            port, baudrate=2400, bytesize=7, parity=serial.PARITY_EVEN)
        self.pub = rospy.Publisher('~output', Float32, queue_size=1)
        rate = rospy.get_param('~rate', 10)
        self.read_timer = rospy.Timer(
            rospy.Duration(1. / rate), self._read_timer_cb)

    def _read_timer_cb(self, event):
        if self.pub.get_num_connections() == 0:
            return
        self.ser.write('Q\r\n')
        data = self.ser.read(17)
        header = data[:2]
        if header == 'ST':
            # scale mode
            weight = float(data[3:12])
            unit = data[12:15]
            if unit != ' g':
                rospy.logerr('Unsupported unit: %s', unit)
                return
            msg = Float32(data=weight)
            self.pub.publish(msg)
        elif header == 'QT':
            # number mode
            rospy.loger('Unsupported mode: %s', header)
            return
        elif header == 'US':
            # unstable
            return
        elif header == 'OL':
            # scale over
            rospy.logerr('Scale over')
            return

if __name__ == '__main__':
    rospy.init_node('and_scale_rosserial')
    ANDScaleRosserial()
    rospy.spin()
Read weight data from AND scale
Read weight data from AND scale - new file: and_scale_rosserial.py
Python
bsd-3-clause
pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc
Read weight data from AND scale - new file: and_scale_rosserial.py
#!/usr/bin/env python

import serial

import rospy
from std_msgs.msg import Float32

class ANDScaleRosserial(object):

    """Read data from AND scale.

    Data Sheet: https://www.aandd.co.jp/adhome/pdf/manual/balance/ekew-i.pdf
    """

    def __init__(self):
        super(ANDScaleRosserial, self).__init__()
        port = rospy.get_param('~port', '/dev/ttyUSB0')
        rospy.loginfo('port=%s', port)
        # EK-i/EW-i series default settings
        self.ser = serial.Serial(
            port, baudrate=2400, bytesize=7, parity=serial.PARITY_EVEN)
        self.pub = rospy.Publisher('~output', Float32, queue_size=1)
        rate = rospy.get_param('~rate', 10)
        self.read_timer = rospy.Timer(
            rospy.Duration(1. / rate), self._read_timer_cb)

    def _read_timer_cb(self, event):
        if self.pub.get_num_connections() == 0:
            return
        self.ser.write('Q\r\n')
        data = self.ser.read(17)
        header = data[:2]
        if header == 'ST':
            # scale mode
            weight = float(data[3:12])
            unit = data[12:15]
            if unit != ' g':
                rospy.logerr('Unsupported unit: %s', unit)
                return
            msg = Float32(data=weight)
            self.pub.publish(msg)
        elif header == 'QT':
            # number mode
            rospy.loger('Unsupported mode: %s', header)
            return
        elif header == 'US':
            # unstable
            return
        elif header == 'OL':
            # scale over
            rospy.logerr('Scale over')
            return

if __name__ == '__main__':
    rospy.init_node('and_scale_rosserial')
    ANDScaleRosserial()
    rospy.spin()
<commit_before><commit_msg>Read weight data from AND scale - new file: and_scale_rosserial.py<commit_after>
#!/usr/bin/env python

import serial

import rospy
from std_msgs.msg import Float32

class ANDScaleRosserial(object):

    """Read data from AND scale.

    Data Sheet: https://www.aandd.co.jp/adhome/pdf/manual/balance/ekew-i.pdf
    """

    def __init__(self):
        super(ANDScaleRosserial, self).__init__()
        port = rospy.get_param('~port', '/dev/ttyUSB0')
        rospy.loginfo('port=%s', port)
        # EK-i/EW-i series default settings
        self.ser = serial.Serial(
            port, baudrate=2400, bytesize=7, parity=serial.PARITY_EVEN)
        self.pub = rospy.Publisher('~output', Float32, queue_size=1)
        rate = rospy.get_param('~rate', 10)
        self.read_timer = rospy.Timer(
            rospy.Duration(1. / rate), self._read_timer_cb)

    def _read_timer_cb(self, event):
        if self.pub.get_num_connections() == 0:
            return
        self.ser.write('Q\r\n')
        data = self.ser.read(17)
        header = data[:2]
        if header == 'ST':
            # scale mode
            weight = float(data[3:12])
            unit = data[12:15]
            if unit != ' g':
                rospy.logerr('Unsupported unit: %s', unit)
                return
            msg = Float32(data=weight)
            self.pub.publish(msg)
        elif header == 'QT':
            # number mode
            rospy.loger('Unsupported mode: %s', header)
            return
        elif header == 'US':
            # unstable
            return
        elif header == 'OL':
            # scale over
            rospy.logerr('Scale over')
            return

if __name__ == '__main__':
    rospy.init_node('and_scale_rosserial')
    ANDScaleRosserial()
    rospy.spin()
Read weight data from AND scale - new file: and_scale_rosserial.py

#!/usr/bin/env python

import serial

import rospy
from std_msgs.msg import Float32

class ANDScaleRosserial(object):

    """Read data from AND scale.

    Data Sheet: https://www.aandd.co.jp/adhome/pdf/manual/balance/ekew-i.pdf
    """

    def __init__(self):
        super(ANDScaleRosserial, self).__init__()
        port = rospy.get_param('~port', '/dev/ttyUSB0')
        rospy.loginfo('port=%s', port)
        # EK-i/EW-i series default settings
        self.ser = serial.Serial(
            port, baudrate=2400, bytesize=7, parity=serial.PARITY_EVEN)
        self.pub = rospy.Publisher('~output', Float32, queue_size=1)
        rate = rospy.get_param('~rate', 10)
        self.read_timer = rospy.Timer(
            rospy.Duration(1. / rate), self._read_timer_cb)

    def _read_timer_cb(self, event):
        if self.pub.get_num_connections() == 0:
            return
        self.ser.write('Q\r\n')
        data = self.ser.read(17)
        header = data[:2]
        if header == 'ST':
            # scale mode
            weight = float(data[3:12])
            unit = data[12:15]
            if unit != ' g':
                rospy.logerr('Unsupported unit: %s', unit)
                return
            msg = Float32(data=weight)
            self.pub.publish(msg)
        elif header == 'QT':
            # number mode
            rospy.loger('Unsupported mode: %s', header)
            return
        elif header == 'US':
            # unstable
            return
        elif header == 'OL':
            # scale over
            rospy.logerr('Scale over')
            return

if __name__ == '__main__':
    rospy.init_node('and_scale_rosserial')
    ANDScaleRosserial()
    rospy.spin()
<commit_before><commit_msg>Read weight data from AND scale - new file: and_scale_rosserial.py<commit_after>
#!/usr/bin/env python

import serial

import rospy
from std_msgs.msg import Float32

class ANDScaleRosserial(object):

    """Read data from AND scale.

    Data Sheet: https://www.aandd.co.jp/adhome/pdf/manual/balance/ekew-i.pdf
    """

    def __init__(self):
        super(ANDScaleRosserial, self).__init__()
        port = rospy.get_param('~port', '/dev/ttyUSB0')
        rospy.loginfo('port=%s', port)
        # EK-i/EW-i series default settings
        self.ser = serial.Serial(
            port, baudrate=2400, bytesize=7, parity=serial.PARITY_EVEN)
        self.pub = rospy.Publisher('~output', Float32, queue_size=1)
        rate = rospy.get_param('~rate', 10)
        self.read_timer = rospy.Timer(
            rospy.Duration(1. / rate), self._read_timer_cb)

    def _read_timer_cb(self, event):
        if self.pub.get_num_connections() == 0:
            return
        self.ser.write('Q\r\n')
        data = self.ser.read(17)
        header = data[:2]
        if header == 'ST':
            # scale mode
            weight = float(data[3:12])
            unit = data[12:15]
            if unit != ' g':
                rospy.logerr('Unsupported unit: %s', unit)
                return
            msg = Float32(data=weight)
            self.pub.publish(msg)
        elif header == 'QT':
            # number mode
            rospy.loger('Unsupported mode: %s', header)
            return
        elif header == 'US':
            # unstable
            return
        elif header == 'OL':
            # scale over
            rospy.logerr('Scale over')
            return

if __name__ == '__main__':
    rospy.init_node('and_scale_rosserial')
    ANDScaleRosserial()
    rospy.spin()
648f22bae2ed11e5387417d22c47ee0c9ab5e053
tests/convergence_tests/run_convergence_tests_lspr.py
tests/convergence_tests/run_convergence_tests_lspr.py
import os
import time
import subprocess
import datetime

from check_for_meshes import check_mesh

# tests to run
tests = ['sphere_lspr.py', 'sphere_multiple_lspr.py']

# specify CUDA device to use
CUDA_DEVICE = '0'
ENV = os.environ.copy()
ENV['CUDA_DEVICE'] = CUDA_DEVICE

mesh_file = ''
folder_name = 'lspr_convergence_test_meshes'
rename_folder = 'geometry_lspr'
size = '~3MB'

check_mesh(mesh_file, folder_name, rename_folder, size)

tic = time.time()
for test in tests:
    subprocess.call(['python', '{}'.format(test)])
toc = time.time()

print("Total runtime for convergence tests: ")
print(str(datetime.timedelta(seconds=(toc - tic))))
Add script to run all lspr convergence tests
Add script to run all lspr convergence tests
Python
bsd-3-clause
barbagroup/pygbe,barbagroup/pygbe,barbagroup/pygbe
Add script to run all lspr convergence tests
import os
import time
import subprocess
import datetime

from check_for_meshes import check_mesh

# tests to run
tests = ['sphere_lspr.py', 'sphere_multiple_lspr.py']

# specify CUDA device to use
CUDA_DEVICE = '0'
ENV = os.environ.copy()
ENV['CUDA_DEVICE'] = CUDA_DEVICE

mesh_file = ''
folder_name = 'lspr_convergence_test_meshes'
rename_folder = 'geometry_lspr'
size = '~3MB'

check_mesh(mesh_file, folder_name, rename_folder, size)

tic = time.time()
for test in tests:
    subprocess.call(['python', '{}'.format(test)])
toc = time.time()

print("Total runtime for convergence tests: ")
print(str(datetime.timedelta(seconds=(toc - tic))))
<commit_before><commit_msg>Add script to run all lspr convergence tests<commit_after>
import os
import time
import subprocess
import datetime

from check_for_meshes import check_mesh

# tests to run
tests = ['sphere_lspr.py', 'sphere_multiple_lspr.py']

# specify CUDA device to use
CUDA_DEVICE = '0'
ENV = os.environ.copy()
ENV['CUDA_DEVICE'] = CUDA_DEVICE

mesh_file = ''
folder_name = 'lspr_convergence_test_meshes'
rename_folder = 'geometry_lspr'
size = '~3MB'

check_mesh(mesh_file, folder_name, rename_folder, size)

tic = time.time()
for test in tests:
    subprocess.call(['python', '{}'.format(test)])
toc = time.time()

print("Total runtime for convergence tests: ")
print(str(datetime.timedelta(seconds=(toc - tic))))
Add script to run all lspr convergence tests

import os
import time
import subprocess
import datetime

from check_for_meshes import check_mesh

# tests to run
tests = ['sphere_lspr.py', 'sphere_multiple_lspr.py']

# specify CUDA device to use
CUDA_DEVICE = '0'
ENV = os.environ.copy()
ENV['CUDA_DEVICE'] = CUDA_DEVICE

mesh_file = ''
folder_name = 'lspr_convergence_test_meshes'
rename_folder = 'geometry_lspr'
size = '~3MB'

check_mesh(mesh_file, folder_name, rename_folder, size)

tic = time.time()
for test in tests:
    subprocess.call(['python', '{}'.format(test)])
toc = time.time()

print("Total runtime for convergence tests: ")
print(str(datetime.timedelta(seconds=(toc - tic))))
<commit_before><commit_msg>Add script to run all lspr convergence tests<commit_after>
import os
import time
import subprocess
import datetime

from check_for_meshes import check_mesh

# tests to run
tests = ['sphere_lspr.py', 'sphere_multiple_lspr.py']

# specify CUDA device to use
CUDA_DEVICE = '0'
ENV = os.environ.copy()
ENV['CUDA_DEVICE'] = CUDA_DEVICE

mesh_file = ''
folder_name = 'lspr_convergence_test_meshes'
rename_folder = 'geometry_lspr'
size = '~3MB'

check_mesh(mesh_file, folder_name, rename_folder, size)

tic = time.time()
for test in tests:
    subprocess.call(['python', '{}'.format(test)])
toc = time.time()

print("Total runtime for convergence tests: ")
print(str(datetime.timedelta(seconds=(toc - tic))))
589ae271909b3b5e8a5d153b143725f7d4a10491
tools/documentation_crawler/documentation_crawler/spiders/check_help_documentation.py
tools/documentation_crawler/documentation_crawler/spiders/check_help_documentation.py
#!/usr/bin/env python
from __future__ import print_function

from .common.spiders import BaseDocumentationSpider

class HelpDocumentationSpider(BaseDocumentationSpider):
    name = "help_documentation_crawler"
    start_urls = ['http://localhost:9981/help']
    deny_domains = []  # type: List[str]

    def _is_external_url(self, url):
        # type: (str) -> bool
        is_external = url.startswith('http') and 'localhost:9981/help' not in url
        return is_external or self._has_extension(url)
#!/usr/bin/env python
from __future__ import print_function

from .common.spiders import BaseDocumentationSpider

class HelpDocumentationSpider(BaseDocumentationSpider):
    name = "help_documentation_crawler"
    start_urls = ['http://localhost:9981/help']
    deny_domains = []  # type: List[str]
    deny = ['/privacy']

    def _is_external_url(self, url):
        # type: (str) -> bool
        is_external = url.startswith('http') and 'localhost:9981/help' not in url
        return is_external or self._has_extension(url)
Exclude privacy page from documentation checking
documentation-crawler: Exclude privacy page from documentation checking
Python
apache-2.0
isht3/zulip,vaidap/zulip,sonali0901/zulip,rht/zulip,shubhamdhama/zulip,amyliu345/zulip,Galexrt/zulip,souravbadami/zulip,jainayush975/zulip,shubhamdhama/zulip,christi3k/zulip,jrowan/zulip,samatdav/zulip,PhilSk/zulip,cosmicAsymmetry/zulip,KingxBanana/zulip,niftynei/zulip,Diptanshu8/zulip,isht3/zulip,dawran6/zulip,jrowan/zulip,mahim97/zulip,zulip/zulip,hackerkid/zulip,andersk/zulip,andersk/zulip,brockwhittaker/zulip,sharmaeklavya2/zulip,Galexrt/zulip,amanharitsh123/zulip,brainwane/zulip,SmartPeople/zulip,jphilipsen05/zulip,Diptanshu8/zulip,dattatreya303/zulip,aakash-cr7/zulip,hackerkid/zulip,isht3/zulip,isht3/zulip,arpith/zulip,dattatreya303/zulip,AZtheAsian/zulip,AZtheAsian/zulip,aakash-cr7/zulip,vabs22/zulip,vaidap/zulip,sharmaeklavya2/zulip,dawran6/zulip,brockwhittaker/zulip,brainwane/zulip,TigorC/zulip,aakash-cr7/zulip,AZtheAsian/zulip,hackerkid/zulip,SmartPeople/zulip,tommyip/zulip,kou/zulip,jackrzhang/zulip,christi3k/zulip,zacps/zulip,cosmicAsymmetry/zulip,verma-varsha/zulip,mahim97/zulip,dhcrzf/zulip,zacps/zulip,jainayush975/zulip,dhcrzf/zulip,jrowan/zulip,amanharitsh123/zulip,amanharitsh123/zulip,brainwane/zulip,SmartPeople/zulip,jrowan/zulip,cosmicAsymmetry/zulip,vabs22/zulip,mahim97/zulip,zulip/zulip,hackerkid/zulip,andersk/zulip,jackrzhang/zulip,christi3k/zulip,zacps/zulip,synicalsyntax/zulip,dhcrzf/zulip,jainayush975/zulip,dattatreya303/zulip,rht/zulip,timabbott/zulip,showell/zulip,niftynei/zulip,tommyip/zulip,kou/zulip,rishig/zulip,timabbott/zulip,eeshangarg/zulip,Galexrt/zulip,jrowan/zulip,dhcrzf/zulip,sharmaeklavya2/zulip,timabbott/zulip,punchagan/zulip,niftynei/zulip,synicalsyntax/zulip,dattatreya303/zulip,AZtheAsian/zulip,Diptanshu8/zulip,timabbott/zulip,niftynei/zulip,zacps/zulip,tommyip/zulip,arpith/zulip,KingxBanana/zulip,andersk/zulip,eeshangarg/zulip,showell/zulip,rishig/zulip,Diptanshu8/zulip,showell/zulip,jrowan/zulip,cosmicAsymmetry/zulip,brainwane/zulip,jphilipsen05/zulip,Juanvulcano/zulip,kou/zulip,dhcrzf/zulip,jphilipsen05/zulip,synicalsyntax/zulip,dawran6/zulip,blaze225/zulip,punchagan/zulip,blaze225/zulip,eeshangarg/zulip,ryanbackman/zulip,punchagan/zulip,rht/zulip,brainwane/zulip,rishig/zulip,isht3/zulip,hackerkid/zulip,JPJPJPOPOP/zulip,jackrzhang/zulip,zulip/zulip,dhcrzf/zulip,sharmaeklavya2/zulip,sonali0901/zulip,brockwhittaker/zulip,christi3k/zulip,blaze225/zulip,punchagan/zulip,christi3k/zulip,hackerkid/zulip,mahim97/zulip,amyliu345/zulip,samatdav/zulip,jainayush975/zulip,JPJPJPOPOP/zulip,jphilipsen05/zulip,zulip/zulip,ryanbackman/zulip,tommyip/zulip,dhcrzf/zulip,synicalsyntax/zulip,joyhchen/zulip,eeshangarg/zulip,hackerkid/zulip,mahim97/zulip,amyliu345/zulip,blaze225/zulip,vaidap/zulip,souravbadami/zulip,andersk/zulip,joyhchen/zulip,shubhamdhama/zulip,susansls/zulip,TigorC/zulip,kou/zulip,j831/zulip,niftynei/zulip,SmartPeople/zulip,AZtheAsian/zulip,KingxBanana/zulip,dawran6/zulip,KingxBanana/zulip,arpith/zulip,showell/zulip,Juanvulcano/zulip,JPJPJPOPOP/zulip,mahim97/zulip,kou/zulip,christi3k/zulip,brockwhittaker/zulip,vaidap/zulip,JPJPJPOPOP/zulip,timabbott/zulip,susansls/zulip,jackrzhang/zulip,tommyip/zulip,kou/zulip,cosmicAsymmetry/zulip,sonali0901/zulip,synicalsyntax/zulip,amanharitsh123/zulip,zulip/zulip,souravbadami/zulip,vabs22/zulip,eeshangarg/zulip,amanharitsh123/zulip,brainwane/zulip,arpith/zulip,showell/zulip,rishig/zulip,verma-varsha/zulip,jackrzhang/zulip,PhilSk/zulip,eeshangarg/zulip,timabbott/zulip,samatdav/zulip,punchagan/zulip,verma-varsha/zulip,vabs22/zulip,tommyip/zulip,blaze225/zulip,KingxBanana/zulip,amyliu345/zulip,aakash-cr7/zulip,samatdav/zulip,souravbadami/zulip,joyhchen/zulip,eeshangarg/zulip,Diptanshu8/zulip,brockwhittaker/zulip,vabs22/zulip,showell/zulip,TigorC/zulip,ryanbackman/zulip,rishig/zulip,j831/zulip,rht/zulip,sonali0901/zulip,susansls/zulip,andersk/zulip,Juanvulcano/zulip,verma-varsha/zulip,jackrzhang/zulip,zulip/zulip,rht/zulip,tommyip/zulip,vabs22/zulip,susansls/zulip,jrowan/zulip,dawran6/zulip,jackrzhang/zulip,PhilSk/zulip,jainayush975/zulip,shubhamdhama/zulip,punchagan/zulip,zulip/zulip,susansls/zulip,punchagan/zulip,SmartPeople/zulip,ryanbackman/zulip,arpith/zulip,souravbadami/zulip,joyhchen/zulip,jainayush975/zulip,isht3/zulip,j831/zulip,rht/zulip,PhilSk/zulip,j831/zulip,cosmicAsymmetry/zulip,timabbott/zulip,PhilSk/zulip,vaidap/zulip,andersk/zulip,zulip/zulip,Galexrt/zulip,Juanvulcano/zulip,dattatreya303/zulip,amanharitsh123/zulip,aakash-cr7/zulip,amanharitsh123/zulip,TigorC/zulip,joyhchen/zulip,sharmaeklavya2/zulip,showell/zulip,JPJPJPOPOP/zulip,kou/zulip,sonali0901/zulip,sharmaeklavya2/zulip,amyliu345/zulip,PhilSk/zulip,Diptanshu8/zulip,Juanvulcano/zulip,timabbott/zulip,synicalsyntax/zulip,blaze225/zulip,vaidap/zulip,hackerkid/zulip,dawran6/zulip,TigorC/zulip,zacps/zulip,samatdav/zulip,souravbadami/zulip,Juanvulcano/zulip,andersk/zulip,brockwhittaker/zulip,ryanbackman/zulip,mahim97/zulip,rht/zulip,sonali0901/zulip,j831/zulip,verma-varsha/zulip
#!/usr/bin/env python
from __future__ import print_function

from .common.spiders import BaseDocumentationSpider

class HelpDocumentationSpider(BaseDocumentationSpider):
    name = "help_documentation_crawler"
    start_urls = ['http://localhost:9981/help']
    deny_domains = []  # type: List[str]

    def _is_external_url(self, url):
        # type: (str) -> bool
        is_external = url.startswith('http') and 'localhost:9981/help' not in url
        return is_external or self._has_extension(url)

documentation-crawler: Exclude privacy page from documentation checking
#!/usr/bin/env python
from __future__ import print_function

from .common.spiders import BaseDocumentationSpider

class HelpDocumentationSpider(BaseDocumentationSpider):
    name = "help_documentation_crawler"
    start_urls = ['http://localhost:9981/help']
    deny_domains = []  # type: List[str]
    deny = ['/privacy']

    def _is_external_url(self, url):
        # type: (str) -> bool
        is_external = url.startswith('http') and 'localhost:9981/help' not in url
        return is_external or self._has_extension(url)
<commit_before>
#!/usr/bin/env python
from __future__ import print_function

from .common.spiders import BaseDocumentationSpider

class HelpDocumentationSpider(BaseDocumentationSpider):
    name = "help_documentation_crawler"
    start_urls = ['http://localhost:9981/help']
    deny_domains = []  # type: List[str]

    def _is_external_url(self, url):
        # type: (str) -> bool
        is_external = url.startswith('http') and 'localhost:9981/help' not in url
        return is_external or self._has_extension(url)
<commit_msg>documentation-crawler: Exclude privacy page from documentation checking<commit_after>
#!/usr/bin/env python
from __future__ import print_function

from .common.spiders import BaseDocumentationSpider

class HelpDocumentationSpider(BaseDocumentationSpider):
    name = "help_documentation_crawler"
    start_urls = ['http://localhost:9981/help']
    deny_domains = []  # type: List[str]
    deny = ['/privacy']

    def _is_external_url(self, url):
        # type: (str) -> bool
        is_external = url.startswith('http') and 'localhost:9981/help' not in url
        return is_external or self._has_extension(url)
#!/usr/bin/env python
from __future__ import print_function

from .common.spiders import BaseDocumentationSpider

class HelpDocumentationSpider(BaseDocumentationSpider):
    name = "help_documentation_crawler"
    start_urls = ['http://localhost:9981/help']
    deny_domains = []  # type: List[str]

    def _is_external_url(self, url):
        # type: (str) -> bool
        is_external = url.startswith('http') and 'localhost:9981/help' not in url
        return is_external or self._has_extension(url)

documentation-crawler: Exclude privacy page from documentation checking

#!/usr/bin/env python
from __future__ import print_function

from .common.spiders import BaseDocumentationSpider

class HelpDocumentationSpider(BaseDocumentationSpider):
    name = "help_documentation_crawler"
    start_urls = ['http://localhost:9981/help']
    deny_domains = []  # type: List[str]
    deny = ['/privacy']

    def _is_external_url(self, url):
        # type: (str) -> bool
        is_external = url.startswith('http') and 'localhost:9981/help' not in url
        return is_external or self._has_extension(url)
<commit_before>
#!/usr/bin/env python
from __future__ import print_function

from .common.spiders import BaseDocumentationSpider

class HelpDocumentationSpider(BaseDocumentationSpider):
    name = "help_documentation_crawler"
    start_urls = ['http://localhost:9981/help']
    deny_domains = []  # type: List[str]

    def _is_external_url(self, url):
        # type: (str) -> bool
        is_external = url.startswith('http') and 'localhost:9981/help' not in url
        return is_external or self._has_extension(url)
<commit_msg>documentation-crawler: Exclude privacy page from documentation checking<commit_after>
#!/usr/bin/env python
from __future__ import print_function

from .common.spiders import BaseDocumentationSpider

class HelpDocumentationSpider(BaseDocumentationSpider):
    name = "help_documentation_crawler"
    start_urls = ['http://localhost:9981/help']
    deny_domains = []  # type: List[str]
    deny = ['/privacy']

    def _is_external_url(self, url):
        # type: (str) -> bool
        is_external = url.startswith('http') and 'localhost:9981/help' not in url
        return is_external or self._has_extension(url)
6afbf916dd5a721ae8779fb3f01a8c153b6bbc96
git-ignore.py
git-ignore.py
#! /usr/bin/env python2
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2014 Ciel <imwithye@gmail.com>
#
# Distributed under terms of the MIT license.

import sys

# print version
def version():
    print "git ignore, version 0.1."
    print
    print "http://github.com/imwithye/git-ignore"
    print "git ignore, copyright Ciel <imwithye@gmail.com>"

# print usage
def usage():
    print "usage: git ignore <subcommand>"
    print
    print "Available subcommands are:"
    print " language Add gitignore files. Try use 'git ignore language Python C'"
    print " save Save current .gitignore file as a template"
    print " usage Show this help message and exit"
    print " version Show version and exit"
    print
    print "http://github.com/imwithye/git-ignore"
    print "git ignore, copyright Ciel <imwithye@gmail.com>"

# subcommand router
def select( argv ):
    if argv[1] == "language":
        print "language"
    elif argv[1] == "save":
        print "save"
    elif argv[1] == "help" or argv[1] == "usage":
        usage()
        exit()
    elif argv[1] == "version":
        version()
        exit()
    else:
        print "unknown subcommand"
        usage()
        exit()

if __name__ == "__main__":
    if len(sys.argv)==1:
        sys.argv.append("usage")
    select(sys.argv)
Add usage, version, and command router.
Add usage, version, and command router.
Python
mit
imwithye/git-ignore,imwithye/git-ignore
Add usage, version, and command router.
#! /usr/bin/env python2
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2014 Ciel <imwithye@gmail.com>
#
# Distributed under terms of the MIT license.

import sys

# print version
def version():
    print "git ignore, version 0.1."
    print
    print "http://github.com/imwithye/git-ignore"
    print "git ignore, copyright Ciel <imwithye@gmail.com>"

# print usage
def usage():
    print "usage: git ignore <subcommand>"
    print
    print "Available subcommands are:"
    print " language Add gitignore files. Try use 'git ignore language Python C'"
    print " save Save current .gitignore file as a template"
    print " usage Show this help message and exit"
    print " version Show version and exit"
    print
    print "http://github.com/imwithye/git-ignore"
    print "git ignore, copyright Ciel <imwithye@gmail.com>"

# subcommand router
def select( argv ):
    if argv[1] == "language":
        print "language"
    elif argv[1] == "save":
        print "save"
    elif argv[1] == "help" or argv[1] == "usage":
        usage()
        exit()
    elif argv[1] == "version":
        version()
        exit()
    else:
        print "unknown subcommand"
        usage()
        exit()

if __name__ == "__main__":
    if len(sys.argv)==1:
        sys.argv.append("usage")
    select(sys.argv)
<commit_before><commit_msg>Add usage, version, and command router.<commit_after>
#! /usr/bin/env python2
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2014 Ciel <imwithye@gmail.com>
#
# Distributed under terms of the MIT license.

import sys

# print version
def version():
    print "git ignore, version 0.1."
    print
    print "http://github.com/imwithye/git-ignore"
    print "git ignore, copyright Ciel <imwithye@gmail.com>"

# print usage
def usage():
    print "usage: git ignore <subcommand>"
    print
    print "Available subcommands are:"
    print " language Add gitignore files. Try use 'git ignore language Python C'"
    print " save Save current .gitignore file as a template"
    print " usage Show this help message and exit"
    print " version Show version and exit"
    print
    print "http://github.com/imwithye/git-ignore"
    print "git ignore, copyright Ciel <imwithye@gmail.com>"

# subcommand router
def select( argv ):
    if argv[1] == "language":
        print "language"
    elif argv[1] == "save":
        print "save"
    elif argv[1] == "help" or argv[1] == "usage":
        usage()
        exit()
    elif argv[1] == "version":
        version()
        exit()
    else:
        print "unknown subcommand"
        usage()
        exit()

if __name__ == "__main__":
    if len(sys.argv)==1:
        sys.argv.append("usage")
    select(sys.argv)
Add usage, version, and command router.

#! /usr/bin/env python2
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2014 Ciel <imwithye@gmail.com>
#
# Distributed under terms of the MIT license.

import sys

# print version
def version():
    print "git ignore, version 0.1."
    print
    print "http://github.com/imwithye/git-ignore"
    print "git ignore, copyright Ciel <imwithye@gmail.com>"

# print usage
def usage():
    print "usage: git ignore <subcommand>"
    print
    print "Available subcommands are:"
    print " language Add gitignore files. Try use 'git ignore language Python C'"
    print " save Save current .gitignore file as a template"
    print " usage Show this help message and exit"
    print " version Show version and exit"
    print
    print "http://github.com/imwithye/git-ignore"
    print "git ignore, copyright Ciel <imwithye@gmail.com>"

# subcommand router
def select( argv ):
    if argv[1] == "language":
        print "language"
    elif argv[1] == "save":
        print "save"
    elif argv[1] == "help" or argv[1] == "usage":
        usage()
        exit()
    elif argv[1] == "version":
        version()
        exit()
    else:
        print "unknown subcommand"
        usage()
        exit()

if __name__ == "__main__":
    if len(sys.argv)==1:
        sys.argv.append("usage")
    select(sys.argv)
<commit_before><commit_msg>Add usage, version, and command router.<commit_after>
#! /usr/bin/env python2
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2014 Ciel <imwithye@gmail.com>
#
# Distributed under terms of the MIT license.

import sys

# print version
def version():
    print "git ignore, version 0.1."
    print
    print "http://github.com/imwithye/git-ignore"
    print "git ignore, copyright Ciel <imwithye@gmail.com>"

# print usage
def usage():
    print "usage: git ignore <subcommand>"
    print
    print "Available subcommands are:"
    print " language Add gitignore files. Try use 'git ignore language Python C'"
    print " save Save current .gitignore file as a template"
    print " usage Show this help message and exit"
    print " version Show version and exit"
    print
    print "http://github.com/imwithye/git-ignore"
    print "git ignore, copyright Ciel <imwithye@gmail.com>"

# subcommand router
def select( argv ):
    if argv[1] == "language":
        print "language"
    elif argv[1] == "save":
        print "save"
    elif argv[1] == "help" or argv[1] == "usage":
        usage()
        exit()
    elif argv[1] == "version":
        version()
        exit()
    else:
        print "unknown subcommand"
        usage()
        exit()

if __name__ == "__main__":
    if len(sys.argv)==1:
        sys.argv.append("usage")
    select(sys.argv)
1946b2730b9a934c3b3fb2204581fe82f5b4af04
events/management/commands/assign_abstract_to_user.py
events/management/commands/assign_abstract_to_user.py
from __future__ import unicode_literals

from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from django.db.models import Count
from django.utils import timezone

from events.models import Event, Abstract

import math

class Command(BaseCommand):
    help = "Assign abstract to evaluators"

    def add_arguments(self, parser):
        parser.add_argument('--event-code-name', dest='event_code_name', default=None, type=str)
        parser.add_argument('--username', dest='username', default=None, type=str)
        parser.add_argument('--number_of_abstracts_to_assign', dest='number_of_abstracts_to_assign', default=None, type=int)

    def handle(self, *args, **options):
        if not options['event_code_name']:
            print "No event code name was provided."
            return
        if not options['username']:
            print "No username was provided."
            return
        if not options['number_of_abstracts_to_assign']:
            print "No number of abbstract to assign was provided."
        event = Event.objects.get(code_name=options['event_code_name'])
        new_evaluator = User.objects.filter(username=options['username']).first()
        number_of_abstracts_to_assign = int(options['number_of_abstracts_to_assign'])
        if not event.abstract_revision_team:
            print "Event has no abstract revision team."
            return
        if not new_evaluator:
            print "User doesn't exist."
            return
        print "Event name:", event.english_name
        print "Assigned user:", new_evaluator
        print "Number of abstracts assigned:", number_of_abstracts_to_assign
        pending_abstracts = Abstract.objects.annotate(num_b=Count('evaluation')).filter(
            event=event, is_deleted=False, num_b__lt=event.evaluators_per_abstract)
        print "There are {} pending abstracts:".format(pending_abstracts.count())
        targeted_abstracts = pending_abstracts.exclude(evaluation__evaluator=new_evaluator)\
            .exclude(evaluators=new_evaluator)
        if number_of_abstracts_to_assign > targeted_abstracts.count():
            print "WARNING: There is not enough abstracts to assign!"
        for abstract in targeted_abstracts[:number_of_abstracts_to_assign]:
            pending_evaluator = abstract.evaluators.exclude(event_abstract_evaluations__abstract=abstract).first()
            if not pending_evaluator:
                continue
            abstract.evaluators.remove(pending_evaluator)
            abstract.evaluators.add(new_evaluator)
            print "Removed {} and aded {} from {}".format(pending_evaluator.username, new_evaluator.username, abstract.title)
Add command to assign more abstracts to a specific user
Add command to assign more abstracts to a specific user
Python
agpl-3.0
enjaz/enjaz,enjaz/enjaz,enjaz/enjaz,osamak/student-portal,osamak/student-portal,enjaz/enjaz,osamak/student-portal,enjaz/enjaz,osamak/student-portal,osamak/student-portal
Add command to assign more abstracts to a specific user
from __future__ import unicode_literals

from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from django.db.models import Count
from django.utils import timezone

from events.models import Event, Abstract

import math

class Command(BaseCommand):
    help = "Assign abstract to evaluators"

    def add_arguments(self, parser):
        parser.add_argument('--event-code-name', dest='event_code_name', default=None, type=str)
        parser.add_argument('--username', dest='username', default=None, type=str)
        parser.add_argument('--number_of_abstracts_to_assign', dest='number_of_abstracts_to_assign', default=None, type=int)

    def handle(self, *args, **options):
        if not options['event_code_name']:
            print "No event code name was provided."
            return
        if not options['username']:
            print "No username was provided."
            return
        if not options['number_of_abstracts_to_assign']:
            print "No number of abbstract to assign was provided."
        event = Event.objects.get(code_name=options['event_code_name'])
        new_evaluator = User.objects.filter(username=options['username']).first()
        number_of_abstracts_to_assign = int(options['number_of_abstracts_to_assign'])
        if not event.abstract_revision_team:
            print "Event has no abstract revision team."
            return
        if not new_evaluator:
            print "User doesn't exist."
            return
        print "Event name:", event.english_name
        print "Assigned user:", new_evaluator
        print "Number of abstracts assigned:", number_of_abstracts_to_assign
        pending_abstracts = Abstract.objects.annotate(num_b=Count('evaluation')).filter(
            event=event, is_deleted=False, num_b__lt=event.evaluators_per_abstract)
        print "There are {} pending abstracts:".format(pending_abstracts.count())
        targeted_abstracts = pending_abstracts.exclude(evaluation__evaluator=new_evaluator)\
            .exclude(evaluators=new_evaluator)
        if number_of_abstracts_to_assign > targeted_abstracts.count():
            print "WARNING: There is not enough abstracts to assign!"
        for abstract in targeted_abstracts[:number_of_abstracts_to_assign]:
            pending_evaluator = abstract.evaluators.exclude(event_abstract_evaluations__abstract=abstract).first()
            if not pending_evaluator:
                continue
            abstract.evaluators.remove(pending_evaluator)
            abstract.evaluators.add(new_evaluator)
            print "Removed {} and aded {} from {}".format(pending_evaluator.username, new_evaluator.username, abstract.title)
<commit_before><commit_msg>Add command to assign more abstracts to a specific user<commit_after>
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from django.db.models import Count
from django.utils import timezone

from events.models import Event, Abstract

import math


class Command(BaseCommand):
    help = "Assign abstract to evaluators"

    def add_arguments(self, parser):
        parser.add_argument('--event-code-name', dest='event_code_name',
                            default=None, type=str)
        parser.add_argument('--username', dest='username',
                            default=None, type=str)
        parser.add_argument('--number_of_abstracts_to_assign',
                            dest='number_of_abstracts_to_assign',
                            default=None, type=int)

    def handle(self, *args, **options):
        if not options['event_code_name']:
            print "No event code name was provided."
            return
        if not options['username']:
            print "No username was provided."
            return
        if not options['number_of_abstracts_to_assign']:
            print "No number of abstracts to assign was provided."
            return

        event = Event.objects.get(code_name=options['event_code_name'])
        new_evaluator = User.objects.filter(username=options['username']).first()
        number_of_abstracts_to_assign = int(options['number_of_abstracts_to_assign'])

        if not event.abstract_revision_team:
            print "Event has no abstract revision team."
            return

        if not new_evaluator:
            print "User doesn't exist."
            return

        print "Event name:", event.english_name
        print "Assigned user:", new_evaluator
        print "Number of abstracts assigned:", number_of_abstracts_to_assign

        pending_abstracts = Abstract.objects.annotate(num_b=Count('evaluation')).filter(event=event, is_deleted=False, num_b__lt=event.evaluators_per_abstract)
        print "There are {} pending abstracts:".format(pending_abstracts.count())
        targeted_abstracts = pending_abstracts.exclude(evaluation__evaluator=new_evaluator)\
                                              .exclude(evaluators=new_evaluator)
        if number_of_abstracts_to_assign > targeted_abstracts.count():
            print "WARNING: There are not enough abstracts to assign!"

        for abstract in targeted_abstracts[:number_of_abstracts_to_assign]:
            pending_evaluator = abstract.evaluators.exclude(event_abstract_evaluations__abstract=abstract).first()
            if not pending_evaluator:
                continue
            abstract.evaluators.remove(pending_evaluator)
            abstract.evaluators.add(new_evaluator)
            print "Removed {} and added {} from {}".format(pending_evaluator.username, new_evaluator.username, abstract.title)
Add command to assign more abstracts to a specific userfrom __future__ import unicode_literals
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from django.db.models import Count
from django.utils import timezone

from events.models import Event, Abstract

import math


class Command(BaseCommand):
    help = "Assign abstract to evaluators"

    def add_arguments(self, parser):
        parser.add_argument('--event-code-name', dest='event_code_name',
                            default=None, type=str)
        parser.add_argument('--username', dest='username',
                            default=None, type=str)
        parser.add_argument('--number_of_abstracts_to_assign',
                            dest='number_of_abstracts_to_assign',
                            default=None, type=int)

    def handle(self, *args, **options):
        if not options['event_code_name']:
            print "No event code name was provided."
            return
        if not options['username']:
            print "No username was provided."
            return
        if not options['number_of_abstracts_to_assign']:
            print "No number of abstracts to assign was provided."
            return

        event = Event.objects.get(code_name=options['event_code_name'])
        new_evaluator = User.objects.filter(username=options['username']).first()
        number_of_abstracts_to_assign = int(options['number_of_abstracts_to_assign'])

        if not event.abstract_revision_team:
            print "Event has no abstract revision team."
            return

        if not new_evaluator:
            print "User doesn't exist."
            return

        print "Event name:", event.english_name
        print "Assigned user:", new_evaluator
        print "Number of abstracts assigned:", number_of_abstracts_to_assign

        pending_abstracts = Abstract.objects.annotate(num_b=Count('evaluation')).filter(event=event, is_deleted=False, num_b__lt=event.evaluators_per_abstract)
        print "There are {} pending abstracts:".format(pending_abstracts.count())
        targeted_abstracts = pending_abstracts.exclude(evaluation__evaluator=new_evaluator)\
                                              .exclude(evaluators=new_evaluator)
        if number_of_abstracts_to_assign > targeted_abstracts.count():
            print "WARNING: There are not enough abstracts to assign!"

        for abstract in targeted_abstracts[:number_of_abstracts_to_assign]:
            pending_evaluator = abstract.evaluators.exclude(event_abstract_evaluations__abstract=abstract).first()
            if not pending_evaluator:
                continue
            abstract.evaluators.remove(pending_evaluator)
            abstract.evaluators.add(new_evaluator)
            print "Removed {} and added {} from {}".format(pending_evaluator.username, new_evaluator.username, abstract.title)
<commit_before><commit_msg>Add command to assign more abstracts to a specific user<commit_after>from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from django.db.models import Count
from django.utils import timezone

from events.models import Event, Abstract

import math


class Command(BaseCommand):
    help = "Assign abstract to evaluators"

    def add_arguments(self, parser):
        parser.add_argument('--event-code-name', dest='event_code_name',
                            default=None, type=str)
        parser.add_argument('--username', dest='username',
                            default=None, type=str)
        parser.add_argument('--number_of_abstracts_to_assign',
                            dest='number_of_abstracts_to_assign',
                            default=None, type=int)

    def handle(self, *args, **options):
        if not options['event_code_name']:
            print "No event code name was provided."
            return
        if not options['username']:
            print "No username was provided."
            return
        if not options['number_of_abstracts_to_assign']:
            print "No number of abstracts to assign was provided."
            return

        event = Event.objects.get(code_name=options['event_code_name'])
        new_evaluator = User.objects.filter(username=options['username']).first()
        number_of_abstracts_to_assign = int(options['number_of_abstracts_to_assign'])

        if not event.abstract_revision_team:
            print "Event has no abstract revision team."
            return

        if not new_evaluator:
            print "User doesn't exist."
            return

        print "Event name:", event.english_name
        print "Assigned user:", new_evaluator
        print "Number of abstracts assigned:", number_of_abstracts_to_assign

        pending_abstracts = Abstract.objects.annotate(num_b=Count('evaluation')).filter(event=event, is_deleted=False, num_b__lt=event.evaluators_per_abstract)
        print "There are {} pending abstracts:".format(pending_abstracts.count())
        targeted_abstracts = pending_abstracts.exclude(evaluation__evaluator=new_evaluator)\
                                              .exclude(evaluators=new_evaluator)
        if number_of_abstracts_to_assign > targeted_abstracts.count():
            print "WARNING: There are not enough abstracts to assign!"

        for abstract in targeted_abstracts[:number_of_abstracts_to_assign]:
            pending_evaluator = abstract.evaluators.exclude(event_abstract_evaluations__abstract=abstract).first()
            if not pending_evaluator:
                continue
            abstract.evaluators.remove(pending_evaluator)
            abstract.evaluators.add(new_evaluator)
            print "Removed {} and added {} from {}".format(pending_evaluator.username, new_evaluator.username, abstract.title)
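A quick sketch of driving the management command above from Python rather than the shell. The command's module name under management/commands/ is not shown in this row, so 'reassign_abstracts' is an assumed placeholder, as are the option values; call_command matches keyword arguments to the dest names declared in add_arguments.

# Hypothetical invocation inside a configured Django project; the command
# name and all values below are placeholders, not taken from the repo.
from django.core.management import call_command

call_command('reassign_abstracts',
             event_code_name='annual-forum',
             username='new_reviewer',
             number_of_abstracts_to_assign=10)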
b99c4005056487ffb37c2bd2cfdd0d9082c746c1
data_structures/Tree/Binary-tree/python/BinaryTree.py
data_structures/Tree/Binary-tree/python/BinaryTree.py
class Node: def __init__(self, key): self.right = None self.left = None self.key = key def addLeftChild(self, node): self.left = node def addRightChild(self, node): self.right = node class BinaryTree: def getRootOfTree(self): root = Node(1) node2 = Node(2) node3 = Node(3) node4 = Node(4) node5 = Node(5) node6 = Node(6) node7 = Node(7) root.addLeftChild(node2) root.addRightChild(node3) node2.addLeftChild(node4) node2.addRightChild(node5) node3.addLeftChild(node6) node3.addRightChild(node7) return root def inorder(node, array): if node is None: return array array = inorder(node.left, array) array.append(node.key) return inorder(node.right, array) if __name__ == '__main__': # Create a Tree tree = BinaryTree() root = tree.getRootOfTree() # Print Inorder Traversal of Tree print 'Inorder traversal for binary tree:', inorder(root, [])
Add binary tree in python
Add binary tree in python
Python
cc0-1.0
ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms
Add binary tree in python
class Node: def __init__(self, key): self.right = None self.left = None self.key = key def addLeftChild(self, node): self.left = node def addRightChild(self, node): self.right = node class BinaryTree: def getRootOfTree(self): root = Node(1) node2 = Node(2) node3 = Node(3) node4 = Node(4) node5 = Node(5) node6 = Node(6) node7 = Node(7) root.addLeftChild(node2) root.addRightChild(node3) node2.addLeftChild(node4) node2.addRightChild(node5) node3.addLeftChild(node6) node3.addRightChild(node7) return root def inorder(node, array): if node is None: return array array = inorder(node.left, array) array.append(node.key) return inorder(node.right, array) if __name__ == '__main__': # Create a Tree tree = BinaryTree() root = tree.getRootOfTree() # Print Inorder Traversal of Tree print 'Inorder traversal for binary tree:', inorder(root, [])
<commit_before><commit_msg>Add binary tree in python<commit_after>
class Node: def __init__(self, key): self.right = None self.left = None self.key = key def addLeftChild(self, node): self.left = node def addRightChild(self, node): self.right = node class BinaryTree: def getRootOfTree(self): root = Node(1) node2 = Node(2) node3 = Node(3) node4 = Node(4) node5 = Node(5) node6 = Node(6) node7 = Node(7) root.addLeftChild(node2) root.addRightChild(node3) node2.addLeftChild(node4) node2.addRightChild(node5) node3.addLeftChild(node6) node3.addRightChild(node7) return root def inorder(node, array): if node is None: return array array = inorder(node.left, array) array.append(node.key) return inorder(node.right, array) if __name__ == '__main__': # Create a Tree tree = BinaryTree() root = tree.getRootOfTree() # Print Inorder Traversal of Tree print 'Inorder traversal for binary tree:', inorder(root, [])
Add binary tree in pythonclass Node: def __init__(self, key): self.right = None self.left = None self.key = key def addLeftChild(self, node): self.left = node def addRightChild(self, node): self.right = node class BinaryTree: def getRootOfTree(self): root = Node(1) node2 = Node(2) node3 = Node(3) node4 = Node(4) node5 = Node(5) node6 = Node(6) node7 = Node(7) root.addLeftChild(node2) root.addRightChild(node3) node2.addLeftChild(node4) node2.addRightChild(node5) node3.addLeftChild(node6) node3.addRightChild(node7) return root def inorder(node, array): if node is None: return array array = inorder(node.left, array) array.append(node.key) return inorder(node.right, array) if __name__ == '__main__': # Create a Tree tree = BinaryTree() root = tree.getRootOfTree() # Print Inorder Traversal of Tree print 'Inorder traversal for binary tree:', inorder(root, [])
<commit_before><commit_msg>Add binary tree in python<commit_after>class Node: def __init__(self, key): self.right = None self.left = None self.key = key def addLeftChild(self, node): self.left = node def addRightChild(self, node): self.right = node class BinaryTree: def getRootOfTree(self): root = Node(1) node2 = Node(2) node3 = Node(3) node4 = Node(4) node5 = Node(5) node6 = Node(6) node7 = Node(7) root.addLeftChild(node2) root.addRightChild(node3) node2.addLeftChild(node4) node2.addRightChild(node5) node3.addLeftChild(node6) node3.addRightChild(node7) return root def inorder(node, array): if node is None: return array array = inorder(node.left, array) array.append(node.key) return inorder(node.right, array) if __name__ == '__main__': # Create a Tree tree = BinaryTree() root = tree.getRootOfTree() # Print Inorder Traversal of Tree print 'Inorder traversal for binary tree:', inorder(root, [])
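For reference, getRootOfTree hard-codes a complete tree with root 1, children 2 and 3, and leaves 4-7, so the inorder result is fully determined. A minimal check against that fixture, assuming the BinaryTree module above is importable:

#         1
#       /   \
#      2     3
#     / \   / \
#    4   5 6   7
root = BinaryTree().getRootOfTree()
assert inorder(root, []) == [4, 2, 5, 1, 6, 3, 7]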
55fb9752311f79c5aa2ecd85c4264ae6821f52da
django_summernote/migrations/0002_update-help_text.py
django_summernote/migrations/0002_update-help_text.py
# -*- coding: utf-8 -*- # Generated by Django 1.11.4 on 2017-09-11 07:47 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('django_summernote', '0001_initial'), ] operations = [ migrations.AlterField( model_name='attachment', name='name', field=models.CharField(blank=True, help_text=b'Defaults to filename, if left blank', max_length=255, null=True), ), ]
Add a migration for updating help_text
Add a migration for updating help_text
Python
mit
lqez/django-summernote,summernote/django-summernote,lqez/django-summernote,summernote/django-summernote,summernote/django-summernote,lqez/django-summernote
Add a migration for updating help_text
# -*- coding: utf-8 -*- # Generated by Django 1.11.4 on 2017-09-11 07:47 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('django_summernote', '0001_initial'), ] operations = [ migrations.AlterField( model_name='attachment', name='name', field=models.CharField(blank=True, help_text=b'Defaults to filename, if left blank', max_length=255, null=True), ), ]
<commit_before><commit_msg>Add a migration for updating help_text<commit_after>
# -*- coding: utf-8 -*- # Generated by Django 1.11.4 on 2017-09-11 07:47 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('django_summernote', '0001_initial'), ] operations = [ migrations.AlterField( model_name='attachment', name='name', field=models.CharField(blank=True, help_text=b'Defaults to filename, if left blank', max_length=255, null=True), ), ]
Add a migration for updating help_text# -*- coding: utf-8 -*- # Generated by Django 1.11.4 on 2017-09-11 07:47 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('django_summernote', '0001_initial'), ] operations = [ migrations.AlterField( model_name='attachment', name='name', field=models.CharField(blank=True, help_text=b'Defaults to filename, if left blank', max_length=255, null=True), ), ]
<commit_before><commit_msg>Add a migration for updating help_text<commit_after># -*- coding: utf-8 -*- # Generated by Django 1.11.4 on 2017-09-11 07:47 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('django_summernote', '0001_initial'), ] operations = [ migrations.AlterField( model_name='attachment', name='name', field=models.CharField(blank=True, help_text=b'Defaults to filename, if left blank', max_length=255, null=True), ), ]
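help_text lives only in Python model state, not in the database schema, so the AlterField above is effectively a bookkeeping migration: it exists so the recorded migration history matches the model. The equivalent post-migration declaration, as a sketch (the b'...' bytes literal from the Python 2-era migration is written as a plain string here):

from django.db import models

class Attachment(models.Model):
    # Field as declared after this migration; help_text changes emit no
    # schema-altering SQL.
    name = models.CharField(blank=True, null=True, max_length=255,
                            help_text='Defaults to filename, if left blank')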
50bd003fce6ccc6545f610c6600852dcf6ebd05f
ansible/modules/hashivault/hashivault_leader.py
ansible/modules/hashivault/hashivault_leader.py
#!/usr/bin/env python from ansible.module_utils.hashivault import hashivault_argspec from ansible.module_utils.hashivault import hashivault_client from ansible.module_utils.hashivault import hashivault_init from ansible.module_utils.hashivault import hashiwrapper ANSIBLE_METADATA = {'status': ['stableinterface'], 'supported_by': 'community', 'version': '1.1'} DOCUMENTATION = ''' --- module: hashivault_leader version_added: "3.16.4" short_description: Hashicorp Vault leader module description: - Module to get leader information of Hashicorp Vault. options: url: description: - url for vault default: to environment variable VAULT_ADDR ca_cert: description: - "path to a PEM-encoded CA cert file to use to verify the Vault server TLS certificate" default: to environment variable VAULT_CACERT ca_path: description: - "path to a directory of PEM-encoded CA cert files to verify the Vault server TLS certificate : if ca_cert is specified, its value will take precedence" default: to environment variable VAULT_CAPATH client_cert: description: - "path to a PEM-encoded client certificate for TLS authentication to the Vault server" default: to environment variable VAULT_CLIENT_CERT client_key: description: - "path to an unencrypted PEM-encoded private key matching the client certificate" default: to environment variable VAULT_CLIENT_KEY verify: description: - "if set, do not verify presented TLS certificate before communicating with Vault server : setting this variable is not recommended except during testing" default: to environment variable VAULT_SKIP_VERIFY authtype: description: - "authentication type to use: token, userpass, github, ldap, approle" default: token token: description: - token for vault default: to environment variable VAULT_TOKEN username: description: - username to login to vault. default: to environment variable VAULT_USER password: description: - password to login to vault. default: to environment variable VAULT_PASSWORD ''' EXAMPLES = ''' --- - hosts: localhost tasks: - hashivault_leader: register: 'vault_leader' - debug: msg="Leader is {{vault_leader.status.leader_address}}" ''' def main(): argspec = hashivault_argspec() module = hashivault_init(argspec) result = hashivault_leader(module.params) if result.get('failed'): module.fail_json(**result) else: module.exit_json(**result) @hashiwrapper def hashivault_leader(params): client = hashivault_client(params) return {'status': client.sys.read_leader_status()} if __name__ == '__main__': main()
Add a module to fetch cluster leader information
Add a module to fetch cluster leader information
Python
mit
TerryHowe/ansible-modules-hashivault,TerryHowe/ansible-modules-hashivault
Add a module to fetch cluster leader information
#!/usr/bin/env python from ansible.module_utils.hashivault import hashivault_argspec from ansible.module_utils.hashivault import hashivault_client from ansible.module_utils.hashivault import hashivault_init from ansible.module_utils.hashivault import hashiwrapper ANSIBLE_METADATA = {'status': ['stableinterface'], 'supported_by': 'community', 'version': '1.1'} DOCUMENTATION = ''' --- module: hashivault_leader version_added: "3.16.4" short_description: Hashicorp Vault leader module description: - Module to get leader information of Hashicorp Vault. options: url: description: - url for vault default: to environment variable VAULT_ADDR ca_cert: description: - "path to a PEM-encoded CA cert file to use to verify the Vault server TLS certificate" default: to environment variable VAULT_CACERT ca_path: description: - "path to a directory of PEM-encoded CA cert files to verify the Vault server TLS certificate : if ca_cert is specified, its value will take precedence" default: to environment variable VAULT_CAPATH client_cert: description: - "path to a PEM-encoded client certificate for TLS authentication to the Vault server" default: to environment variable VAULT_CLIENT_CERT client_key: description: - "path to an unencrypted PEM-encoded private key matching the client certificate" default: to environment variable VAULT_CLIENT_KEY verify: description: - "if set, do not verify presented TLS certificate before communicating with Vault server : setting this variable is not recommended except during testing" default: to environment variable VAULT_SKIP_VERIFY authtype: description: - "authentication type to use: token, userpass, github, ldap, approle" default: token token: description: - token for vault default: to environment variable VAULT_TOKEN username: description: - username to login to vault. default: to environment variable VAULT_USER password: description: - password to login to vault. default: to environment variable VAULT_PASSWORD ''' EXAMPLES = ''' --- - hosts: localhost tasks: - hashivault_leader: register: 'vault_leader' - debug: msg="Leader is {{vault_leader.status.leader_address}}" ''' def main(): argspec = hashivault_argspec() module = hashivault_init(argspec) result = hashivault_leader(module.params) if result.get('failed'): module.fail_json(**result) else: module.exit_json(**result) @hashiwrapper def hashivault_leader(params): client = hashivault_client(params) return {'status': client.sys.read_leader_status()} if __name__ == '__main__': main()
<commit_before><commit_msg>Add a module to fetch cluster leader information<commit_after>
#!/usr/bin/env python from ansible.module_utils.hashivault import hashivault_argspec from ansible.module_utils.hashivault import hashivault_client from ansible.module_utils.hashivault import hashivault_init from ansible.module_utils.hashivault import hashiwrapper ANSIBLE_METADATA = {'status': ['stableinterface'], 'supported_by': 'community', 'version': '1.1'} DOCUMENTATION = ''' --- module: hashivault_leader version_added: "3.16.4" short_description: Hashicorp Vault leader module description: - Module to get leader information of Hashicorp Vault. options: url: description: - url for vault default: to environment variable VAULT_ADDR ca_cert: description: - "path to a PEM-encoded CA cert file to use to verify the Vault server TLS certificate" default: to environment variable VAULT_CACERT ca_path: description: - "path to a directory of PEM-encoded CA cert files to verify the Vault server TLS certificate : if ca_cert is specified, its value will take precedence" default: to environment variable VAULT_CAPATH client_cert: description: - "path to a PEM-encoded client certificate for TLS authentication to the Vault server" default: to environment variable VAULT_CLIENT_CERT client_key: description: - "path to an unencrypted PEM-encoded private key matching the client certificate" default: to environment variable VAULT_CLIENT_KEY verify: description: - "if set, do not verify presented TLS certificate before communicating with Vault server : setting this variable is not recommended except during testing" default: to environment variable VAULT_SKIP_VERIFY authtype: description: - "authentication type to use: token, userpass, github, ldap, approle" default: token token: description: - token for vault default: to environment variable VAULT_TOKEN username: description: - username to login to vault. default: to environment variable VAULT_USER password: description: - password to login to vault. default: to environment variable VAULT_PASSWORD ''' EXAMPLES = ''' --- - hosts: localhost tasks: - hashivault_leader: register: 'vault_leader' - debug: msg="Leader is {{vault_leader.status.leader_address}}" ''' def main(): argspec = hashivault_argspec() module = hashivault_init(argspec) result = hashivault_leader(module.params) if result.get('failed'): module.fail_json(**result) else: module.exit_json(**result) @hashiwrapper def hashivault_leader(params): client = hashivault_client(params) return {'status': client.sys.read_leader_status()} if __name__ == '__main__': main()
Add a module to fetch cluster leader information#!/usr/bin/env python
from ansible.module_utils.hashivault import hashivault_argspec
from ansible.module_utils.hashivault import hashivault_client
from ansible.module_utils.hashivault import hashivault_init
from ansible.module_utils.hashivault import hashiwrapper

ANSIBLE_METADATA = {'status': ['stableinterface'], 'supported_by': 'community', 'version': '1.1'}
DOCUMENTATION = '''
---
module: hashivault_leader
version_added: "3.16.4"
short_description: Hashicorp Vault leader module
description:
    - Module to get leader information of Hashicorp Vault.
options:
    url:
        description:
            - url for vault
        default: to environment variable VAULT_ADDR
    ca_cert:
        description:
            - "path to a PEM-encoded CA cert file to use to verify the Vault server TLS certificate"
        default: to environment variable VAULT_CACERT
    ca_path:
        description:
            - "path to a directory of PEM-encoded CA cert files to verify the Vault server TLS certificate : if ca_cert is specified, its value will take precedence"
        default: to environment variable VAULT_CAPATH
    client_cert:
        description:
            - "path to a PEM-encoded client certificate for TLS authentication to the Vault server"
        default: to environment variable VAULT_CLIENT_CERT
    client_key:
        description:
            - "path to an unencrypted PEM-encoded private key matching the client certificate"
        default: to environment variable VAULT_CLIENT_KEY
    verify:
        description:
            - "if set, do not verify presented TLS certificate before communicating with Vault server : setting this variable is not recommended except during testing"
        default: to environment variable VAULT_SKIP_VERIFY
    authtype:
        description:
            - "authentication type to use: token, userpass, github, ldap, approle"
        default: token
    token:
        description:
            - token for vault
        default: to environment variable VAULT_TOKEN
    username:
        description:
            - username to login to vault.
        default: to environment variable VAULT_USER
    password:
        description:
            - password to login to vault.
        default: to environment variable VAULT_PASSWORD
'''
EXAMPLES = '''
---
- hosts: localhost
  tasks:
    - hashivault_leader:
      register: 'vault_leader'
    - debug: msg="Leader is {{vault_leader.status.leader_address}}"
'''


def main():
    argspec = hashivault_argspec()
    module = hashivault_init(argspec)
    result = hashivault_leader(module.params)
    if result.get('failed'):
        module.fail_json(**result)
    else:
        module.exit_json(**result)


@hashiwrapper
def hashivault_leader(params):
    client = hashivault_client(params)
    return {'status': client.sys.read_leader_status()}


if __name__ == '__main__':
    main()
<commit_before><commit_msg>Add a module to fetch cluster leader information<commit_after>#!/usr/bin/env python
from ansible.module_utils.hashivault import hashivault_argspec
from ansible.module_utils.hashivault import hashivault_client
from ansible.module_utils.hashivault import hashivault_init
from ansible.module_utils.hashivault import hashiwrapper

ANSIBLE_METADATA = {'status': ['stableinterface'], 'supported_by': 'community', 'version': '1.1'}
DOCUMENTATION = '''
---
module: hashivault_leader
version_added: "3.16.4"
short_description: Hashicorp Vault leader module
description:
    - Module to get leader information of Hashicorp Vault.
options:
    url:
        description:
            - url for vault
        default: to environment variable VAULT_ADDR
    ca_cert:
        description:
            - "path to a PEM-encoded CA cert file to use to verify the Vault server TLS certificate"
        default: to environment variable VAULT_CACERT
    ca_path:
        description:
            - "path to a directory of PEM-encoded CA cert files to verify the Vault server TLS certificate : if ca_cert is specified, its value will take precedence"
        default: to environment variable VAULT_CAPATH
    client_cert:
        description:
            - "path to a PEM-encoded client certificate for TLS authentication to the Vault server"
        default: to environment variable VAULT_CLIENT_CERT
    client_key:
        description:
            - "path to an unencrypted PEM-encoded private key matching the client certificate"
        default: to environment variable VAULT_CLIENT_KEY
    verify:
        description:
            - "if set, do not verify presented TLS certificate before communicating with Vault server : setting this variable is not recommended except during testing"
        default: to environment variable VAULT_SKIP_VERIFY
    authtype:
        description:
            - "authentication type to use: token, userpass, github, ldap, approle"
        default: token
    token:
        description:
            - token for vault
        default: to environment variable VAULT_TOKEN
    username:
        description:
            - username to login to vault.
        default: to environment variable VAULT_USER
    password:
        description:
            - password to login to vault.
        default: to environment variable VAULT_PASSWORD
'''
EXAMPLES = '''
---
- hosts: localhost
  tasks:
    - hashivault_leader:
      register: 'vault_leader'
    - debug: msg="Leader is {{vault_leader.status.leader_address}}"
'''


def main():
    argspec = hashivault_argspec()
    module = hashivault_init(argspec)
    result = hashivault_leader(module.params)
    if result.get('failed'):
        module.fail_json(**result)
    else:
        module.exit_json(**result)


@hashiwrapper
def hashivault_leader(params):
    client = hashivault_client(params)
    return {'status': client.sys.read_leader_status()}


if __name__ == '__main__':
    main()
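The module is a thin wrapper: hashivault_client builds an hvac client from the documented options, and the wrapped function simply calls sys.read_leader_status(). The same payload can be fetched directly with hvac (a sketch; the Vault address is a placeholder):

import hvac

client = hvac.Client(url='https://vault.example.com:8200')  # placeholder address
status = client.sys.read_leader_status()
print(status['leader_address'])  # the dict the module returns under 'status'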
bb60309dc207f388c3f84837041d1f0115521049
py/increasing-subsequences.py
py/increasing-subsequences.py
class Solution(object): def findSubsequences(self, nums): """ :type nums: List[int] :rtype: List[List[int]] """ lnums = len(nums) def dfs(idx, cur): if idx == lnums: if len(cur) > 1: yield tuple(cur) else: for x in dfs(idx + 1, cur): yield x if not cur or nums[idx] >= cur[-1]: cur.append(nums[idx]) for x in dfs(idx + 1, cur): yield x cur.pop() return map(list, set(dfs(0, [])))
Add py solution for 491. Increasing Subsequences
Add py solution for 491. Increasing Subsequences 491. Increasing Subsequences: https://leetcode.com/problems/increasing-subsequences/
Python
apache-2.0
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
Add py solution for 491. Increasing Subsequences 491. Increasing Subsequences: https://leetcode.com/problems/increasing-subsequences/
class Solution(object): def findSubsequences(self, nums): """ :type nums: List[int] :rtype: List[List[int]] """ lnums = len(nums) def dfs(idx, cur): if idx == lnums: if len(cur) > 1: yield tuple(cur) else: for x in dfs(idx + 1, cur): yield x if not cur or nums[idx] >= cur[-1]: cur.append(nums[idx]) for x in dfs(idx + 1, cur): yield x cur.pop() return map(list, set(dfs(0, [])))
<commit_before><commit_msg>Add py solution for 491. Increasing Subsequences 491. Increasing Subsequences: https://leetcode.com/problems/increasing-subsequences/<commit_after>
class Solution(object): def findSubsequences(self, nums): """ :type nums: List[int] :rtype: List[List[int]] """ lnums = len(nums) def dfs(idx, cur): if idx == lnums: if len(cur) > 1: yield tuple(cur) else: for x in dfs(idx + 1, cur): yield x if not cur or nums[idx] >= cur[-1]: cur.append(nums[idx]) for x in dfs(idx + 1, cur): yield x cur.pop() return map(list, set(dfs(0, [])))
Add py solution for 491. Increasing Subsequences 491. Increasing Subsequences: https://leetcode.com/problems/increasing-subsequences/class Solution(object): def findSubsequences(self, nums): """ :type nums: List[int] :rtype: List[List[int]] """ lnums = len(nums) def dfs(idx, cur): if idx == lnums: if len(cur) > 1: yield tuple(cur) else: for x in dfs(idx + 1, cur): yield x if not cur or nums[idx] >= cur[-1]: cur.append(nums[idx]) for x in dfs(idx + 1, cur): yield x cur.pop() return map(list, set(dfs(0, [])))
<commit_before><commit_msg>Add py solution for 491. Increasing Subsequences 491. Increasing Subsequences: https://leetcode.com/problems/increasing-subsequences/<commit_after>class Solution(object): def findSubsequences(self, nums): """ :type nums: List[int] :rtype: List[List[int]] """ lnums = len(nums) def dfs(idx, cur): if idx == lnums: if len(cur) > 1: yield tuple(cur) else: for x in dfs(idx + 1, cur): yield x if not cur or nums[idx] >= cur[-1]: cur.append(nums[idx]) for x in dfs(idx + 1, cur): yield x cur.pop() return map(list, set(dfs(0, [])))
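A small Python 2 driver for the solution above, using the canonical example from the problem statement; [4, 6, 7, 7] has exactly eight distinct increasing subsequences of length two or more, and the set() in findSubsequences handles the duplicate 7:

result = Solution().findSubsequences([4, 6, 7, 7])
expected = [[4, 6], [4, 7], [4, 6, 7], [4, 6, 7, 7],
            [4, 7, 7], [6, 7], [6, 7, 7], [7, 7]]
assert sorted(result) == sorted(expected)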
8f79afd448f0234eab82eb1d3e3d48e0f657bcc7
nova/tests/functional/regressions/test_bug_1902925.py
nova/tests/functional/regressions/test_bug_1902925.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from nova.tests.functional.api import client from nova.tests.functional import integrated_helpers from nova.tests.unit import cast_as_call class ComputeVersion5xPinnedRpcTests(integrated_helpers._IntegratedTestBase): compute_driver = 'fake.MediumFakeDriver' ADMIN_API = True api_major_version = 'v2.1' microversion = 'latest' def setUp(self): super(ComputeVersion5xPinnedRpcTests, self).setUp() self.useFixture(cast_as_call.CastAsCall(self)) self.compute1 = self._start_compute(host='host1') def _test_rebuild_instance_with_compute_rpc_pin(self, version_cap): self.flags(compute=version_cap, group='upgrade_levels') server_req = self._build_server(networks='none') server = self.api.post_server({'server': server_req}) server = self._wait_for_state_change(server, 'ACTIVE') self.api.post_server_action(server['id'], {'rebuild': { 'imageRef': '155d900f-4e14-4e4c-a73d-069cbf4541e6' }}) def test_rebuild_instance_5_0(self): e = self.assertRaises(client.OpenStackApiException, self._test_rebuild_instance_with_compute_rpc_pin, '5.0') self.assertEqual(500, e.response.status_code) # NOTE(sbauza): It provides a TypeError because of 'accel_uuids' # parameter missing # TypeError: rebuild_instance() missing 1 required positional argument: # 'accel_uuids' self.assertIn('TypeError', e.response.text) def test_rebuild_instance_5_12(self): self._test_rebuild_instance_with_compute_rpc_pin('5.12')
Add a regression test for 5.12 compute API issue
Add a regression test for 5.12 compute API issue In I147bf4d95e6d86ff1f967a8ce37260730f21d236 we wrote a breaking RPC change for the 5.12 version as the accel_uuids parameter is not optional. Adding a regression test to check the issue. Change-Id: I1f3914e16294c99a625b3984ca0098d835cd9b92 Related-Bug: #1902925
Python
apache-2.0
klmitch/nova,mahak/nova,mahak/nova,klmitch/nova,openstack/nova,mahak/nova,openstack/nova,klmitch/nova,klmitch/nova,openstack/nova
Add a regression test for 5.12 compute API issue In I147bf4d95e6d86ff1f967a8ce37260730f21d236 we wrote a breaking RPC change for the 5.12 version as the accel_uuids parameter is not optional. Adding a regression test to check the issue. Change-Id: I1f3914e16294c99a625b3984ca0098d835cd9b92 Related-Bug: #1902925
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from nova.tests.functional.api import client from nova.tests.functional import integrated_helpers from nova.tests.unit import cast_as_call class ComputeVersion5xPinnedRpcTests(integrated_helpers._IntegratedTestBase): compute_driver = 'fake.MediumFakeDriver' ADMIN_API = True api_major_version = 'v2.1' microversion = 'latest' def setUp(self): super(ComputeVersion5xPinnedRpcTests, self).setUp() self.useFixture(cast_as_call.CastAsCall(self)) self.compute1 = self._start_compute(host='host1') def _test_rebuild_instance_with_compute_rpc_pin(self, version_cap): self.flags(compute=version_cap, group='upgrade_levels') server_req = self._build_server(networks='none') server = self.api.post_server({'server': server_req}) server = self._wait_for_state_change(server, 'ACTIVE') self.api.post_server_action(server['id'], {'rebuild': { 'imageRef': '155d900f-4e14-4e4c-a73d-069cbf4541e6' }}) def test_rebuild_instance_5_0(self): e = self.assertRaises(client.OpenStackApiException, self._test_rebuild_instance_with_compute_rpc_pin, '5.0') self.assertEqual(500, e.response.status_code) # NOTE(sbauza): It provides a TypeError because of 'accel_uuids' # parameter missing # TypeError: rebuild_instance() missing 1 required positional argument: # 'accel_uuids' self.assertIn('TypeError', e.response.text) def test_rebuild_instance_5_12(self): self._test_rebuild_instance_with_compute_rpc_pin('5.12')
<commit_before><commit_msg>Add a regression test for 5.12 compute API issue In I147bf4d95e6d86ff1f967a8ce37260730f21d236 we wrote a breaking RPC change for the 5.12 version as the accel_uuids parameter is not optional. Adding a regression test to check the issue. Change-Id: I1f3914e16294c99a625b3984ca0098d835cd9b92 Related-Bug: #1902925<commit_after>
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from nova.tests.functional.api import client from nova.tests.functional import integrated_helpers from nova.tests.unit import cast_as_call class ComputeVersion5xPinnedRpcTests(integrated_helpers._IntegratedTestBase): compute_driver = 'fake.MediumFakeDriver' ADMIN_API = True api_major_version = 'v2.1' microversion = 'latest' def setUp(self): super(ComputeVersion5xPinnedRpcTests, self).setUp() self.useFixture(cast_as_call.CastAsCall(self)) self.compute1 = self._start_compute(host='host1') def _test_rebuild_instance_with_compute_rpc_pin(self, version_cap): self.flags(compute=version_cap, group='upgrade_levels') server_req = self._build_server(networks='none') server = self.api.post_server({'server': server_req}) server = self._wait_for_state_change(server, 'ACTIVE') self.api.post_server_action(server['id'], {'rebuild': { 'imageRef': '155d900f-4e14-4e4c-a73d-069cbf4541e6' }}) def test_rebuild_instance_5_0(self): e = self.assertRaises(client.OpenStackApiException, self._test_rebuild_instance_with_compute_rpc_pin, '5.0') self.assertEqual(500, e.response.status_code) # NOTE(sbauza): It provides a TypeError because of 'accel_uuids' # parameter missing # TypeError: rebuild_instance() missing 1 required positional argument: # 'accel_uuids' self.assertIn('TypeError', e.response.text) def test_rebuild_instance_5_12(self): self._test_rebuild_instance_with_compute_rpc_pin('5.12')
Add a regression test for 5.12 compute API issue In I147bf4d95e6d86ff1f967a8ce37260730f21d236 we wrote a breaking RPC change for the 5.12 version as the accel_uuids parameter is not optional. Adding a regression test to check the issue. Change-Id: I1f3914e16294c99a625b3984ca0098d835cd9b92 Related-Bug: #1902925# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from nova.tests.functional.api import client from nova.tests.functional import integrated_helpers from nova.tests.unit import cast_as_call class ComputeVersion5xPinnedRpcTests(integrated_helpers._IntegratedTestBase): compute_driver = 'fake.MediumFakeDriver' ADMIN_API = True api_major_version = 'v2.1' microversion = 'latest' def setUp(self): super(ComputeVersion5xPinnedRpcTests, self).setUp() self.useFixture(cast_as_call.CastAsCall(self)) self.compute1 = self._start_compute(host='host1') def _test_rebuild_instance_with_compute_rpc_pin(self, version_cap): self.flags(compute=version_cap, group='upgrade_levels') server_req = self._build_server(networks='none') server = self.api.post_server({'server': server_req}) server = self._wait_for_state_change(server, 'ACTIVE') self.api.post_server_action(server['id'], {'rebuild': { 'imageRef': '155d900f-4e14-4e4c-a73d-069cbf4541e6' }}) def test_rebuild_instance_5_0(self): e = self.assertRaises(client.OpenStackApiException, self._test_rebuild_instance_with_compute_rpc_pin, '5.0') self.assertEqual(500, e.response.status_code) # NOTE(sbauza): It provides a TypeError because of 'accel_uuids' # parameter missing # TypeError: rebuild_instance() missing 1 required positional argument: # 'accel_uuids' self.assertIn('TypeError', e.response.text) def test_rebuild_instance_5_12(self): self._test_rebuild_instance_with_compute_rpc_pin('5.12')
<commit_before><commit_msg>Add a regression test for 5.12 compute API issue In I147bf4d95e6d86ff1f967a8ce37260730f21d236 we wrote a breaking RPC change for the 5.12 version as the accel_uuids parameter is not optional. Adding a regression test to check the issue. Change-Id: I1f3914e16294c99a625b3984ca0098d835cd9b92 Related-Bug: #1902925<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from nova.tests.functional.api import client from nova.tests.functional import integrated_helpers from nova.tests.unit import cast_as_call class ComputeVersion5xPinnedRpcTests(integrated_helpers._IntegratedTestBase): compute_driver = 'fake.MediumFakeDriver' ADMIN_API = True api_major_version = 'v2.1' microversion = 'latest' def setUp(self): super(ComputeVersion5xPinnedRpcTests, self).setUp() self.useFixture(cast_as_call.CastAsCall(self)) self.compute1 = self._start_compute(host='host1') def _test_rebuild_instance_with_compute_rpc_pin(self, version_cap): self.flags(compute=version_cap, group='upgrade_levels') server_req = self._build_server(networks='none') server = self.api.post_server({'server': server_req}) server = self._wait_for_state_change(server, 'ACTIVE') self.api.post_server_action(server['id'], {'rebuild': { 'imageRef': '155d900f-4e14-4e4c-a73d-069cbf4541e6' }}) def test_rebuild_instance_5_0(self): e = self.assertRaises(client.OpenStackApiException, self._test_rebuild_instance_with_compute_rpc_pin, '5.0') self.assertEqual(500, e.response.status_code) # NOTE(sbauza): It provides a TypeError because of 'accel_uuids' # parameter missing # TypeError: rebuild_instance() missing 1 required positional argument: # 'accel_uuids' self.assertIn('TypeError', e.response.text) def test_rebuild_instance_5_12(self): self._test_rebuild_instance_with_compute_rpc_pin('5.12')
b6cc76f1599620bee0b7f478c447588b965e960f
jal_stats/stats/migrations/0002_auto_20151027_0223.py
jal_stats/stats/migrations/0002_auto_20151027_0223.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('stats', '0001_initial'), ] operations = [ migrations.AlterField( model_name='datapoint', name='timestamp', field=models.DateTimeField(), ), ]
Remove now() constraint on DateTimeField
Remove now() constraint on DateTimeField
Python
mit
jal-stats/django
Remove now() constraint on DateTimeField
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('stats', '0001_initial'), ] operations = [ migrations.AlterField( model_name='datapoint', name='timestamp', field=models.DateTimeField(), ), ]
<commit_before><commit_msg>Remove now() constraint on DateTimeField<commit_after>
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('stats', '0001_initial'), ] operations = [ migrations.AlterField( model_name='datapoint', name='timestamp', field=models.DateTimeField(), ), ]
Remove now() constraint on DateTimeField# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('stats', '0001_initial'), ] operations = [ migrations.AlterField( model_name='datapoint', name='timestamp', field=models.DateTimeField(), ), ]
<commit_before><commit_msg>Remove now() constraint on DateTimeField<commit_after># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('stats', '0001_initial'), ] operations = [ migrations.AlterField( model_name='datapoint', name='timestamp', field=models.DateTimeField(), ), ]
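For context, the AlterField above strips whatever time constraint the field carried before (per the commit subject, a now() default), leaving a plain DateTimeField. A sketch of the resulting declaration; the class name Datapoint is inferred from model_name='datapoint' and may differ in the real app:

from django.db import models

class Datapoint(models.Model):
    timestamp = models.DateTimeField()  # no auto-set default after this migration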
ae83756e8874b30681d336fded42866d81a7b919
scripts/data_download/rais/download_manager_rais.py
scripts/data_download/rais/download_manager_rais.py
import os import commands import time import logging import sys if len(sys.argv) != 5 or (sys.argv[1:][0] not in ['pt', 'en']): print "ERROR! use :\n python scripts/data_download/rais/download_manager_rais.py en/pt output_path year time(seconds)\n" exit() files = ["i", "lo", "lio"] type_location = ["regions", "states", "mesoregions", "microregions", "municipalities", "no_location"] time_delay = int(sys.argv[4]) logging.basicConfig(filename='rais_create_files_by_installments.log',level=logging.DEBUG) # industry i = commands.getoutput("python scripts/data_download/rais/create_files_by_installments.py "+sys.argv[1]+" "+sys.argv[2]+" "+sys.argv[3]+" i ") logging.info(i+"\n"+"data_download i ok\n\n") time.sleep(time_delay) ## number of seconds #industry occupation lo = commands.getoutput("python scripts/data_download/rais/create_files_by_installments.py "+sys.argv[1]+" "+sys.argv[2]+" "+sys.argv[3]+" lo") logging.info(lo+"\n\n"+"data_download lo ok\n\n") time.sleep(time_delay) ## number of seconds #else for type_file in type_location: lio = commands.getoutput("python scripts/data_download/rais/create_files_by_installments.py "+sys.argv[1]+" "+sys.argv[2]+" "+sys.argv[3]+" lio "+type_file) logging.info(lio+"\n"+"data_download lio " + type_file + " ok\n") time.sleep(time_delay) ## number of seconds
Add manager of files download.
Add manager of files download.
Python
mit
DataViva/dataviva-site,DataViva/dataviva-site,DataViva/dataviva-site,DataViva/dataviva-site
Add manager of files download.
import os import commands import time import logging import sys if len(sys.argv) != 5 or (sys.argv[1:][0] not in ['pt', 'en']): print "ERROR! use :\n python scripts/data_download/rais/download_manager_rais.py en/pt output_path year time(seconds)\n" exit() files = ["i", "lo", "lio"] type_location = ["regions", "states", "mesoregions", "microregions", "municipalities", "no_location"] time_delay = int(sys.argv[4]) logging.basicConfig(filename='rais_create_files_by_installments.log',level=logging.DEBUG) # industry i = commands.getoutput("python scripts/data_download/rais/create_files_by_installments.py "+sys.argv[1]+" "+sys.argv[2]+" "+sys.argv[3]+" i ") logging.info(i+"\n"+"data_download i ok\n\n") time.sleep(time_delay) ## number of seconds #industry occupation lo = commands.getoutput("python scripts/data_download/rais/create_files_by_installments.py "+sys.argv[1]+" "+sys.argv[2]+" "+sys.argv[3]+" lo") logging.info(lo+"\n\n"+"data_download lo ok\n\n") time.sleep(time_delay) ## number of seconds #else for type_file in type_location: lio = commands.getoutput("python scripts/data_download/rais/create_files_by_installments.py "+sys.argv[1]+" "+sys.argv[2]+" "+sys.argv[3]+" lio "+type_file) logging.info(lio+"\n"+"data_download lio " + type_file + " ok\n") time.sleep(time_delay) ## number of seconds
<commit_before><commit_msg>Add manager of files download.<commit_after>
import os import commands import time import logging import sys if len(sys.argv) != 5 or (sys.argv[1:][0] not in ['pt', 'en']): print "ERROR! use :\n python scripts/data_download/rais/download_manager_rais.py en/pt output_path year time(seconds)\n" exit() files = ["i", "lo", "lio"] type_location = ["regions", "states", "mesoregions", "microregions", "municipalities", "no_location"] time_delay = int(sys.argv[4]) logging.basicConfig(filename='rais_create_files_by_installments.log',level=logging.DEBUG) # industry i = commands.getoutput("python scripts/data_download/rais/create_files_by_installments.py "+sys.argv[1]+" "+sys.argv[2]+" "+sys.argv[3]+" i ") logging.info(i+"\n"+"data_download i ok\n\n") time.sleep(time_delay) ## number of seconds #industry occupation lo = commands.getoutput("python scripts/data_download/rais/create_files_by_installments.py "+sys.argv[1]+" "+sys.argv[2]+" "+sys.argv[3]+" lo") logging.info(lo+"\n\n"+"data_download lo ok\n\n") time.sleep(time_delay) ## number of seconds #else for type_file in type_location: lio = commands.getoutput("python scripts/data_download/rais/create_files_by_installments.py "+sys.argv[1]+" "+sys.argv[2]+" "+sys.argv[3]+" lio "+type_file) logging.info(lio+"\n"+"data_download lio " + type_file + " ok\n") time.sleep(time_delay) ## number of seconds
Add manager of files download.import os
import commands
import time
import logging
import sys

if len(sys.argv) != 5 or (sys.argv[1:][0] not in ['pt', 'en']):
    print "ERROR! use :\n python scripts/data_download/rais/download_manager_rais.py en/pt output_path year time(seconds)\n"
    exit()

files = ["i", "lo", "lio"]
type_location = ["regions", "states", "mesoregions", "microregions", "municipalities", "no_location"]
time_delay = int(sys.argv[4])

logging.basicConfig(filename='rais_create_files_by_installments.log',level=logging.DEBUG)

# industry
i = commands.getoutput("python scripts/data_download/rais/create_files_by_installments.py "+sys.argv[1]+" "+sys.argv[2]+" "+sys.argv[3]+" i ")
logging.info(i+"\n"+"data_download i ok\n\n")
time.sleep(time_delay) ## number of seconds

#industry occupation
lo = commands.getoutput("python scripts/data_download/rais/create_files_by_installments.py "+sys.argv[1]+" "+sys.argv[2]+" "+sys.argv[3]+" lo")
logging.info(lo+"\n\n"+"data_download lo ok\n\n")
time.sleep(time_delay) ## number of seconds

#else
for type_file in type_location:
    lio = commands.getoutput("python scripts/data_download/rais/create_files_by_installments.py "+sys.argv[1]+" "+sys.argv[2]+" "+sys.argv[3]+" lio "+type_file)
    logging.info(lio+"\n"+"data_download lio " + type_file + " ok\n")
    time.sleep(time_delay) ## number of seconds
<commit_before><commit_msg>Add manager of files download.<commit_after>import os
import commands
import time
import logging
import sys

if len(sys.argv) != 5 or (sys.argv[1:][0] not in ['pt', 'en']):
    print "ERROR! use :\n python scripts/data_download/rais/download_manager_rais.py en/pt output_path year time(seconds)\n"
    exit()

files = ["i", "lo", "lio"]
type_location = ["regions", "states", "mesoregions", "microregions", "municipalities", "no_location"]
time_delay = int(sys.argv[4])

logging.basicConfig(filename='rais_create_files_by_installments.log',level=logging.DEBUG)

# industry
i = commands.getoutput("python scripts/data_download/rais/create_files_by_installments.py "+sys.argv[1]+" "+sys.argv[2]+" "+sys.argv[3]+" i ")
logging.info(i+"\n"+"data_download i ok\n\n")
time.sleep(time_delay) ## number of seconds

#industry occupation
lo = commands.getoutput("python scripts/data_download/rais/create_files_by_installments.py "+sys.argv[1]+" "+sys.argv[2]+" "+sys.argv[3]+" lo")
logging.info(lo+"\n\n"+"data_download lo ok\n\n")
time.sleep(time_delay) ## number of seconds

#else
for type_file in type_location:
    lio = commands.getoutput("python scripts/data_download/rais/create_files_by_installments.py "+sys.argv[1]+" "+sys.argv[2]+" "+sys.argv[3]+" lio "+type_file)
    logging.info(lio+"\n"+"data_download lio " + type_file + " ok\n")
    time.sleep(time_delay) ## number of seconds
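The commands module this script relies on was removed in Python 3; subprocess.getoutput is the standard-library drop-in with the same capture-everything behaviour. A sketch of one of the calls ported (argument values are placeholders):

import subprocess

# Python 3 equivalent of commands.getoutput; language/path/year are placeholders.
i = subprocess.getoutput(
    "python scripts/data_download/rais/create_files_by_installments.py en output 2015 i")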
25abc6eb0498e629160543d37d43a8dfa665397a
waftools/configure_appinfo.py
waftools/configure_appinfo.py
import json from waflib.Configure import conf @conf def configure_appinfo(ctx, transforms): with open('appinfo.json', 'r') as appinfo_file: appinfo_json = json.load(appinfo_file) for transform in transforms: transform(appinfo_json) with open('appinfo.json', 'w') as appinfo_file: json.dump(appinfo_json, appinfo_file, indent=2, sort_keys=True, separators=(',', ': '))
Add configure appinfo waftool for transforming the appinfo
Add configure appinfo waftool for transforming the appinfo
Python
mit
youtux/PebbleShows,carlo-colombo/dublin-bus-pebble,jiangege/pebblejs-project,sunshineyyy/CatchOneBus,carlo-colombo/dublin-bus-pebble,sunshineyyy/CatchOneBus,youtux/PebbleShows,daduke/LMSController,jiangege/pebblejs-project,bkbilly/Tvheadend-EPG,fletchto99/pebblejs,jsfi/pebblejs,fletchto99/pebblejs,daduke/LMSController,pebble/pebblejs,pebble/pebblejs,bkbilly/Tvheadend-EPG,daduke/LMSController,fletchto99/pebblejs,jiangege/pebblejs-project,sunshineyyy/CatchOneBus,jsfi/pebblejs,sunshineyyy/CatchOneBus,bkbilly/Tvheadend-EPG,pebble/pebblejs,pebble/pebblejs,fletchto99/pebblejs,jsfi/pebblejs,jiangege/pebblejs-project,youtux/PebbleShows,daduke/LMSController,carlo-colombo/dublin-bus-pebble,sunshineyyy/CatchOneBus,carlo-colombo/dublin-bus-pebble,carlo-colombo/dublin-bus-pebble,bkbilly/Tvheadend-EPG,jsfi/pebblejs,jsfi/pebblejs,daduke/LMSController,fletchto99/pebblejs,jiangege/pebblejs-project,bkbilly/Tvheadend-EPG,pebble/pebblejs,youtux/PebbleShows
Add configure appinfo waftool for transforming the appinfo
import json from waflib.Configure import conf @conf def configure_appinfo(ctx, transforms): with open('appinfo.json', 'r') as appinfo_file: appinfo_json = json.load(appinfo_file) for transform in transforms: transform(appinfo_json) with open('appinfo.json', 'w') as appinfo_file: json.dump(appinfo_json, appinfo_file, indent=2, sort_keys=True, separators=(',', ': '))
<commit_before><commit_msg>Add configure appinfo waftool for transforming the appinfo<commit_after>
import json from waflib.Configure import conf @conf def configure_appinfo(ctx, transforms): with open('appinfo.json', 'r') as appinfo_file: appinfo_json = json.load(appinfo_file) for transform in transforms: transform(appinfo_json) with open('appinfo.json', 'w') as appinfo_file: json.dump(appinfo_json, appinfo_file, indent=2, sort_keys=True, separators=(',', ': '))
Add configure appinfo waftool for transforming the appinfoimport json from waflib.Configure import conf @conf def configure_appinfo(ctx, transforms): with open('appinfo.json', 'r') as appinfo_file: appinfo_json = json.load(appinfo_file) for transform in transforms: transform(appinfo_json) with open('appinfo.json', 'w') as appinfo_file: json.dump(appinfo_json, appinfo_file, indent=2, sort_keys=True, separators=(',', ': '))
<commit_before><commit_msg>Add configure appinfo waftool for transforming the appinfo<commit_after>import json from waflib.Configure import conf @conf def configure_appinfo(ctx, transforms): with open('appinfo.json', 'r') as appinfo_file: appinfo_json = json.load(appinfo_file) for transform in transforms: transform(appinfo_json) with open('appinfo.json', 'w') as appinfo_file: json.dump(appinfo_json, appinfo_file, indent=2, sort_keys=True, separators=(',', ': '))
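Because configure_appinfo is registered with @conf, it becomes a method on the configuration context, taking a list of callables that mutate the parsed appinfo dict in place before it is written back. A hypothetical wscript fragment; the versionLabel key is assumed from Pebble's appinfo.json layout, not shown in this row:

def configure(conf):
    conf.load('configure_appinfo', tooldir='waftools')  # pick up the @conf method

    def bump_version(appinfo):
        appinfo['versionLabel'] = '1.1'  # assumed appinfo.json key

    conf.configure_appinfo([bump_version])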
9c3e6fbd762acae29fe71668e5c4faa28d8749a3
keyform/migrations/0014_auto_20180305_1627.py
keyform/migrations/0014_auto_20180305_1627.py
# -*- coding: utf-8 -*- # Generated by Django 1.11 on 2018-03-05 16:27 from __future__ import unicode_literals from django.db import migrations def convert_names(apps, schema_editor): contacts = apps.get_model('keyform', 'Contact') for contact in contacts.objects.all(): contact.email = contact.email.lower() contact.save() class Migration(migrations.Migration): dependencies = [ ('keyform', '0013_auto_20180304_2240'), ] operations = [ migrations.RunPython(convert_names, migrations.RunPython.noop) ]
Make a data migration that makes all contacts emails lowercase
Make a data migration that makes all contacts emails lowercase
Python
mit
mostateresnet/keyformproject,mostateresnet/keyformproject,mostateresnet/keyformproject
Make a data migration that makes all contacts emails lowercase
# -*- coding: utf-8 -*- # Generated by Django 1.11 on 2018-03-05 16:27 from __future__ import unicode_literals from django.db import migrations def convert_names(apps, schema_editor): contacts = apps.get_model('keyform', 'Contact') for contact in contacts.objects.all(): contact.email = contact.email.lower() contact.save() class Migration(migrations.Migration): dependencies = [ ('keyform', '0013_auto_20180304_2240'), ] operations = [ migrations.RunPython(convert_names, migrations.RunPython.noop) ]
<commit_before><commit_msg>Make a data migration that makes all contacts emails lowercase<commit_after>
# -*- coding: utf-8 -*- # Generated by Django 1.11 on 2018-03-05 16:27 from __future__ import unicode_literals from django.db import migrations def convert_names(apps, schema_editor): contacts = apps.get_model('keyform', 'Contact') for contact in contacts.objects.all(): contact.email = contact.email.lower() contact.save() class Migration(migrations.Migration): dependencies = [ ('keyform', '0013_auto_20180304_2240'), ] operations = [ migrations.RunPython(convert_names, migrations.RunPython.noop) ]
Make a data migration that makes all contacts' emails lowercase# -*- coding: utf-8 -*- # Generated by Django 1.11 on 2018-03-05 16:27 from __future__ import unicode_literals from django.db import migrations def convert_names(apps, schema_editor): contacts = apps.get_model('keyform', 'Contact') for contact in contacts.objects.all(): contact.email = contact.email.lower() contact.save() class Migration(migrations.Migration): dependencies = [ ('keyform', '0013_auto_20180304_2240'), ] operations = [ migrations.RunPython(convert_names, migrations.RunPython.noop) ]
<commit_before><commit_msg>Make a data migration that makes all contacts' emails lowercase<commit_after># -*- coding: utf-8 -*- # Generated by Django 1.11 on 2018-03-05 16:27 from __future__ import unicode_literals from django.db import migrations def convert_names(apps, schema_editor): contacts = apps.get_model('keyform', 'Contact') for contact in contacts.objects.all(): contact.email = contact.email.lower() contact.save() class Migration(migrations.Migration): dependencies = [ ('keyform', '0013_auto_20180304_2240'), ] operations = [ migrations.RunPython(convert_names, migrations.RunPython.noop) ]
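Because the reverse operation is migrations.RunPython.noop, unapplying the migration simply leaves the emails as they are; applying it follows the usual Django flow (command below is illustrative):
# python manage.py migrate keyform 0014_auto_20180305_1627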
b941f17d80b19f905cc15a350fc3d9a4f083baf9
tools/invocation-info-info.py
tools/invocation-info-info.py
#!/usr/bin/env python # Copyright (c) 2016, Daniel Liew # This file is covered by the license in LICENSE-SVCB.txt """ Read an invocation info files and display information about it. """ from load_klee_runner import add_KleeRunner_to_module_search_path add_KleeRunner_to_module_search_path() from KleeRunner import InvocationInfo import argparse import logging import os import pprint import re import sys import yaml _logger = None def main(args): global _logger parser = argparse.ArgumentParser(description=__doc__) parser.add_argument("-l","--log-level",type=str, default="info", dest="log_level", choices=['debug','info','warning','error']) parser.add_argument('invocation_info_file', help='Invocation info file', type=argparse.FileType('r')) pargs = parser.parse_args() logLevel = getattr(logging, pargs.log_level.upper(),None) logging.basicConfig(level=logLevel) _logger = logging.getLogger(__name__) invocationInfos = InvocationInfo.loadRawInvocationInfos(pargs.invocation_info_file) print("schema version: {}".format(invocationInfos['schema_version'])) print("# of jobs: {}".format(len(invocationInfos['jobs']))) return 0 if __name__ == '__main__': sys.exit(main(sys.argv))
Add tool to display information about an invocation info file.
Add tool to display information about an invocation info file.
Python
mit
delcypher/klee-runner,delcypher/klee-runner
Add tool to display information about an invocation info file.
#!/usr/bin/env python # Copyright (c) 2016, Daniel Liew # This file is covered by the license in LICENSE-SVCB.txt """ Read an invocation info files and display information about it. """ from load_klee_runner import add_KleeRunner_to_module_search_path add_KleeRunner_to_module_search_path() from KleeRunner import InvocationInfo import argparse import logging import os import pprint import re import sys import yaml _logger = None def main(args): global _logger parser = argparse.ArgumentParser(description=__doc__) parser.add_argument("-l","--log-level",type=str, default="info", dest="log_level", choices=['debug','info','warning','error']) parser.add_argument('invocation_info_file', help='Invocation info file', type=argparse.FileType('r')) pargs = parser.parse_args() logLevel = getattr(logging, pargs.log_level.upper(),None) logging.basicConfig(level=logLevel) _logger = logging.getLogger(__name__) invocationInfos = InvocationInfo.loadRawInvocationInfos(pargs.invocation_info_file) print("schema version: {}".format(invocationInfos['schema_version'])) print("# of jobs: {}".format(len(invocationInfos['jobs']))) return 0 if __name__ == '__main__': sys.exit(main(sys.argv))
<commit_before><commit_msg>Add tool to display information about an invocation info file.<commit_after>
#!/usr/bin/env python # Copyright (c) 2016, Daniel Liew # This file is covered by the license in LICENSE-SVCB.txt """ Read an invocation info files and display information about it. """ from load_klee_runner import add_KleeRunner_to_module_search_path add_KleeRunner_to_module_search_path() from KleeRunner import InvocationInfo import argparse import logging import os import pprint import re import sys import yaml _logger = None def main(args): global _logger parser = argparse.ArgumentParser(description=__doc__) parser.add_argument("-l","--log-level",type=str, default="info", dest="log_level", choices=['debug','info','warning','error']) parser.add_argument('invocation_info_file', help='Invocation info file', type=argparse.FileType('r')) pargs = parser.parse_args() logLevel = getattr(logging, pargs.log_level.upper(),None) logging.basicConfig(level=logLevel) _logger = logging.getLogger(__name__) invocationInfos = InvocationInfo.loadRawInvocationInfos(pargs.invocation_info_file) print("schema version: {}".format(invocationInfos['schema_version'])) print("# of jobs: {}".format(len(invocationInfos['jobs']))) return 0 if __name__ == '__main__': sys.exit(main(sys.argv))
Add tool to display information about an invocation info file.#!/usr/bin/env python # Copyright (c) 2016, Daniel Liew # This file is covered by the license in LICENSE-SVCB.txt """ Read an invocation info files and display information about it. """ from load_klee_runner import add_KleeRunner_to_module_search_path add_KleeRunner_to_module_search_path() from KleeRunner import InvocationInfo import argparse import logging import os import pprint import re import sys import yaml _logger = None def main(args): global _logger parser = argparse.ArgumentParser(description=__doc__) parser.add_argument("-l","--log-level",type=str, default="info", dest="log_level", choices=['debug','info','warning','error']) parser.add_argument('invocation_info_file', help='Invocation info file', type=argparse.FileType('r')) pargs = parser.parse_args() logLevel = getattr(logging, pargs.log_level.upper(),None) logging.basicConfig(level=logLevel) _logger = logging.getLogger(__name__) invocationInfos = InvocationInfo.loadRawInvocationInfos(pargs.invocation_info_file) print("schema version: {}".format(invocationInfos['schema_version'])) print("# of jobs: {}".format(len(invocationInfos['jobs']))) return 0 if __name__ == '__main__': sys.exit(main(sys.argv))
<commit_before><commit_msg>Add tool to display information about an invocation info file.<commit_after>#!/usr/bin/env python # Copyright (c) 2016, Daniel Liew # This file is covered by the license in LICENSE-SVCB.txt """ Read an invocation info files and display information about it. """ from load_klee_runner import add_KleeRunner_to_module_search_path add_KleeRunner_to_module_search_path() from KleeRunner import InvocationInfo import argparse import logging import os import pprint import re import sys import yaml _logger = None def main(args): global _logger parser = argparse.ArgumentParser(description=__doc__) parser.add_argument("-l","--log-level",type=str, default="info", dest="log_level", choices=['debug','info','warning','error']) parser.add_argument('invocation_info_file', help='Invocation info file', type=argparse.FileType('r')) pargs = parser.parse_args() logLevel = getattr(logging, pargs.log_level.upper(),None) logging.basicConfig(level=logLevel) _logger = logging.getLogger(__name__) invocationInfos = InvocationInfo.loadRawInvocationInfos(pargs.invocation_info_file) print("schema version: {}".format(invocationInfos['schema_version'])) print("# of jobs: {}".format(len(invocationInfos['jobs']))) return 0 if __name__ == '__main__': sys.exit(main(sys.argv))
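Typical use, sketched as a shell transcript in comments — the input file name and the printed values are placeholders, not output from a real run:
# python tools/invocation-info-info.py --log-level info some_invocations.yml
# schema version: <n>
# # of jobs: <count>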
0aafbfe14ff9889c496ec2a4de382e113cb9c4a4
PynamoDB/util.py
PynamoDB/util.py
""" util.py ~~~~~~~~~~~~ contains utility functions. """ import hashlib from datetime import datetime # Classes def get_timestamp(): return datetime.utcnow() class Key(object): """ A simple object for adding aditional properties to keys """ def __init__(self, key=None, node_hash=None): self.key = key self.node_hash = node_hash class Value(object): """ A simple object for adding aditional properties to values """ def __init__(self, value=None, timestamp=None): self.value = value if not timestamp: self.timestamp = get_timestamp() else: self.timestamp = timestamp class ErrorCode(object): """ Object for passing around error codes from put/get/delete commands""" def __init__(self, error_code='\x00'): self.error_code = error_code def __eq__(self, other): return self.error_code == other.error_code def __nonzero__(self): if self.error_code == '\x00': return True else: return False def get_error_message(self): """returns a human-readable message for a given error.""" try: return{ '\x00': "Error code: {}. Operation successful.".format([error_code]), '\x01': "Error code: {}. Inexistant key.".format([error_code]), '\x02': "Error code: {}. MemoryError.".format([error_code]), '\x03': "Error code: {}. System overload.".format([error_code]), '\x04': "Error code: {}. Internal KVStore failure.".format([error_code]), '\x05': "Error code: {}. Unrecognized command.".format([error_code]), '\x06': "Error code: {}. Unrecognized error: {}.".format([error_code], sys.exc_info()[0]) }[self.error_code] except KeyError: self.logger.warn('Error code %s not found.', error, exc_info=True)
Add Key, Value, ErrorCode classes.
Add Key, Value, ErrorCode classes.
Python
mit
samuelwu90/PynamoDB
Add Key, Value, ErrorCode classes.
""" util.py ~~~~~~~~~~~~ contains utility functions. """ import hashlib from datetime import datetime # Classes def get_timestamp(): return datetime.utcnow() class Key(object): """ A simple object for adding aditional properties to keys """ def __init__(self, key=None, node_hash=None): self.key = key self.node_hash = node_hash class Value(object): """ A simple object for adding aditional properties to values """ def __init__(self, value=None, timestamp=None): self.value = value if not timestamp: self.timestamp = get_timestamp() else: self.timestamp = timestamp class ErrorCode(object): """ Object for passing around error codes from put/get/delete commands""" def __init__(self, error_code='\x00'): self.error_code = error_code def __eq__(self, other): return self.error_code == other.error_code def __nonzero__(self): if self.error_code == '\x00': return True else: return False def get_error_message(self): """returns a human-readable message for a given error.""" try: return{ '\x00': "Error code: {}. Operation successful.".format([error_code]), '\x01': "Error code: {}. Inexistant key.".format([error_code]), '\x02': "Error code: {}. MemoryError.".format([error_code]), '\x03': "Error code: {}. System overload.".format([error_code]), '\x04': "Error code: {}. Internal KVStore failure.".format([error_code]), '\x05': "Error code: {}. Unrecognized command.".format([error_code]), '\x06': "Error code: {}. Unrecognized error: {}.".format([error_code], sys.exc_info()[0]) }[self.error_code] except KeyError: self.logger.warn('Error code %s not found.', error, exc_info=True)
<commit_before><commit_msg>Add Key, Value, ErrorCode classes.<commit_after>
""" util.py ~~~~~~~~~~~~ contains utility functions. """ import hashlib from datetime import datetime # Classes def get_timestamp(): return datetime.utcnow() class Key(object): """ A simple object for adding aditional properties to keys """ def __init__(self, key=None, node_hash=None): self.key = key self.node_hash = node_hash class Value(object): """ A simple object for adding aditional properties to values """ def __init__(self, value=None, timestamp=None): self.value = value if not timestamp: self.timestamp = get_timestamp() else: self.timestamp = timestamp class ErrorCode(object): """ Object for passing around error codes from put/get/delete commands""" def __init__(self, error_code='\x00'): self.error_code = error_code def __eq__(self, other): return self.error_code == other.error_code def __nonzero__(self): if self.error_code == '\x00': return True else: return False def get_error_message(self): """returns a human-readable message for a given error.""" try: return{ '\x00': "Error code: {}. Operation successful.".format([error_code]), '\x01': "Error code: {}. Inexistant key.".format([error_code]), '\x02': "Error code: {}. MemoryError.".format([error_code]), '\x03': "Error code: {}. System overload.".format([error_code]), '\x04': "Error code: {}. Internal KVStore failure.".format([error_code]), '\x05': "Error code: {}. Unrecognized command.".format([error_code]), '\x06': "Error code: {}. Unrecognized error: {}.".format([error_code], sys.exc_info()[0]) }[self.error_code] except KeyError: self.logger.warn('Error code %s not found.', error, exc_info=True)
Add Key, Value, ErrorCode classes.""" util.py ~~~~~~~~~~~~ contains utility functions. """ import sys import hashlib from datetime import datetime # Classes def get_timestamp(): return datetime.utcnow() class Key(object): """ A simple object for adding additional properties to keys """ def __init__(self, key=None, node_hash=None): self.key = key self.node_hash = node_hash class Value(object): """ A simple object for adding additional properties to values """ def __init__(self, value=None, timestamp=None): self.value = value if not timestamp: self.timestamp = get_timestamp() else: self.timestamp = timestamp class ErrorCode(object): """ Object for passing around error codes from put/get/delete commands""" def __init__(self, error_code='\x00'): self.error_code = error_code def __eq__(self, other): return self.error_code == other.error_code def __nonzero__(self): if self.error_code == '\x00': return True else: return False def get_error_message(self): """returns a human-readable message for a given error.""" try: return { '\x00': "Error code: {}. Operation successful.".format(self.error_code), '\x01': "Error code: {}. Nonexistent key.".format(self.error_code), '\x02': "Error code: {}. MemoryError.".format(self.error_code), '\x03': "Error code: {}. System overload.".format(self.error_code), '\x04': "Error code: {}. Internal KVStore failure.".format(self.error_code), '\x05': "Error code: {}. Unrecognized command.".format(self.error_code), '\x06': "Error code: {}. Unrecognized error: {}.".format(self.error_code, sys.exc_info()[0]) }[self.error_code] except KeyError: return "Error code: {!r} not found.".format(self.error_code)
<commit_before><commit_msg>Add Key, Value, ErrorCode classes.<commit_after>""" util.py ~~~~~~~~~~~~ contains utility functions. """ import sys import hashlib from datetime import datetime # Classes def get_timestamp(): return datetime.utcnow() class Key(object): """ A simple object for adding additional properties to keys """ def __init__(self, key=None, node_hash=None): self.key = key self.node_hash = node_hash class Value(object): """ A simple object for adding additional properties to values """ def __init__(self, value=None, timestamp=None): self.value = value if not timestamp: self.timestamp = get_timestamp() else: self.timestamp = timestamp class ErrorCode(object): """ Object for passing around error codes from put/get/delete commands""" def __init__(self, error_code='\x00'): self.error_code = error_code def __eq__(self, other): return self.error_code == other.error_code def __nonzero__(self): if self.error_code == '\x00': return True else: return False def get_error_message(self): """returns a human-readable message for a given error.""" try: return { '\x00': "Error code: {}. Operation successful.".format(self.error_code), '\x01': "Error code: {}. Nonexistent key.".format(self.error_code), '\x02': "Error code: {}. MemoryError.".format(self.error_code), '\x03': "Error code: {}. System overload.".format(self.error_code), '\x04': "Error code: {}. Internal KVStore failure.".format(self.error_code), '\x05': "Error code: {}. Unrecognized command.".format(self.error_code), '\x06': "Error code: {}. Unrecognized error: {}.".format(self.error_code, sys.exc_info()[0]) }[self.error_code] except KeyError: return "Error code: {!r} not found.".format(self.error_code)
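A quick sanity check of the intended ErrorCode semantics, assuming the Python 2 runtime the module targets (where __nonzero__ drives truthiness):
ok = ErrorCode()             # defaults to the success code '\x00'
missing = ErrorCode('\x01')  # nonexistent-key error
assert ok and not missing    # only the success code is truthy
assert ok == ErrorCode('\x00')
print missing.get_error_message()  # message embeds the raw control byte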
0d6a0b592c3d137c39cbc17f159d81a037a12730
sideloader/tests/test_utils.py
sideloader/tests/test_utils.py
from sideloader.utils import args_str, create_venv_paths def test_args_str_string_list(): """ args_str should join a list of strings. """ assert args_str(['a', 'b']) == 'a b' def test_args_str_mixed_list(): """ args_str should join a list of objects, converting each to a string. """ assert args_str(['a', 1, None]) == 'a 1 None' def test_args_str_empty_list(): """ args_str should return an empty string for an empty list. """ assert args_str([]) == '' def test_args_str_string(): """ args_str should leave string arguments as strings. """ assert args_str('abc def') == 'abc def' def test_args_str_none(): """ args_str should return the string form of non-string arguments. """ assert args_str(None) == 'None' def test_create_venv_paths(): """ create_venv_paths should return the correct set of paths for a virtualenv at the specified location. """ venv_paths = create_venv_paths('/mypath', 'myvenv') assert venv_paths.venv == '/mypath/myvenv' assert venv_paths.bin == '/mypath/myvenv/bin' assert venv_paths.activate == '/mypath/myvenv/bin/activate' assert venv_paths.pip == '/mypath/myvenv/bin/pip' assert venv_paths.python == '/mypath/myvenv/bin/python'
Add some tests for utils
Add some tests for utils
Python
mit
praekelt/sideloader2,praekelt/sideloader2,praekelt/sideloader2
Add some tests for utils
from sideloader.utils import args_str, create_venv_paths def test_args_str_string_list(): """ args_str should join a list of strings. """ assert args_str(['a', 'b']) == 'a b' def test_args_str_mixed_list(): """ args_str should join a list of objects, converting each to a string. """ assert args_str(['a', 1, None]) == 'a 1 None' def test_args_str_empty_list(): """ args_str should return an empty string for an empty list. """ assert args_str([]) == '' def test_args_str_string(): """ args_str should leave string arguments as strings. """ assert args_str('abc def') == 'abc def' def test_args_str_none(): """ args_str should return the string form of non-string arguments. """ assert args_str(None) == 'None' def test_create_venv_paths(): """ create_venv_paths should return the correct set of paths for a virtualenv at the specified location. """ venv_paths = create_venv_paths('/mypath', 'myvenv') assert venv_paths.venv == '/mypath/myvenv' assert venv_paths.bin == '/mypath/myvenv/bin' assert venv_paths.activate == '/mypath/myvenv/bin/activate' assert venv_paths.pip == '/mypath/myvenv/bin/pip' assert venv_paths.python == '/mypath/myvenv/bin/python'
<commit_before><commit_msg>Add some tests for utils<commit_after>
from sideloader.utils import args_str, create_venv_paths def test_args_str_string_list(): """ args_str should join a list of strings. """ assert args_str(['a', 'b']) == 'a b' def test_args_str_mixed_list(): """ args_str should join a list of objects, converting each to a string. """ assert args_str(['a', 1, None]) == 'a 1 None' def test_args_str_empty_list(): """ args_str should return an empty string for an empty list. """ assert args_str([]) == '' def test_args_str_string(): """ args_str should leave string arguments as strings. """ assert args_str('abc def') == 'abc def' def test_args_str_none(): """ args_str should return the string form of non-string arguments. """ assert args_str(None) == 'None' def test_create_venv_paths(): """ create_venv_paths should return the correct set of paths for a virtualenv at the specified location. """ venv_paths = create_venv_paths('/mypath', 'myvenv') assert venv_paths.venv == '/mypath/myvenv' assert venv_paths.bin == '/mypath/myvenv/bin' assert venv_paths.activate == '/mypath/myvenv/bin/activate' assert venv_paths.pip == '/mypath/myvenv/bin/pip' assert venv_paths.python == '/mypath/myvenv/bin/python'
Add some tests for utilsfrom sideloader.utils import args_str, create_venv_paths def test_args_str_string_list(): """ args_str should join a list of strings. """ assert args_str(['a', 'b']) == 'a b' def test_args_str_mixed_list(): """ args_str should join a list of objects, converting each to a string. """ assert args_str(['a', 1, None]) == 'a 1 None' def test_args_str_empty_list(): """ args_str should return an empty string for an empty list. """ assert args_str([]) == '' def test_args_str_string(): """ args_str should leave string arguments as strings. """ assert args_str('abc def') == 'abc def' def test_args_str_none(): """ args_str should return the string form of non-string arguments. """ assert args_str(None) == 'None' def test_create_venv_paths(): """ create_venv_paths should return the correct set of paths for a virtualenv at the specified location. """ venv_paths = create_venv_paths('/mypath', 'myvenv') assert venv_paths.venv == '/mypath/myvenv' assert venv_paths.bin == '/mypath/myvenv/bin' assert venv_paths.activate == '/mypath/myvenv/bin/activate' assert venv_paths.pip == '/mypath/myvenv/bin/pip' assert venv_paths.python == '/mypath/myvenv/bin/python'
<commit_before><commit_msg>Add some tests for utils<commit_after>from sideloader.utils import args_str, create_venv_paths def test_args_str_string_list(): """ args_str should join a list of strings. """ assert args_str(['a', 'b']) == 'a b' def test_args_str_mixed_list(): """ args_str should join a list of objects, converting each to a string. """ assert args_str(['a', 1, None]) == 'a 1 None' def test_args_str_empty_list(): """ args_str should return an empty string for an empty list. """ assert args_str([]) == '' def test_args_str_string(): """ args_str should leave string arguments as strings. """ assert args_str('abc def') == 'abc def' def test_args_str_none(): """ args_str should return the string form of non-string arguments. """ assert args_str(None) == 'None' def test_create_venv_paths(): """ create_venv_paths should return the correct set of paths for a virtualenv at the specified location. """ venv_paths = create_venv_paths('/mypath', 'myvenv') assert venv_paths.venv == '/mypath/myvenv' assert venv_paths.bin == '/mypath/myvenv/bin' assert venv_paths.activate == '/mypath/myvenv/bin/activate' assert venv_paths.pip == '/mypath/myvenv/bin/pip' assert venv_paths.python == '/mypath/myvenv/bin/python'
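The helpers under test are small; from the assertions alone, one plausible shape is the sketch below (a reconstruction for illustration, not the project's actual implementation):
import os
from collections import namedtuple

VenvPaths = namedtuple('VenvPaths', ['venv', 'bin', 'activate', 'pip', 'python'])

def create_venv_paths(base, name):
    venv = os.path.join(base, name)
    bin_dir = os.path.join(venv, 'bin')
    return VenvPaths(venv, bin_dir,
                     os.path.join(bin_dir, 'activate'),
                     os.path.join(bin_dir, 'pip'),
                     os.path.join(bin_dir, 'python'))

def args_str(args):
    # join a list of arbitrary objects; non-list arguments pass through str()
    if isinstance(args, list):
        return ' '.join(str(a) for a in args)
    return str(args)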
989d83eba8a3207d614486693fa33cf85def6515
src/collectors/PowerDNSCollector/PowerDNSCollector.py
src/collectors/PowerDNSCollector/PowerDNSCollector.py
import diamond.collector import subprocess class PowerDNSCollector(diamond.collector.Collector): """ Collects all metrics exported by the powerdns nameserver using the pdns_control binary. """ def get_default_config(self): """ Returns the default collector settings """ return { 'pdns_control': '/usr/bin/pdns_control', 'path': 'powerdns', } def collect(self): sp = subprocess.Popen([self.config['pdns_control'], "list"], stdout=subprocess.PIPE) data = sp.communicate()[0] for metric in data.split(','): if not metric.strip(): continue metric, value = metric.split('=') self.publish(metric, int(value))
Add a collector for powerdns statistics
Add a collector for powerdns statistics
Python
mit
dcsquared13/Diamond,Ensighten/Diamond,MichaelDoyle/Diamond,Nihn/Diamond-1,eMerzh/Diamond-1,jumping/Diamond,jriguera/Diamond,socialwareinc/Diamond,Clever/Diamond,Ormod/Diamond,TinLe/Diamond,disqus/Diamond,Slach/Diamond,tellapart/Diamond,skbkontur/Diamond,CYBERBUGJR/Diamond,jaingaurav/Diamond,saucelabs/Diamond,datafiniti/Diamond,zoidbergwill/Diamond,TAKEALOT/Diamond,actmd/Diamond,zoidbergwill/Diamond,dcsquared13/Diamond,tellapart/Diamond,TAKEALOT/Diamond,Basis/Diamond,joel-airspring/Diamond,jriguera/Diamond,Basis/Diamond,signalfx/Diamond,Precis/Diamond,tusharmakkar08/Diamond,janisz/Diamond-1,EzyInsights/Diamond,gg7/diamond,TinLe/Diamond,datafiniti/Diamond,janisz/Diamond-1,Basis/Diamond,CYBERBUGJR/Diamond,acquia/Diamond,szibis/Diamond,timchenxiaoyu/Diamond,Basis/Diamond,disqus/Diamond,h00dy/Diamond,EzyInsights/Diamond,jriguera/Diamond,krbaker/Diamond,krbaker/Diamond,anandbhoraskar/Diamond,jaingaurav/Diamond,cannium/Diamond,TinLe/Diamond,MichaelDoyle/Diamond,eMerzh/Diamond-1,Netuitive/netuitive-diamond,Ssawa/Diamond,thardie/Diamond,joel-airspring/Diamond,signalfx/Diamond,thardie/Diamond,MichaelDoyle/Diamond,codepython/Diamond,ceph/Diamond,jaingaurav/Diamond,TinLe/Diamond,anandbhoraskar/Diamond,Netuitive/Diamond,MediaMath/Diamond,signalfx/Diamond,Slach/Diamond,russss/Diamond,CYBERBUGJR/Diamond,Ormod/Diamond,hvnsweeting/Diamond,jaingaurav/Diamond,Netuitive/Diamond,mfriedenhagen/Diamond,Slach/Diamond,dcsquared13/Diamond,tusharmakkar08/Diamond,ceph/Diamond,tuenti/Diamond,metamx/Diamond,Netuitive/Diamond,tellapart/Diamond,thardie/Diamond,disqus/Diamond,Ssawa/Diamond,hamelg/Diamond,jumping/Diamond,tusharmakkar08/Diamond,codepython/Diamond,rtoma/Diamond,Ormod/Diamond,russss/Diamond,datafiniti/Diamond,szibis/Diamond,cannium/Diamond,Ssawa/Diamond,mfriedenhagen/Diamond,bmhatfield/Diamond,mzupan/Diamond,acquia/Diamond,mzupan/Diamond,rtoma/Diamond,bmhatfield/Diamond,MediaMath/Diamond,Clever/Diamond,Ormod/Diamond,zoidbergwill/Diamond,acquia/Diamond,Netuitive/netuitive-diamond,h00dy/Diamond,Netuitive/netuitive-diamond,CYBERBUGJR/Diamond,tuenti/Diamond,metamx/Diamond,works-mobile/Diamond,ceph/Diamond,socialwareinc/Diamond,Netuitive/Diamond,Clever/Diamond,hvnsweeting/Diamond,metamx/Diamond,codepython/Diamond,mfriedenhagen/Diamond,datafiniti/Diamond,sebbrandt87/Diamond,Precis/Diamond,tuenti/Diamond,timchenxiaoyu/Diamond,ramjothikumar/Diamond,rtoma/Diamond,russss/Diamond,Ensighten/Diamond,hvnsweeting/Diamond,thardie/Diamond,skbkontur/Diamond,codepython/Diamond,stuartbfox/Diamond,eMerzh/Diamond-1,joel-airspring/Diamond,sebbrandt87/Diamond,timchenxiaoyu/Diamond,cannium/Diamond,janisz/Diamond-1,h00dy/Diamond,Nihn/Diamond-1,tusharmakkar08/Diamond,TAKEALOT/Diamond,russss/Diamond,jriguera/Diamond
Add a collector for powerdns statistics
import diamond.collector import subprocess class PowerDNSCollector(diamond.collector.Collector): """ Collects all metrics exported by the powerdns nameserver using the pdns_control binary. """ def get_default_config(self): """ Returns the default collector settings """ return { 'pdns_control': '/usr/bin/pdns_control', 'path': 'powerdns', } def collect(self): sp = subprocess.Popen([self.config['pdns_control'], "list"], stdout=subprocess.PIPE) data = sp.communicate()[0] for metric in data.split(','): if not metric.strip(): continue metric, value = metric.split('=') self.publish(metric, int(value))
<commit_before><commit_msg>Add a collector for powerdns statistics<commit_after>
import diamond.collector import subprocess class PowerDNSCollector(diamond.collector.Collector): """ Collects all metrics exported by the powerdns nameserver using the pdns_control binary. """ def get_default_config(self): """ Returns the default collector settings """ return { 'pdns_control': '/usr/bin/pdns_control', 'path': 'powerdns', } def collect(self): sp = subprocess.Popen([self.config['pdns_control'], "list"], stdout=subprocess.PIPE) data = sp.communicate()[0] for metric in data.split(','): if not metric.strip(): continue metric, value = metric.split('=') self.publish(metric, int(value))
Add a collector for powerdns statisticsimport diamond.collector import subprocess class PowerDNSCollector(diamond.collector.Collector): """ Collects all metrics exported by the powerdns nameserver using the pdns_control binary. """ def get_default_config(self): """ Returns the default collector settings """ return { 'pdns_control': '/usr/bin/pdns_control', 'path': 'powerdns', } def collect(self): sp = subprocess.Popen([self.config['pdns_control'], "list"], stdout=subprocess.PIPE) data = sp.communicate()[0] for metric in data.split(','): if not metric.strip(): continue metric, value = metric.split('=') self.publish(metric, int(value))
<commit_before><commit_msg>Add a collector for powerdns statistics<commit_after>import diamond.collector import subprocess class PowerDNSCollector(diamond.collector.Collector): """ Collects all metrics exported by the powerdns nameserver using the pdns_control binary. """ def get_default_config(self): """ Returns the default collector settings """ return { 'pdns_control': '/usr/bin/pdns_control', 'path': 'powerdns', } def collect(self): sp = subprocess.Popen([self.config['pdns_control'], "list"], stdout=subprocess.PIPE) data = sp.communicate()[0] for metric in data.split(','): if not metric.strip(): continue metric, value = metric.split('=') self.publish(metric, int(value))
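For reference, pdns_control list prints one comma-separated line of name=value pairs, so the parsing loop can be exercised offline against a captured sample (metric names and values below are illustrative, not real pdns output):
data = "corrupt-packets=0,deferred-cache-inserts=0,latency=120,"
for metric in data.split(','):
    if not metric.strip():
        continue  # skip the empty field left by the trailing comma
    name, value = metric.split('=')
    print name, int(value)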
f9accb1320daeabef68dc9f78e5748c1fd498bce
anthemav/tools.py
anthemav/tools.py
import argparse import asyncio import anthemav import logging log = logging.getLogger(__name__) @asyncio.coroutine def console(loop): parser = argparse.ArgumentParser(description=console.__doc__) parser.add_argument('--host', default='127.0.0.1', help='IP or FQDN of AVR') parser.add_argument('--port', default='14999', help='Port of AVR') parser.add_argument('--verbose', '-v', action='count') args = parser.parse_args() if args.verbose: level = logging.DEBUG else: level = logging.INFO logging.basicConfig(level=level) def log_callback(message): log.info('Callback invoked: %s' % message) host = args.host port = int(args.port) log.info('Connecting to Anthem AVR at %s:%i' % (host, port)) conn = yield from anthemav.Connection.create(host=host, port=port, loop=loop, update_callback=log_callback) log.info('Power state is ' + str(conn.protocol.power)) conn.protocol.power = True log.info('Power state is ' + str(conn.protocol.power)) yield from asyncio.sleep(2, loop=loop) log.info('Panel brightness (raw) is ' + str(conn.protocol.panel_brightness)) log.info('Panel brightness (text) is ' + str(conn.protocol.panel_brightness_text)) def monitor(): loop = asyncio.get_event_loop() asyncio.ensure_future(console(loop)) loop.run_forever()
Set up console monitor as a tool
Set up console monitor as a tool
Python
mit
nugget/python-anthemav
Set up console monitor as a tool
import argparse import asyncio import anthemav import logging log = logging.getLogger(__name__) @asyncio.coroutine def console(loop): parser = argparse.ArgumentParser(description=console.__doc__) parser.add_argument('--host', default='127.0.0.1', help='IP or FQDN of AVR') parser.add_argument('--port', default='14999', help='Port of AVR') parser.add_argument('--verbose', '-v', action='count') args = parser.parse_args() if args.verbose: level = logging.DEBUG else: level = logging.INFO logging.basicConfig(level=level) def log_callback(message): log.info('Callback invoked: %s' % message) host = args.host port = int(args.port) log.info('Connecting to Anthem AVR at %s:%i' % (host, port)) conn = yield from anthemav.Connection.create(host=host, port=port, loop=loop, update_callback=log_callback) log.info('Power state is ' + str(conn.protocol.power)) conn.protocol.power = True log.info('Power state is ' + str(conn.protocol.power)) yield from asyncio.sleep(2, loop=loop) log.info('Panel brightness (raw) is ' + str(conn.protocol.panel_brightness)) log.info('Panel brightness (text) is ' + str(conn.protocol.panel_brightness_text)) def monitor(): loop = asyncio.get_event_loop() asyncio.ensure_future(console(loop)) loop.run_forever()
<commit_before><commit_msg>Set up console monitor as a tool<commit_after>
import argparse import asyncio import anthemav import logging log = logging.getLogger(__name__) @asyncio.coroutine def console(loop): parser = argparse.ArgumentParser(description=console.__doc__) parser.add_argument('--host', default='127.0.0.1', help='IP or FQDN of AVR') parser.add_argument('--port', default='14999', help='Port of AVR') parser.add_argument('--verbose', '-v', action='count') args = parser.parse_args() if args.verbose: level = logging.DEBUG else: level = logging.INFO logging.basicConfig(level=level) def log_callback(message): log.info('Callback invoked: %s' % message) host = args.host port = int(args.port) log.info('Connecting to Anthem AVR at %s:%i' % (host, port)) conn = yield from anthemav.Connection.create(host=host, port=port, loop=loop, update_callback=log_callback) log.info('Power state is ' + str(conn.protocol.power)) conn.protocol.power = True log.info('Power state is ' + str(conn.protocol.power)) yield from asyncio.sleep(2, loop=loop) log.info('Panel brightness (raw) is ' + str(conn.protocol.panel_brightness)) log.info('Panel brightness (text) is ' + str(conn.protocol.panel_brightness_text)) def monitor(): loop = asyncio.get_event_loop() asyncio.ensure_future(console(loop)) loop.run_forever()
Set up console monitor as a toolimport argparse import asyncio import anthemav import logging log = logging.getLogger(__name__) @asyncio.coroutine def console(loop): parser = argparse.ArgumentParser(description=console.__doc__) parser.add_argument('--host', default='127.0.0.1', help='IP or FQDN of AVR') parser.add_argument('--port', default='14999', help='Port of AVR') parser.add_argument('--verbose', '-v', action='count') args = parser.parse_args() if args.verbose: level = logging.DEBUG else: level = logging.INFO logging.basicConfig(level=level) def log_callback(message): log.info('Callback invoked: %s' % message) host = args.host port = int(args.port) log.info('Connecting to Anthem AVR at %s:%i' % (host, port)) conn = yield from anthemav.Connection.create(host=host, port=port, loop=loop, update_callback=log_callback) log.info('Power state is ' + str(conn.protocol.power)) conn.protocol.power = True log.info('Power state is ' + str(conn.protocol.power)) yield from asyncio.sleep(2, loop=loop) log.info('Panel brightness (raw) is ' + str(conn.protocol.panel_brightness)) log.info('Panel brightness (text) is ' + str(conn.protocol.panel_brightness_text)) def monitor(): loop = asyncio.get_event_loop() asyncio.ensure_future(console(loop)) loop.run_forever()
<commit_before><commit_msg>Set up console monitor as a tool<commit_after>import argparse import asyncio import anthemav import logging log = logging.getLogger(__name__) @asyncio.coroutine def console(loop): parser = argparse.ArgumentParser(description=console.__doc__) parser.add_argument('--host', default='127.0.0.1', help='IP or FQDN of AVR') parser.add_argument('--port', default='14999', help='Port of AVR') parser.add_argument('--verbose', '-v', action='count') args = parser.parse_args() if args.verbose: level = logging.DEBUG else: level = logging.INFO logging.basicConfig(level=level) def log_callback(message): log.info('Callback invoked: %s' % message) host = args.host port = int(args.port) log.info('Connecting to Anthem AVR at %s:%i' % (host, port)) conn = yield from anthemav.Connection.create(host=host, port=port, loop=loop, update_callback=log_callback) log.info('Power state is ' + str(conn.protocol.power)) conn.protocol.power = True log.info('Power state is ' + str(conn.protocol.power)) yield from asyncio.sleep(2, loop=loop) log.info('Panel brightness (raw) is ' + str(conn.protocol.panel_brightness)) log.info('Panel brightness (text) is ' + str(conn.protocol.panel_brightness_text)) def monitor(): loop = asyncio.get_event_loop() asyncio.ensure_future(console(loop)) loop.run_forever()
dd481b5aac131be2987364408c41c0dab835f6af
vumi/scripts/model_migrator.py
vumi/scripts/model_migrator.py
# -*- test-case-name: vumi.scripts.tests.test_model_migrator -*- import sys from twisted.python import usage from vumi.utils import load_class_by_string from vumi.persist.riak_manager import RiakManager class Options(usage.Options): optParameters = [ ["model", "m", None, "Full Python name of the model class to migrate." " E.g. 'vumi.components.message_store.InboundMessage'."], ["bucket-prefix", "b", None, "The bucket prefix for the Riak manager."], ] longdesc = """Offline model migrator. Necessary for updating models when index names change so that old model instances remain findable by index searches. """ def postOptions(self): if self['model'] is None: raise usage.UsageError("Please specify a model class.") if self['bucket-prefix'] is None: raise usage.UsageError("Please specify a bucket prefix.") class ConfigHolder(object): def __init__(self, options): self.options = options model_cls = load_class_by_string(options['model']) riak_config = { 'bucket_prefix': options['bucket-prefix'], } manager = RiakManager.from_config(riak_config) self.model = manager.proxy(model_cls) def emit(self, s): print s def run(self): for key in self.model.all_keys(): obj = self.model.load(key) if obj is not None: obj.save() if __name__ == '__main__': try: options = Options() options.parseOptions() except usage.UsageError, errortext: print '%s: %s' % (sys.argv[0], errortext) print '%s: Try --help for usage details.' % (sys.argv[0]) sys.exit(1) cfg = ConfigHolder(options) cfg.run()
Add start of migrator script.
Add start of migrator script.
Python
bsd-3-clause
vishwaprakashmishra/xmatrix,TouK/vumi,vishwaprakashmishra/xmatrix,TouK/vumi,harrissoerja/vumi,TouK/vumi,harrissoerja/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi
Add start of migrator script.
# -*- test-case-name: vumi.scripts.tests.test_model_migrator -*- import sys from twisted.python import usage from vumi.utils import load_class_by_string from vumi.persist.riak_manager import RiakManager class Options(usage.Options): optParameters = [ ["model", "m", None, "Full Python name of the model class to migrate." " E.g. 'vumi.components.message_store.InboundMessage'."], ["bucket-prefix", "b", None, "The bucket prefix for the Riak manager."], ] longdesc = """Offline model migrator. Necessary for updating models when index names change so that old model instances remain findable by index searches. """ def postOptions(self): if self['model'] is None: raise usage.UsageError("Please specify a model class.") if self['bucket-prefix'] is None: raise usage.UsageError("Please specify a bucket prefix.") class ConfigHolder(object): def __init__(self, options): self.options = options model_cls = load_class_by_string(options['model']) riak_config = { 'bucket_prefix': options['bucket-prefix'], } manager = RiakManager.from_config(riak_config) self.model = manager.proxy(model_cls) def emit(self, s): print s def run(self): for key in self.model.all_keys(): obj = self.model.load(key) if obj is not None: obj.save() if __name__ == '__main__': try: options = Options() options.parseOptions() except usage.UsageError, errortext: print '%s: %s' % (sys.argv[0], errortext) print '%s: Try --help for usage details.' % (sys.argv[0]) sys.exit(1) cfg = ConfigHolder(options) cfg.run()
<commit_before><commit_msg>Add start of migrator script.<commit_after>
# -*- test-case-name: vumi.scripts.tests.test_model_migrator -*- import sys from twisted.python import usage from vumi.utils import load_class_by_string from vumi.persist.riak_manager import RiakManager class Options(usage.Options): optParameters = [ ["model", "m", None, "Full Python name of the model class to migrate." " E.g. 'vumi.components.message_store.InboundMessage'."], ["bucket-prefix", "b", None, "The bucket prefix for the Riak manager."], ] longdesc = """Offline model migrator. Necessary for updating models when index names change so that old model instances remain findable by index searches. """ def postOptions(self): if self['model'] is None: raise usage.UsageError("Please specify a model class.") if self['bucket-prefix'] is None: raise usage.UsageError("Please specify a bucket prefix.") class ConfigHolder(object): def __init__(self, options): self.options = options model_cls = load_class_by_string(options['model']) riak_config = { 'bucket_prefix': options['bucket-prefix'], } manager = RiakManager.from_config(riak_config) self.model = manager.proxy(model_cls) def emit(self, s): print s def run(self): for key in self.model.all_keys(): obj = self.model.load(key) if obj is not None: obj.save() if __name__ == '__main__': try: options = Options() options.parseOptions() except usage.UsageError, errortext: print '%s: %s' % (sys.argv[0], errortext) print '%s: Try --help for usage details.' % (sys.argv[0]) sys.exit(1) cfg = ConfigHolder(options) cfg.run()
Add start of migrator script.# -*- test-case-name: vumi.scripts.tests.test_model_migrator -*- import sys from twisted.python import usage from vumi.utils import load_class_by_string from vumi.persist.riak_manager import RiakManager class Options(usage.Options): optParameters = [ ["model", "m", None, "Full Python name of the model class to migrate." " E.g. 'vumi.components.message_store.InboundMessage'."], ["bucket-prefix", "b", None, "The bucket prefix for the Riak manager."], ] longdesc = """Offline model migrator. Necessary for updating models when index names change so that old model instances remain findable by index searches. """ def postOptions(self): if self['model'] is None: raise usage.UsageError("Please specify a model class.") if self['bucket-prefix'] is None: raise usage.UsageError("Please specify a bucket prefix.") class ConfigHolder(object): def __init__(self, options): self.options = options model_cls = load_class_by_string(options['model']) riak_config = { 'bucket_prefix': options['bucket-prefix'], } manager = RiakManager.from_config(riak_config) self.model = manager.proxy(model_cls) def emit(self, s): print s def run(self): for key in self.model.all_keys(): obj = self.model.load(key) if obj is not None: obj.save() if __name__ == '__main__': try: options = Options() options.parseOptions() except usage.UsageError, errortext: print '%s: %s' % (sys.argv[0], errortext) print '%s: Try --help for usage details.' % (sys.argv[0]) sys.exit(1) cfg = ConfigHolder(options) cfg.run()
<commit_before><commit_msg>Add start of migrator script.<commit_after># -*- test-case-name: vumi.scripts.tests.test_model_migrator -*- import sys from twisted.python import usage from vumi.utils import load_class_by_string from vumi.persist.riak_manager import RiakManager class Options(usage.Options): optParameters = [ ["model", "m", None, "Full Python name of the model class to migrate." " E.g. 'vumi.components.message_store.InboundMessage'."], ["bucket-prefix", "b", None, "The bucket prefix for the Riak manager."], ] longdesc = """Offline model migrator. Necessary for updating models when index names change so that old model instances remain findable by index searches. """ def postOptions(self): if self['model'] is None: raise usage.UsageError("Please specify a model class.") if self['bucket-prefix'] is None: raise usage.UsageError("Please specify a bucket prefix.") class ConfigHolder(object): def __init__(self, options): self.options = options model_cls = load_class_by_string(options['model']) riak_config = { 'bucket_prefix': options['bucket-prefix'], } manager = RiakManager.from_config(riak_config) self.model = manager.proxy(model_cls) def emit(self, s): print s def run(self): for key in self.model.all_keys(): obj = self.model.load(key) if obj is not None: obj.save() if __name__ == '__main__': try: options = Options() options.parseOptions() except usage.UsageError, errortext: print '%s: %s' % (sys.argv[0], errortext) print '%s: Try --help for usage details.' % (sys.argv[0]) sys.exit(1) cfg = ConfigHolder(options) cfg.run()
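A hypothetical invocation of the script, with placeholder model path and bucket prefix:
# python vumi/scripts/model_migrator.py -m vumi.components.message_store.InboundMessage -b mybucketprefix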
5119d396b67e732646545e50fedac80e5a663475
tests/test_meta.py
tests/test_meta.py
""" Tests for this repository. """ from pathlib import Path def test_init_files() -> None: """ ``__init__`` files exist where they should do. If ``__init__`` files are missing, linters may not run on all files that they should run on. """ directories = (Path('src'), Path('tests')) for directory in directories: files = directory.glob('**/*.py') for python_file in files: parent = python_file.parent expected_init = parent / '__init__.py' assert expected_init.exists()
Add meta test for __init__ file
Add meta test for __init__ file
Python
mit
adamtheturtle/vws-python,adamtheturtle/vws-python
Add meta test for __init__ file
""" Tests for this repository. """ from pathlib import Path def test_init_files() -> None: """ ``__init__`` files exist where they should do. If ``__init__`` files are missing, linters may not run on all files that they should run on. """ directories = (Path('src'), Path('tests')) for directory in directories: files = directory.glob('**/*.py') for python_file in files: parent = python_file.parent expected_init = parent / '__init__.py' assert expected_init.exists()
<commit_before><commit_msg>Add meta test for __init__ file<commit_after>
""" Tests for this repository. """ from pathlib import Path def test_init_files() -> None: """ ``__init__`` files exist where they should do. If ``__init__`` files are missing, linters may not run on all files that they should run on. """ directories = (Path('src'), Path('tests')) for directory in directories: files = directory.glob('**/*.py') for python_file in files: parent = python_file.parent expected_init = parent / '__init__.py' assert expected_init.exists()
Add meta test for __init__ file""" Tests for this repository. """ from pathlib import Path def test_init_files() -> None: """ ``__init__`` files exist where they should do. If ``__init__`` files are missing, linters may not run on all files that they should run on. """ directories = (Path('src'), Path('tests')) for directory in directories: files = directory.glob('**/*.py') for python_file in files: parent = python_file.parent expected_init = parent / '__init__.py' assert expected_init.exists()
<commit_before><commit_msg>Add meta test for __init__ file<commit_after>""" Tests for this repository. """ from pathlib import Path def test_init_files() -> None: """ ``__init__`` files exist where they should do. If ``__init__`` files are missing, linters may not run on all files that they should run on. """ directories = (Path('src'), Path('tests')) for directory in directories: files = directory.glob('**/*.py') for python_file in files: parent = python_file.parent expected_init = parent / '__init__.py' assert expected_init.exists()
9fc4c61e251f3f0657c5905bc031b307861ac679
ckanext/nhm/tests/test_helpers.py
ckanext/nhm/tests/test_helpers.py
#!/usr/bin/env python # encoding: utf-8 import unittest from ckanext.nhm.lib.helpers import dataset_author_truncate class AuthorTruncateTest(unittest.TestCase): ''' Tests for the dataset_author_truncate helper function. ''' def test_untruncated_author(self): ''' dataset_author_truncate shouldn't truncate when the author is shorter than the max ''' author = u'Dr. Someone' self.assertEqual(author, dataset_author_truncate(author)) def test_untruncated_unicode_author(self): ''' dataset_author_truncate shouldn't truncate when the author is shorter than the max and contains unicode characters ''' author = u'Dr. Someoné' self.assertEqual(author, dataset_author_truncate(author)) def test_truncated_author(self): ''' dataset_author_truncate should truncate when the author is longer than the max ''' author = u', '.join([u'Dr. Someone']*10) try: dataset_author_truncate(author) except UnicodeEncodeError: self.fail("Test unexpectedly threw UnicodeEncodeError when it shouldn't have") def test_truncated_unicode_author(self): ''' dataset_author_truncate should truncate when the author is longer than the max and contains unicode characters ''' author = u', '.join([u'Dr. Someoné']*10) try: dataset_author_truncate(author) except UnicodeEncodeError: self.fail("Test unexpectedly threw UnicodeEncodeError when it shouldn't have") if __name__ == '__main__': unittest.main()
Add tests for unicode usage in author truncation
Add tests for unicode usage in author truncation
Python
mit
NaturalHistoryMuseum/ckanext-nhm,NaturalHistoryMuseum/ckanext-nhm,NaturalHistoryMuseum/ckanext-nhm
Add tests for unicode usage in author truncation
#!/usr/bin/env python # encoding: utf-8 import unittest from ckanext.nhm.lib.helpers import dataset_author_truncate class AuthorTruncateTest(unittest.TestCase): ''' Tests for the dataset_author_truncate helper function. ''' def test_untruncated_author(self): ''' dataset_author_truncate shouldn't truncate when the author is shorter than the max ''' author = u'Dr. Someone' self.assertEqual(author, dataset_author_truncate(author)) def test_untruncated_unicode_author(self): ''' dataset_author_truncate shouldn't truncate when the author is shorter than the max and contains unicode characters ''' author = u'Dr. Someoné' self.assertEqual(author, dataset_author_truncate(author)) def test_truncated_author(self): ''' dataset_author_truncate should truncate when the author is longer than the max ''' author = u', '.join([u'Dr. Someone']*10) try: dataset_author_truncate(author) except UnicodeEncodeError: self.fail("Test unexpectedly threw UnicodeEncodeError when it shouldn't have") def test_truncated_unicode_author(self): ''' dataset_author_truncate should truncate when the author is longer than the max and contains unicode characters ''' author = u', '.join([u'Dr. Someoné']*10) try: dataset_author_truncate(author) except UnicodeEncodeError: self.fail("Test unexpectedly threw UnicodeEncodeError when it shouldn't have") if __name__ == '__main__': unittest.main()
<commit_before><commit_msg>Add tests for unicode usage in author truncation<commit_after>
#!/usr/bin/env python # encoding: utf-8 import unittest from ckanext.nhm.lib.helpers import dataset_author_truncate class AuthorTruncateTest(unittest.TestCase): ''' Tests for the dataset_author_truncate helper function. ''' def test_untruncated_author(self): ''' dataset_author_truncate shouldn't truncate when the author is shorter than the max ''' author = u'Dr. Someone' self.assertEqual(author, dataset_author_truncate(author)) def test_untruncated_unicode_author(self): ''' dataset_author_truncate shouldn't truncate when the author is shorter than the max and contains unicode characters ''' author = u'Dr. Someoné' self.assertEqual(author, dataset_author_truncate(author)) def test_truncated_author(self): ''' dataset_author_truncate should truncate when the author is longer than the max ''' author = u', '.join([u'Dr. Someone']*10) try: dataset_author_truncate(author) except UnicodeEncodeError: self.fail("Test unexpectedly threw UnicodeEncodeError when it shouldn't have") def test_truncated_unicode_author(self): ''' dataset_author_truncate should truncate when the author is longer than the max and contains unicode characters ''' author = u', '.join([u'Dr. Someoné']*10) try: dataset_author_truncate(author) except UnicodeEncodeError: self.fail("Test unexpectedly threw UnicodeEncodeError when it shouldn't have") if __name__ == '__main__': unittest.main()
Add tests for unicode usage in author truncation#!/usr/bin/env python # encoding: utf-8 import unittest from ckanext.nhm.lib.helpers import dataset_author_truncate class AuthorTruncateTest(unittest.TestCase): ''' Tests for the dataset_author_truncate helper function. ''' def test_untruncated_author(self): ''' dataset_author_truncate shouldn't truncate when the author is shorter than the max ''' author = u'Dr. Someone' self.assertEqual(author, dataset_author_truncate(author)) def test_untruncated_unicode_author(self): ''' dataset_author_truncate shouldn't truncate when the author is shorter than the max and contains unicode characters ''' author = u'Dr. Someoné' self.assertEqual(author, dataset_author_truncate(author)) def test_truncated_author(self): ''' dataset_author_truncate should truncate when the author is longer than the max ''' author = u', '.join([u'Dr. Someone']*10) try: dataset_author_truncate(author) except UnicodeEncodeError: self.fail("Test unexpectedly threw UnicodeEncodeError when it shouldn't have") def test_truncated_unicode_author(self): ''' dataset_author_truncate should truncate when the author is longer than the max and contains unicode characters ''' author = u', '.join([u'Dr. Someoné']*10) try: dataset_author_truncate(author) except UnicodeEncodeError: self.fail("Test unexpectedly threw UnicodeEncodeError when it shouldn't have") if __name__ == '__main__': unittest.main()
<commit_before><commit_msg>Add tests for unicode usage in author truncation<commit_after>#!/usr/bin/env python # encoding: utf-8 import unittest from ckanext.nhm.lib.helpers import dataset_author_truncate class AuthorTruncateTest(unittest.TestCase): ''' Tests for the dataset_author_truncate helper function. ''' def test_untruncated_author(self): ''' dataset_author_truncate shouldn't truncate when the author is shorter than the max ''' author = u'Dr. Someone' self.assertEqual(author, dataset_author_truncate(author)) def test_untruncated_unicode_author(self): ''' dataset_author_truncate shouldn't truncate when the author is shorter than the max and contains unicode characters ''' author = u'Dr. Someoné' self.assertEqual(author, dataset_author_truncate(author)) def test_truncated_author(self): ''' dataset_author_truncate should truncate when the author is longer than the max ''' author = u', '.join([u'Dr. Someone']*10) try: dataset_author_truncate(author) except UnicodeEncodeError: self.fail("Test unexpectedly threw UnicodeEncodeError when it shouldn't have") def test_truncated_unicode_author(self): ''' dataset_author_truncate should truncate when the author is longer than the max and contains unicode characters ''' author = u', '.join([u'Dr. Someoné']*10) try: dataset_author_truncate(author) except UnicodeEncodeError: self.fail("Test unexpectedly threw UnicodeEncodeError when it shouldn't have") if __name__ == '__main__': unittest.main()
b01f024cabc3649467dd3b42961225e035b032a7
camera_filters.py
camera_filters.py
""" Apply different filters here """ import cv2 # import OpenCV 3 module camera = cv2.VideoCapture(0) # get default camera mode = 2 # default mode, apply Canny edge detection while True: ok, frame = camera.read() # read frame if ok: # frame is read correctly if mode == 2: frame = cv2.Canny(frame, 100, 200) # Canny edge detection cv2.imshow('My camera', frame) # show frame key = cv2.waitKey(1) & 0xff # read keystroke if key == 27: break # <Escape> key pressed, exit from cycle if key == ord('1'): mode = 1 # show unchanged frame if key == ord('2'): mode = 2 # apply Canny edge detection camera.release() # release web camera cv2.destroyAllWindows()
Apply different filters for web camera image
Apply different filters for web camera image
Python
mit
foobar167/junkyard,foobar167/junkyard,foobar167/junkyard,foobar167/junkyard,foobar167/junkyard,foobar167/junkyard
Apply different filters for web camera image
""" Apply different filters here """ import cv2 # import OpenCV 3 module camera = cv2.VideoCapture(0) # get default camera mode = 2 # default mode, apply Canny edge detection while True: ok, frame = camera.read() # read frame if ok: # frame is read correctly if mode == 2: frame = cv2.Canny(frame, 100, 200) # Canny edge detection cv2.imshow('My camera', frame) # show frame key = cv2.waitKey(1) & 0xff # read keystroke if key == 27: break # <Escape> key pressed, exit from cycle if key == ord('1'): mode = 1 # show unchanged frame if key == ord('2'): mode = 2 # apply Canny edge detection camera.release() # release web camera cv2.destroyAllWindows()
<commit_before><commit_msg>Apply different filters for web camera image<commit_after>
""" Apply different filters here """ import cv2 # import OpenCV 3 module camera = cv2.VideoCapture(0) # get default camera mode = 2 # default mode, apply Canny edge detection while True: ok, frame = camera.read() # read frame if ok: # frame is read correctly if mode == 2: frame = cv2.Canny(frame, 100, 200) # Canny edge detection cv2.imshow('My camera', frame) # show frame key = cv2.waitKey(1) & 0xff # read keystroke if key == 27: break # <Escape> key pressed, exit from cycle if key == ord('1'): mode = 1 # show unchanged frame if key == ord('2'): mode = 2 # apply Canny edge detection camera.release() # release web camera cv2.destroyAllWindows()
Apply different filters for web camera image""" Apply different filters here """ import cv2 # import OpenCV 3 module camera = cv2.VideoCapture(0) # get default camera mode = 2 # default mode, apply Canny edge detection while True: ok, frame = camera.read() # read frame if ok: # frame is read correctly if mode == 2: frame = cv2.Canny(frame, 100, 200) # Canny edge detection cv2.imshow('My camera', frame) # show frame key = cv2.waitKey(1) & 0xff # read keystroke if key == 27: break # <Escape> key pressed, exit from cycle if key == ord('1'): mode = 1 # show unchanged frame if key == ord('2'): mode = 2 # apply Canny edge detection camera.release() # release web camera cv2.destroyAllWindows()
<commit_before><commit_msg>Apply different filters for web camera image<commit_after>
""" Apply different filters here """
import cv2  # import OpenCV 3 module

camera = cv2.VideoCapture(0)  # get default camera
mode = 2  # default mode, apply Canny edge detection
while True:
    ok, frame = camera.read()  # read frame
    if ok:  # frame is read correctly
        if mode == 2:
            frame = cv2.Canny(frame, 100, 200)  # Canny edge detection
        cv2.imshow('My camera', frame)  # show frame
    key = cv2.waitKey(1) & 0xff  # read keystroke
    if key == 27: break  # <Escape> key pressed, exit from cycle
    if key == ord('1'): mode = 1  # show unchanged frame
    if key == ord('2'): mode = 2  # apply Canny edge detection
camera.release()  # release web camera
cv2.destroyAllWindows()
1b6853b7036025330d9055144fbd22f6fbe95de6
senlin/tests/tempest/api/policies/test_policy_show.py
senlin/tests/tempest/api/policies/test_policy_show.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from tempest.lib.common.utils import data_utils
from tempest.lib import decorators

from senlin.tests.tempest.api import base
from senlin.tests.tempest.common import constants


class TestPolicyShow(base.BaseSenlinTest):

    @classmethod
    def resource_setup(cls):
        super(TestPolicyShow, cls).resource_setup()
        # Create policy
        params = {
            'policy': {
                'name': data_utils.rand_name("tempest-created-policy"),
                'spec': constants.spec_scaling_policy
            }
        }
        cls.policy = cls.client.create_obj('policies', params)['body']

    @classmethod
    def resource_cleanup(cls):
        # Delete policy
        cls.client.delete_obj('policies', cls.policy['id'])
        super(TestPolicyShow, cls).resource_cleanup()

    @decorators.idempotent_id('7ab18be1-e554-452d-91ac-9b5e5c87430b')
    def test_show_policy(self):
        res = self.client.get_obj('policies', self.policy['id'])

        # Verify resp of policy show API
        self.assertEqual(200, res['status'])
        self.assertIsNone(res['location'])
        self.assertIsNotNone(res['body'])
        policy = res['body']
        for key in ['created_at', 'data', 'domain', 'id', 'name', 'project',
                    'spec', 'type', 'updated_at', 'user']:
            self.assertIn(key, policy)
Add API test for policy show
Add API test for policy show

Change-Id: I14aea48e014dda3850430b334afbb1f7b188a171
Python
apache-2.0
openstack/senlin,stackforge/senlin,stackforge/senlin,openstack/senlin,openstack/senlin
Add API test for policy show

Change-Id: I14aea48e014dda3850430b334afbb1f7b188a171
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from tempest.lib.common.utils import data_utils
from tempest.lib import decorators

from senlin.tests.tempest.api import base
from senlin.tests.tempest.common import constants


class TestPolicyShow(base.BaseSenlinTest):

    @classmethod
    def resource_setup(cls):
        super(TestPolicyShow, cls).resource_setup()
        # Create policy
        params = {
            'policy': {
                'name': data_utils.rand_name("tempest-created-policy"),
                'spec': constants.spec_scaling_policy
            }
        }
        cls.policy = cls.client.create_obj('policies', params)['body']

    @classmethod
    def resource_cleanup(cls):
        # Delete policy
        cls.client.delete_obj('policies', cls.policy['id'])
        super(TestPolicyShow, cls).resource_cleanup()

    @decorators.idempotent_id('7ab18be1-e554-452d-91ac-9b5e5c87430b')
    def test_show_policy(self):
        res = self.client.get_obj('policies', self.policy['id'])

        # Verify resp of policy show API
        self.assertEqual(200, res['status'])
        self.assertIsNone(res['location'])
        self.assertIsNotNone(res['body'])
        policy = res['body']
        for key in ['created_at', 'data', 'domain', 'id', 'name', 'project',
                    'spec', 'type', 'updated_at', 'user']:
            self.assertIn(key, policy)
<commit_before><commit_msg>Add API test for policy show

Change-Id: I14aea48e014dda3850430b334afbb1f7b188a171<commit_after>
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from tempest.lib.common.utils import data_utils
from tempest.lib import decorators

from senlin.tests.tempest.api import base
from senlin.tests.tempest.common import constants


class TestPolicyShow(base.BaseSenlinTest):

    @classmethod
    def resource_setup(cls):
        super(TestPolicyShow, cls).resource_setup()
        # Create policy
        params = {
            'policy': {
                'name': data_utils.rand_name("tempest-created-policy"),
                'spec': constants.spec_scaling_policy
            }
        }
        cls.policy = cls.client.create_obj('policies', params)['body']

    @classmethod
    def resource_cleanup(cls):
        # Delete policy
        cls.client.delete_obj('policies', cls.policy['id'])
        super(TestPolicyShow, cls).resource_cleanup()

    @decorators.idempotent_id('7ab18be1-e554-452d-91ac-9b5e5c87430b')
    def test_show_policy(self):
        res = self.client.get_obj('policies', self.policy['id'])

        # Verify resp of policy show API
        self.assertEqual(200, res['status'])
        self.assertIsNone(res['location'])
        self.assertIsNotNone(res['body'])
        policy = res['body']
        for key in ['created_at', 'data', 'domain', 'id', 'name', 'project',
                    'spec', 'type', 'updated_at', 'user']:
            self.assertIn(key, policy)
Add API test for policy show

Change-Id: I14aea48e014dda3850430b334afbb1f7b188a171
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from tempest.lib.common.utils import data_utils
from tempest.lib import decorators

from senlin.tests.tempest.api import base
from senlin.tests.tempest.common import constants


class TestPolicyShow(base.BaseSenlinTest):

    @classmethod
    def resource_setup(cls):
        super(TestPolicyShow, cls).resource_setup()
        # Create policy
        params = {
            'policy': {
                'name': data_utils.rand_name("tempest-created-policy"),
                'spec': constants.spec_scaling_policy
            }
        }
        cls.policy = cls.client.create_obj('policies', params)['body']

    @classmethod
    def resource_cleanup(cls):
        # Delete policy
        cls.client.delete_obj('policies', cls.policy['id'])
        super(TestPolicyShow, cls).resource_cleanup()

    @decorators.idempotent_id('7ab18be1-e554-452d-91ac-9b5e5c87430b')
    def test_show_policy(self):
        res = self.client.get_obj('policies', self.policy['id'])

        # Verify resp of policy show API
        self.assertEqual(200, res['status'])
        self.assertIsNone(res['location'])
        self.assertIsNotNone(res['body'])
        policy = res['body']
        for key in ['created_at', 'data', 'domain', 'id', 'name', 'project',
                    'spec', 'type', 'updated_at', 'user']:
            self.assertIn(key, policy)
<commit_before><commit_msg>Add API test for policy show

Change-Id: I14aea48e014dda3850430b334afbb1f7b188a171<commit_after>
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from tempest.lib.common.utils import data_utils
from tempest.lib import decorators

from senlin.tests.tempest.api import base
from senlin.tests.tempest.common import constants


class TestPolicyShow(base.BaseSenlinTest):

    @classmethod
    def resource_setup(cls):
        super(TestPolicyShow, cls).resource_setup()
        # Create policy
        params = {
            'policy': {
                'name': data_utils.rand_name("tempest-created-policy"),
                'spec': constants.spec_scaling_policy
            }
        }
        cls.policy = cls.client.create_obj('policies', params)['body']

    @classmethod
    def resource_cleanup(cls):
        # Delete policy
        cls.client.delete_obj('policies', cls.policy['id'])
        super(TestPolicyShow, cls).resource_cleanup()

    @decorators.idempotent_id('7ab18be1-e554-452d-91ac-9b5e5c87430b')
    def test_show_policy(self):
        res = self.client.get_obj('policies', self.policy['id'])

        # Verify resp of policy show API
        self.assertEqual(200, res['status'])
        self.assertIsNone(res['location'])
        self.assertIsNotNone(res['body'])
        policy = res['body']
        for key in ['created_at', 'data', 'domain', 'id', 'name', 'project',
                    'spec', 'type', 'updated_at', 'user']:
            self.assertIn(key, policy)
aa1c2880dc85228d9a8d534858c1cfe70428cbde
src/ggrc/migrations/versions/20160510122526_44ebc240800b_remove_response_relationships.py
src/ggrc/migrations/versions/20160510122526_44ebc240800b_remove_response_relationships.py
# Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: goodson@google.com
# Maintained By: goodson@google.com

"""
Remove relationships related to deleted response objects

Create Date: 2016-05-10 12:25:26.383695
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name

from alembic import op

# revision identifiers, used by Alembic.
revision = '44ebc240800b'
down_revision = '3715694bd315'


def upgrade():
  """Upgrade database schema and/or data, creating a new revision."""
  op.execute(
      """
      DELETE FROM relationships
      WHERE source_type IN ("Response",
                            "DocumentationResponse",
                            "InterviewResponse",
                            "PopulationSampleResponse")
      OR destination_type IN ("Response",
                              "DocumentationResponse",
                              "InterviewResponse",
                              "PopulationSampleResponse")
      """)


def downgrade():
  """Downgrade database schema and/or data back to the previous revision."""
  pass
# Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: goodson@google.com
# Maintained By: goodson@google.com

"""
Remove relationships related to deleted response objects

Create Date: 2016-05-10 12:25:26.383695
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name

from alembic import op

# revision identifiers, used by Alembic.
revision = '44ebc240800b'
down_revision = '3715694bd315'


def upgrade():
  """Upgrade database schema and/or data, creating a new revision."""
  op.execute(
      """
      DELETE FROM relationships
      WHERE source_type IN ("Response",
                            "DocumentationResponse",
                            "InterviewResponse",
                            "PopulationSampleResponse")
      OR destination_type IN ("Response",
                              "DocumentationResponse",
                              "InterviewResponse",
                              "PopulationSampleResponse")
      """)
  op.execute(
      """
      DELETE FROM object_documents
      WHERE documentable_type IN ("Response",
                                  "DocumentationResponse",
                                  "InterviewResponse",
                                  "PopulationSampleResponse")
      """)


def downgrade():
  """Downgrade database schema and/or data back to the previous revision."""
  pass
Remove responses references in object_documents
Remove responses references in object_documents
Python
apache-2.0
selahssea/ggrc-core,andrei-karalionak/ggrc-core,kr41/ggrc-core,AleksNeStu/ggrc-core,josthkko/ggrc-core,edofic/ggrc-core,plamut/ggrc-core,josthkko/ggrc-core,andrei-karalionak/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,edofic/ggrc-core,AleksNeStu/ggrc-core,josthkko/ggrc-core,andrei-karalionak/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,NejcZupec/ggrc-core,edofic/ggrc-core,NejcZupec/ggrc-core,edofic/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,NejcZupec/ggrc-core,VinnieJohns/ggrc-core,kr41/ggrc-core,kr41/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,josthkko/ggrc-core,NejcZupec/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,kr41/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,j0gurt/ggrc-core,VinnieJohns/ggrc-core
# Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: goodson@google.com
# Maintained By: goodson@google.com

"""
Remove relationships related to deleted response objects

Create Date: 2016-05-10 12:25:26.383695
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name

from alembic import op

# revision identifiers, used by Alembic.
revision = '44ebc240800b'
down_revision = '3715694bd315'


def upgrade():
  """Upgrade database schema and/or data, creating a new revision."""
  op.execute(
      """
      DELETE FROM relationships
      WHERE source_type IN ("Response",
                            "DocumentationResponse",
                            "InterviewResponse",
                            "PopulationSampleResponse")
      OR destination_type IN ("Response",
                              "DocumentationResponse",
                              "InterviewResponse",
                              "PopulationSampleResponse")
      """)


def downgrade():
  """Downgrade database schema and/or data back to the previous revision."""
  pass
Remove responses references in object_documents
# Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: goodson@google.com
# Maintained By: goodson@google.com

"""
Remove relationships related to deleted response objects

Create Date: 2016-05-10 12:25:26.383695
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name

from alembic import op

# revision identifiers, used by Alembic.
revision = '44ebc240800b'
down_revision = '3715694bd315'


def upgrade():
  """Upgrade database schema and/or data, creating a new revision."""
  op.execute(
      """
      DELETE FROM relationships
      WHERE source_type IN ("Response",
                            "DocumentationResponse",
                            "InterviewResponse",
                            "PopulationSampleResponse")
      OR destination_type IN ("Response",
                              "DocumentationResponse",
                              "InterviewResponse",
                              "PopulationSampleResponse")
      """)
  op.execute(
      """
      DELETE FROM object_documents
      WHERE documentable_type IN ("Response",
                                  "DocumentationResponse",
                                  "InterviewResponse",
                                  "PopulationSampleResponse")
      """)


def downgrade():
  """Downgrade database schema and/or data back to the previous revision."""
  pass
<commit_before># Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: goodson@google.com
# Maintained By: goodson@google.com

"""
Remove relationships related to deleted response objects

Create Date: 2016-05-10 12:25:26.383695
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name

from alembic import op

# revision identifiers, used by Alembic.
revision = '44ebc240800b'
down_revision = '3715694bd315'


def upgrade():
  """Upgrade database schema and/or data, creating a new revision."""
  op.execute(
      """
      DELETE FROM relationships
      WHERE source_type IN ("Response",
                            "DocumentationResponse",
                            "InterviewResponse",
                            "PopulationSampleResponse")
      OR destination_type IN ("Response",
                              "DocumentationResponse",
                              "InterviewResponse",
                              "PopulationSampleResponse")
      """)


def downgrade():
  """Downgrade database schema and/or data back to the previous revision."""
  pass
<commit_msg>Remove responses references in object_documents<commit_after>
# Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: goodson@google.com
# Maintained By: goodson@google.com

"""
Remove relationships related to deleted response objects

Create Date: 2016-05-10 12:25:26.383695
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name

from alembic import op

# revision identifiers, used by Alembic.
revision = '44ebc240800b'
down_revision = '3715694bd315'


def upgrade():
  """Upgrade database schema and/or data, creating a new revision."""
  op.execute(
      """
      DELETE FROM relationships
      WHERE source_type IN ("Response",
                            "DocumentationResponse",
                            "InterviewResponse",
                            "PopulationSampleResponse")
      OR destination_type IN ("Response",
                              "DocumentationResponse",
                              "InterviewResponse",
                              "PopulationSampleResponse")
      """)
  op.execute(
      """
      DELETE FROM object_documents
      WHERE documentable_type IN ("Response",
                                  "DocumentationResponse",
                                  "InterviewResponse",
                                  "PopulationSampleResponse")
      """)


def downgrade():
  """Downgrade database schema and/or data back to the previous revision."""
  pass
# Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: goodson@google.com
# Maintained By: goodson@google.com

"""
Remove relationships related to deleted response objects

Create Date: 2016-05-10 12:25:26.383695
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name

from alembic import op

# revision identifiers, used by Alembic.
revision = '44ebc240800b'
down_revision = '3715694bd315'


def upgrade():
  """Upgrade database schema and/or data, creating a new revision."""
  op.execute(
      """
      DELETE FROM relationships
      WHERE source_type IN ("Response",
                            "DocumentationResponse",
                            "InterviewResponse",
                            "PopulationSampleResponse")
      OR destination_type IN ("Response",
                              "DocumentationResponse",
                              "InterviewResponse",
                              "PopulationSampleResponse")
      """)


def downgrade():
  """Downgrade database schema and/or data back to the previous revision."""
  pass
Remove responses references in object_documents
# Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: goodson@google.com
# Maintained By: goodson@google.com

"""
Remove relationships related to deleted response objects

Create Date: 2016-05-10 12:25:26.383695
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name

from alembic import op

# revision identifiers, used by Alembic.
revision = '44ebc240800b'
down_revision = '3715694bd315'


def upgrade():
  """Upgrade database schema and/or data, creating a new revision."""
  op.execute(
      """
      DELETE FROM relationships
      WHERE source_type IN ("Response",
                            "DocumentationResponse",
                            "InterviewResponse",
                            "PopulationSampleResponse")
      OR destination_type IN ("Response",
                              "DocumentationResponse",
                              "InterviewResponse",
                              "PopulationSampleResponse")
      """)
  op.execute(
      """
      DELETE FROM object_documents
      WHERE documentable_type IN ("Response",
                                  "DocumentationResponse",
                                  "InterviewResponse",
                                  "PopulationSampleResponse")
      """)


def downgrade():
  """Downgrade database schema and/or data back to the previous revision."""
  pass
<commit_before># Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: goodson@google.com
# Maintained By: goodson@google.com

"""
Remove relationships related to deleted response objects

Create Date: 2016-05-10 12:25:26.383695
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name

from alembic import op

# revision identifiers, used by Alembic.
revision = '44ebc240800b'
down_revision = '3715694bd315'


def upgrade():
  """Upgrade database schema and/or data, creating a new revision."""
  op.execute(
      """
      DELETE FROM relationships
      WHERE source_type IN ("Response",
                            "DocumentationResponse",
                            "InterviewResponse",
                            "PopulationSampleResponse")
      OR destination_type IN ("Response",
                              "DocumentationResponse",
                              "InterviewResponse",
                              "PopulationSampleResponse")
      """)


def downgrade():
  """Downgrade database schema and/or data back to the previous revision."""
  pass
<commit_msg>Remove responses references in object_documents<commit_after>
# Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: goodson@google.com
# Maintained By: goodson@google.com

"""
Remove relationships related to deleted response objects

Create Date: 2016-05-10 12:25:26.383695
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name

from alembic import op

# revision identifiers, used by Alembic.
revision = '44ebc240800b'
down_revision = '3715694bd315'


def upgrade():
  """Upgrade database schema and/or data, creating a new revision."""
  op.execute(
      """
      DELETE FROM relationships
      WHERE source_type IN ("Response",
                            "DocumentationResponse",
                            "InterviewResponse",
                            "PopulationSampleResponse")
      OR destination_type IN ("Response",
                              "DocumentationResponse",
                              "InterviewResponse",
                              "PopulationSampleResponse")
      """)
  op.execute(
      """
      DELETE FROM object_documents
      WHERE documentable_type IN ("Response",
                                  "DocumentationResponse",
                                  "InterviewResponse",
                                  "PopulationSampleResponse")
      """)


def downgrade():
  """Downgrade database schema and/or data back to the previous revision."""
  pass
4672c7bc0036c9bfb478f8483ca2dcffe0644b0e
apps/authentication/migrations/0014_auto_20170916_0124.py
apps/authentication/migrations/0014_auto_20170916_0124.py
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-09-16 01:24
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('authentication', '0013_auto_20161209_1447'),
    ]

    operations = [
        migrations.AlterField(
            model_name='registertoken',
            name='created',
            field=models.DateTimeField(auto_now_add=True, verbose_name=b'created'),
        ),
    ]
Add migrations that fixes pre 1.10 behavior
Add migrations that fixes pre 1.10 behavior
Python
mit
CasualGaming/studlan,dotKom/studlan,dotKom/studlan,dotKom/studlan,CasualGaming/studlan,dotKom/studlan,CasualGaming/studlan,CasualGaming/studlan
Add migrations that fixes pre 1.10 behavior
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-09-16 01:24
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('authentication', '0013_auto_20161209_1447'),
    ]

    operations = [
        migrations.AlterField(
            model_name='registertoken',
            name='created',
            field=models.DateTimeField(auto_now_add=True, verbose_name=b'created'),
        ),
    ]
<commit_before><commit_msg>Add migrations that fixes pre 1.10 behavior<commit_after>
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-09-16 01:24
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('authentication', '0013_auto_20161209_1447'),
    ]

    operations = [
        migrations.AlterField(
            model_name='registertoken',
            name='created',
            field=models.DateTimeField(auto_now_add=True, verbose_name=b'created'),
        ),
    ]
Add migrations that fixes pre 1.10 behavior
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-09-16 01:24
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('authentication', '0013_auto_20161209_1447'),
    ]

    operations = [
        migrations.AlterField(
            model_name='registertoken',
            name='created',
            field=models.DateTimeField(auto_now_add=True, verbose_name=b'created'),
        ),
    ]
<commit_before><commit_msg>Add migrations that fixes pre 1.10 behavior<commit_after>
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-09-16 01:24
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('authentication', '0013_auto_20161209_1447'),
    ]

    operations = [
        migrations.AlterField(
            model_name='registertoken',
            name='created',
            field=models.DateTimeField(auto_now_add=True, verbose_name=b'created'),
        ),
    ]
1ae72b244dd867240ca23415faa2771f97d57569
astropy/io/misc/asdf/tags/time/tests/test_timedelta.py
astropy/io/misc/asdf/tags/time/tests/test_timedelta.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-

import pytest

from asdf.tests.helpers import assert_roundtrip_tree

from astropy.time import Time


@pytest.mark.parametrize('fmt', Time.FORMATS.keys())
def test_timedelta(fmt, tmpdir):
    t1 = Time(Time.now(), format=fmt)
    t2 = Time(Time.now(), format=fmt)
    td = t2 - t1
    tree = dict(timedelta=td)
    assert_roundtrip_tree(tree, tmpdir)
Add test for serializing TimeDelta using ASDF
Add test for serializing TimeDelta using ASDF
Python
bsd-3-clause
aleksandr-bakanov/astropy,bsipocz/astropy,StuartLittlefair/astropy,saimn/astropy,pllim/astropy,MSeifert04/astropy,larrybradley/astropy,dhomeier/astropy,lpsinger/astropy,saimn/astropy,StuartLittlefair/astropy,mhvk/astropy,saimn/astropy,bsipocz/astropy,stargaser/astropy,bsipocz/astropy,mhvk/astropy,stargaser/astropy,lpsinger/astropy,stargaser/astropy,mhvk/astropy,saimn/astropy,aleksandr-bakanov/astropy,bsipocz/astropy,MSeifert04/astropy,pllim/astropy,dhomeier/astropy,pllim/astropy,dhomeier/astropy,stargaser/astropy,lpsinger/astropy,StuartLittlefair/astropy,larrybradley/astropy,larrybradley/astropy,larrybradley/astropy,aleksandr-bakanov/astropy,mhvk/astropy,larrybradley/astropy,StuartLittlefair/astropy,astropy/astropy,astropy/astropy,mhvk/astropy,astropy/astropy,dhomeier/astropy,lpsinger/astropy,StuartLittlefair/astropy,MSeifert04/astropy,astropy/astropy,pllim/astropy,lpsinger/astropy,aleksandr-bakanov/astropy,dhomeier/astropy,astropy/astropy,MSeifert04/astropy,saimn/astropy,pllim/astropy
Add test for serializing TimeDelta using ASDF
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-

import pytest

from asdf.tests.helpers import assert_roundtrip_tree

from astropy.time import Time


@pytest.mark.parametrize('fmt', Time.FORMATS.keys())
def test_timedelta(fmt, tmpdir):
    t1 = Time(Time.now(), format=fmt)
    t2 = Time(Time.now(), format=fmt)
    td = t2 - t1
    tree = dict(timedelta=td)
    assert_roundtrip_tree(tree, tmpdir)
<commit_before><commit_msg>Add test for serializing TimeDelta using ASDF<commit_after>
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-

import pytest

from asdf.tests.helpers import assert_roundtrip_tree

from astropy.time import Time


@pytest.mark.parametrize('fmt', Time.FORMATS.keys())
def test_timedelta(fmt, tmpdir):
    t1 = Time(Time.now(), format=fmt)
    t2 = Time(Time.now(), format=fmt)
    td = t2 - t1
    tree = dict(timedelta=td)
    assert_roundtrip_tree(tree, tmpdir)
Add test for serializing TimeDelta using ASDF
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-

import pytest

from asdf.tests.helpers import assert_roundtrip_tree

from astropy.time import Time


@pytest.mark.parametrize('fmt', Time.FORMATS.keys())
def test_timedelta(fmt, tmpdir):
    t1 = Time(Time.now(), format=fmt)
    t2 = Time(Time.now(), format=fmt)
    td = t2 - t1
    tree = dict(timedelta=td)
    assert_roundtrip_tree(tree, tmpdir)
<commit_before><commit_msg>Add test for serializing TimeDelta using ASDF<commit_after>
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-

import pytest

from asdf.tests.helpers import assert_roundtrip_tree

from astropy.time import Time


@pytest.mark.parametrize('fmt', Time.FORMATS.keys())
def test_timedelta(fmt, tmpdir):
    t1 = Time(Time.now(), format=fmt)
    t2 = Time(Time.now(), format=fmt)
    td = t2 - t1
    tree = dict(timedelta=td)
    assert_roundtrip_tree(tree, tmpdir)
bee2e32aae26721d687e81838718fe5458fa0504
create_tables.py
create_tables.py
# Copyright 2013 Pau Haro Negre
# based on C++ code by Carl Staelin Copyright 2009-2011
#
# See the NOTICE file distributed with this work for additional information
# regarding copyright ownership.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from run_experiment import Cognon
from run_experiment import Configuration


def run_table_row(w, active, C, D1, D2, Q, R, G, H):
    repetitions = 20
    config = Configuration()
    config.neuron_params(C, D1, D2, Q, R, G, H)
    config.test_params(active, w, 5000)
    cognon = Cognon()
    return cognon.run_configuration(config, repetitions)


def table21_row(N, H, S, w, G):
    r = run_table_row(w, N, 1, 1, 1, S/float(H), 1, G, float(H))
    pF_mean = r['pF'].mean()*100
    pF_std = r['pF'].std()*100
    L_mean = r['L'].mean()
    print "{:.2f}\t{:.2}\t{:.1f}".format(pF_mean, pF_std, L_mean)


def table21():
    # N | H | S0 | w | G
    # table21_row( 4, 4, 10, 1, 100)
    table21_row( 5, 4, 10, 1, 100)
    table21_row( 4, 4, 10, 2, 100)
    table21_row(10, 10, 100, 4, 100)
    table21_row(11, 10, 100, 4, 100)
    table21_row(11, 10, 100, 5, 100)
    table21_row(11, 10, 1000, 60, 100)
    table21_row(11, 10, 10000, 600, 100)
    table21_row(22, 20, 10000, 450, 100)


table21()
Add script to recreate the tables in the book
Add script to recreate the tables in the book
Python
apache-2.0
pauh/neuron
Add script to recreate the tables in the book
# Copyright 2013 Pau Haro Negre
# based on C++ code by Carl Staelin Copyright 2009-2011
#
# See the NOTICE file distributed with this work for additional information
# regarding copyright ownership.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from run_experiment import Cognon
from run_experiment import Configuration


def run_table_row(w, active, C, D1, D2, Q, R, G, H):
    repetitions = 20
    config = Configuration()
    config.neuron_params(C, D1, D2, Q, R, G, H)
    config.test_params(active, w, 5000)
    cognon = Cognon()
    return cognon.run_configuration(config, repetitions)


def table21_row(N, H, S, w, G):
    r = run_table_row(w, N, 1, 1, 1, S/float(H), 1, G, float(H))
    pF_mean = r['pF'].mean()*100
    pF_std = r['pF'].std()*100
    L_mean = r['L'].mean()
    print "{:.2f}\t{:.2}\t{:.1f}".format(pF_mean, pF_std, L_mean)


def table21():
    # N | H | S0 | w | G
    # table21_row( 4, 4, 10, 1, 100)
    table21_row( 5, 4, 10, 1, 100)
    table21_row( 4, 4, 10, 2, 100)
    table21_row(10, 10, 100, 4, 100)
    table21_row(11, 10, 100, 4, 100)
    table21_row(11, 10, 100, 5, 100)
    table21_row(11, 10, 1000, 60, 100)
    table21_row(11, 10, 10000, 600, 100)
    table21_row(22, 20, 10000, 450, 100)


table21()
<commit_before><commit_msg>Add script to recreate the tables in the book<commit_after>
# Copyright 2013 Pau Haro Negre
# based on C++ code by Carl Staelin Copyright 2009-2011
#
# See the NOTICE file distributed with this work for additional information
# regarding copyright ownership.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from run_experiment import Cognon
from run_experiment import Configuration


def run_table_row(w, active, C, D1, D2, Q, R, G, H):
    repetitions = 20
    config = Configuration()
    config.neuron_params(C, D1, D2, Q, R, G, H)
    config.test_params(active, w, 5000)
    cognon = Cognon()
    return cognon.run_configuration(config, repetitions)


def table21_row(N, H, S, w, G):
    r = run_table_row(w, N, 1, 1, 1, S/float(H), 1, G, float(H))
    pF_mean = r['pF'].mean()*100
    pF_std = r['pF'].std()*100
    L_mean = r['L'].mean()
    print "{:.2f}\t{:.2}\t{:.1f}".format(pF_mean, pF_std, L_mean)


def table21():
    # N | H | S0 | w | G
    # table21_row( 4, 4, 10, 1, 100)
    table21_row( 5, 4, 10, 1, 100)
    table21_row( 4, 4, 10, 2, 100)
    table21_row(10, 10, 100, 4, 100)
    table21_row(11, 10, 100, 4, 100)
    table21_row(11, 10, 100, 5, 100)
    table21_row(11, 10, 1000, 60, 100)
    table21_row(11, 10, 10000, 600, 100)
    table21_row(22, 20, 10000, 450, 100)


table21()
Add script to recreate the tables in the book
# Copyright 2013 Pau Haro Negre
# based on C++ code by Carl Staelin Copyright 2009-2011
#
# See the NOTICE file distributed with this work for additional information
# regarding copyright ownership.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from run_experiment import Cognon
from run_experiment import Configuration


def run_table_row(w, active, C, D1, D2, Q, R, G, H):
    repetitions = 20
    config = Configuration()
    config.neuron_params(C, D1, D2, Q, R, G, H)
    config.test_params(active, w, 5000)
    cognon = Cognon()
    return cognon.run_configuration(config, repetitions)


def table21_row(N, H, S, w, G):
    r = run_table_row(w, N, 1, 1, 1, S/float(H), 1, G, float(H))
    pF_mean = r['pF'].mean()*100
    pF_std = r['pF'].std()*100
    L_mean = r['L'].mean()
    print "{:.2f}\t{:.2}\t{:.1f}".format(pF_mean, pF_std, L_mean)


def table21():
    # N | H | S0 | w | G
    # table21_row( 4, 4, 10, 1, 100)
    table21_row( 5, 4, 10, 1, 100)
    table21_row( 4, 4, 10, 2, 100)
    table21_row(10, 10, 100, 4, 100)
    table21_row(11, 10, 100, 4, 100)
    table21_row(11, 10, 100, 5, 100)
    table21_row(11, 10, 1000, 60, 100)
    table21_row(11, 10, 10000, 600, 100)
    table21_row(22, 20, 10000, 450, 100)


table21()
<commit_before><commit_msg>Add script to recreate the tables in the book<commit_after>
# Copyright 2013 Pau Haro Negre
# based on C++ code by Carl Staelin Copyright 2009-2011
#
# See the NOTICE file distributed with this work for additional information
# regarding copyright ownership.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from run_experiment import Cognon
from run_experiment import Configuration


def run_table_row(w, active, C, D1, D2, Q, R, G, H):
    repetitions = 20
    config = Configuration()
    config.neuron_params(C, D1, D2, Q, R, G, H)
    config.test_params(active, w, 5000)
    cognon = Cognon()
    return cognon.run_configuration(config, repetitions)


def table21_row(N, H, S, w, G):
    r = run_table_row(w, N, 1, 1, 1, S/float(H), 1, G, float(H))
    pF_mean = r['pF'].mean()*100
    pF_std = r['pF'].std()*100
    L_mean = r['L'].mean()
    print "{:.2f}\t{:.2}\t{:.1f}".format(pF_mean, pF_std, L_mean)


def table21():
    # N | H | S0 | w | G
    # table21_row( 4, 4, 10, 1, 100)
    table21_row( 5, 4, 10, 1, 100)
    table21_row( 4, 4, 10, 2, 100)
    table21_row(10, 10, 100, 4, 100)
    table21_row(11, 10, 100, 4, 100)
    table21_row(11, 10, 100, 5, 100)
    table21_row(11, 10, 1000, 60, 100)
    table21_row(11, 10, 10000, 600, 100)
    table21_row(22, 20, 10000, 450, 100)


table21()
24b3bd5c7f2ad5138bbf74dbdce571b4235404af
tests/test_user_management.py
tests/test_user_management.py
from __future__ import print_function  # Use print() instead of print

from app.core.models import User
from app import db
from app.startup.create_users import find_or_create_user


def test_user_management(client):
    # allows user to register
    user = find_or_create_user(u'User2', u'Example', u'Mr',
                               u'user2@example.com', 'Password1')
    assert user

    # allows user to reset password
    new_user = find_or_create_user(u'User2', u'Example', u'Mr',
                                   u'user2@example.com', 'Password1')
    new_user.password = u'Password2'
    db.session.commit()
    user = User.query.filter(User.email == u'user2@example.com').first()
    assert user.password == 'Password2'

    # allow user to edit title
    new_user = find_or_create_user(u'User2', u'Example', u'Mr',
                                   u'user2@example.com', 'Password1')
    new_user.title = u'Dr'
    db.session.commit()
    user = User.query.filter(User.email == u'user2@example.com').first()
    assert user.title == 'Dr'

    # allow user to edit first_name and last_name
    new_user = find_or_create_user(u'User2', u'Example', u'Mr',
                                   u'user2@example.com', 'Password1')
    new_user.first_name = u'Jeremy'
    new_user.last_name = u'Skipper'
    db.session.commit()
    user = User.query.filter(User.email == u'user2@example.com').first()
    assert (user.first_name == 'Jeremy' and user.last_name == 'Skipper')
Add test cases for user management
Add test cases for user management
Python
bsd-2-clause
UCL-CS35/incdb-user,UCL-CS35/incdb-user,UCL-CS35/incdb-user
Add test cases for user management
from __future__ import print_function  # Use print() instead of print

from app.core.models import User
from app import db
from app.startup.create_users import find_or_create_user


def test_user_management(client):
    # allows user to register
    user = find_or_create_user(u'User2', u'Example', u'Mr',
                               u'user2@example.com', 'Password1')
    assert user

    # allows user to reset password
    new_user = find_or_create_user(u'User2', u'Example', u'Mr',
                                   u'user2@example.com', 'Password1')
    new_user.password = u'Password2'
    db.session.commit()
    user = User.query.filter(User.email == u'user2@example.com').first()
    assert user.password == 'Password2'

    # allow user to edit title
    new_user = find_or_create_user(u'User2', u'Example', u'Mr',
                                   u'user2@example.com', 'Password1')
    new_user.title = u'Dr'
    db.session.commit()
    user = User.query.filter(User.email == u'user2@example.com').first()
    assert user.title == 'Dr'

    # allow user to edit first_name and last_name
    new_user = find_or_create_user(u'User2', u'Example', u'Mr',
                                   u'user2@example.com', 'Password1')
    new_user.first_name = u'Jeremy'
    new_user.last_name = u'Skipper'
    db.session.commit()
    user = User.query.filter(User.email == u'user2@example.com').first()
    assert (user.first_name == 'Jeremy' and user.last_name == 'Skipper')
<commit_before><commit_msg>Add test cases for user management<commit_after>
from __future__ import print_function  # Use print() instead of print

from app.core.models import User
from app import db
from app.startup.create_users import find_or_create_user


def test_user_management(client):
    # allows user to register
    user = find_or_create_user(u'User2', u'Example', u'Mr',
                               u'user2@example.com', 'Password1')
    assert user

    # allows user to reset password
    new_user = find_or_create_user(u'User2', u'Example', u'Mr',
                                   u'user2@example.com', 'Password1')
    new_user.password = u'Password2'
    db.session.commit()
    user = User.query.filter(User.email == u'user2@example.com').first()
    assert user.password == 'Password2'

    # allow user to edit title
    new_user = find_or_create_user(u'User2', u'Example', u'Mr',
                                   u'user2@example.com', 'Password1')
    new_user.title = u'Dr'
    db.session.commit()
    user = User.query.filter(User.email == u'user2@example.com').first()
    assert user.title == 'Dr'

    # allow user to edit first_name and last_name
    new_user = find_or_create_user(u'User2', u'Example', u'Mr',
                                   u'user2@example.com', 'Password1')
    new_user.first_name = u'Jeremy'
    new_user.last_name = u'Skipper'
    db.session.commit()
    user = User.query.filter(User.email == u'user2@example.com').first()
    assert (user.first_name == 'Jeremy' and user.last_name == 'Skipper')
Add test cases for user management
from __future__ import print_function  # Use print() instead of print

from app.core.models import User
from app import db
from app.startup.create_users import find_or_create_user


def test_user_management(client):
    # allows user to register
    user = find_or_create_user(u'User2', u'Example', u'Mr',
                               u'user2@example.com', 'Password1')
    assert user

    # allows user to reset password
    new_user = find_or_create_user(u'User2', u'Example', u'Mr',
                                   u'user2@example.com', 'Password1')
    new_user.password = u'Password2'
    db.session.commit()
    user = User.query.filter(User.email == u'user2@example.com').first()
    assert user.password == 'Password2'

    # allow user to edit title
    new_user = find_or_create_user(u'User2', u'Example', u'Mr',
                                   u'user2@example.com', 'Password1')
    new_user.title = u'Dr'
    db.session.commit()
    user = User.query.filter(User.email == u'user2@example.com').first()
    assert user.title == 'Dr'

    # allow user to edit first_name and last_name
    new_user = find_or_create_user(u'User2', u'Example', u'Mr',
                                   u'user2@example.com', 'Password1')
    new_user.first_name = u'Jeremy'
    new_user.last_name = u'Skipper'
    db.session.commit()
    user = User.query.filter(User.email == u'user2@example.com').first()
    assert (user.first_name == 'Jeremy' and user.last_name == 'Skipper')
<commit_before><commit_msg>Add test cases for user management<commit_after>
from __future__ import print_function  # Use print() instead of print

from app.core.models import User
from app import db
from app.startup.create_users import find_or_create_user


def test_user_management(client):
    # allows user to register
    user = find_or_create_user(u'User2', u'Example', u'Mr',
                               u'user2@example.com', 'Password1')
    assert user

    # allows user to reset password
    new_user = find_or_create_user(u'User2', u'Example', u'Mr',
                                   u'user2@example.com', 'Password1')
    new_user.password = u'Password2'
    db.session.commit()
    user = User.query.filter(User.email == u'user2@example.com').first()
    assert user.password == 'Password2'

    # allow user to edit title
    new_user = find_or_create_user(u'User2', u'Example', u'Mr',
                                   u'user2@example.com', 'Password1')
    new_user.title = u'Dr'
    db.session.commit()
    user = User.query.filter(User.email == u'user2@example.com').first()
    assert user.title == 'Dr'

    # allow user to edit first_name and last_name
    new_user = find_or_create_user(u'User2', u'Example', u'Mr',
                                   u'user2@example.com', 'Password1')
    new_user.first_name = u'Jeremy'
    new_user.last_name = u'Skipper'
    db.session.commit()
    user = User.query.filter(User.email == u'user2@example.com').first()
    assert (user.first_name == 'Jeremy' and user.last_name == 'Skipper')
a291e92f65d291f48281f131bd385976da65b12f
migrations/versions/3e6c454a6fc7_add_older_g_cloud_framewoks.py
migrations/versions/3e6c454a6fc7_add_older_g_cloud_framewoks.py
"""Add older G-Cloud Framewoks Revision ID: 3e6c454a6fc7 Revises: 3acf60608a7d Create Date: 2015-04-02 15:31:57.243449 """ # revision identifiers, used by Alembic. revision = '3e6c454a6fc7' down_revision = '3acf60608a7d' from alembic import op from sqlalchemy.sql import table, column from sqlalchemy import String, Boolean import sqlalchemy as sa frameworks = table('frameworks', column('name', String), column('expired', Boolean) ) def upgrade(): op.execute( frameworks.insert(). \ values({'name': op.inline_literal('G-Cloud 4'), 'expired': op.inline_literal(True)}) ) op.execute( frameworks.insert(). \ values({'name': op.inline_literal('G-Cloud 5'), 'expired': op.inline_literal(False)}) ) def downgrade(): op.execute( frameworks.delete().where(frameworks.c.name == 'G-Cloud 4') ) op.execute( frameworks.delete().where(frameworks.c.name == 'G-Cloud 5') )
Add G4 and G5 frameworks to database
Add G4 and G5 frameworks to database
Python
mit
alphagov/digitalmarketplace-api,mtekel/digitalmarketplace-api,alphagov/digitalmarketplace-api,mtekel/digitalmarketplace-api,mtekel/digitalmarketplace-api,RichardKnop/digitalmarketplace-api,alphagov/digitalmarketplace-api,mtekel/digitalmarketplace-api,RichardKnop/digitalmarketplace-api,RichardKnop/digitalmarketplace-api,RichardKnop/digitalmarketplace-api
Add G4 and G5 frameworks to database
"""Add older G-Cloud Framewoks Revision ID: 3e6c454a6fc7 Revises: 3acf60608a7d Create Date: 2015-04-02 15:31:57.243449 """ # revision identifiers, used by Alembic. revision = '3e6c454a6fc7' down_revision = '3acf60608a7d' from alembic import op from sqlalchemy.sql import table, column from sqlalchemy import String, Boolean import sqlalchemy as sa frameworks = table('frameworks', column('name', String), column('expired', Boolean) ) def upgrade(): op.execute( frameworks.insert(). \ values({'name': op.inline_literal('G-Cloud 4'), 'expired': op.inline_literal(True)}) ) op.execute( frameworks.insert(). \ values({'name': op.inline_literal('G-Cloud 5'), 'expired': op.inline_literal(False)}) ) def downgrade(): op.execute( frameworks.delete().where(frameworks.c.name == 'G-Cloud 4') ) op.execute( frameworks.delete().where(frameworks.c.name == 'G-Cloud 5') )
<commit_before><commit_msg>Add G4 and G5 frameworks to database<commit_after>
"""Add older G-Cloud Framewoks Revision ID: 3e6c454a6fc7 Revises: 3acf60608a7d Create Date: 2015-04-02 15:31:57.243449 """ # revision identifiers, used by Alembic. revision = '3e6c454a6fc7' down_revision = '3acf60608a7d' from alembic import op from sqlalchemy.sql import table, column from sqlalchemy import String, Boolean import sqlalchemy as sa frameworks = table('frameworks', column('name', String), column('expired', Boolean) ) def upgrade(): op.execute( frameworks.insert(). \ values({'name': op.inline_literal('G-Cloud 4'), 'expired': op.inline_literal(True)}) ) op.execute( frameworks.insert(). \ values({'name': op.inline_literal('G-Cloud 5'), 'expired': op.inline_literal(False)}) ) def downgrade(): op.execute( frameworks.delete().where(frameworks.c.name == 'G-Cloud 4') ) op.execute( frameworks.delete().where(frameworks.c.name == 'G-Cloud 5') )
Add G4 and G5 frameworks to database
"""Add older G-Cloud Framewoks

Revision ID: 3e6c454a6fc7
Revises: 3acf60608a7d
Create Date: 2015-04-02 15:31:57.243449

"""

# revision identifiers, used by Alembic.
revision = '3e6c454a6fc7'
down_revision = '3acf60608a7d'

from alembic import op
from sqlalchemy.sql import table, column
from sqlalchemy import String, Boolean
import sqlalchemy as sa


frameworks = table('frameworks',
                   column('name', String),
                   column('expired', Boolean)
                   )


def upgrade():
    op.execute(
        frameworks.insert(). \
        values({'name': op.inline_literal('G-Cloud 4'),
                'expired': op.inline_literal(True)})
    )
    op.execute(
        frameworks.insert(). \
        values({'name': op.inline_literal('G-Cloud 5'),
                'expired': op.inline_literal(False)})
    )


def downgrade():
    op.execute(
        frameworks.delete().where(frameworks.c.name == 'G-Cloud 4')
    )
    op.execute(
        frameworks.delete().where(frameworks.c.name == 'G-Cloud 5')
    )
<commit_before><commit_msg>Add G4 and G5 frameworks to database<commit_after>
"""Add older G-Cloud Framewoks

Revision ID: 3e6c454a6fc7
Revises: 3acf60608a7d
Create Date: 2015-04-02 15:31:57.243449

"""

# revision identifiers, used by Alembic.
revision = '3e6c454a6fc7'
down_revision = '3acf60608a7d'

from alembic import op
from sqlalchemy.sql import table, column
from sqlalchemy import String, Boolean
import sqlalchemy as sa


frameworks = table('frameworks',
                   column('name', String),
                   column('expired', Boolean)
                   )


def upgrade():
    op.execute(
        frameworks.insert(). \
        values({'name': op.inline_literal('G-Cloud 4'),
                'expired': op.inline_literal(True)})
    )
    op.execute(
        frameworks.insert(). \
        values({'name': op.inline_literal('G-Cloud 5'),
                'expired': op.inline_literal(False)})
    )


def downgrade():
    op.execute(
        frameworks.delete().where(frameworks.c.name == 'G-Cloud 4')
    )
    op.execute(
        frameworks.delete().where(frameworks.c.name == 'G-Cloud 5')
    )
d2419fc5a232a7775f444a805abcf74dbfd76299
marketpulse/main/migrations/0009_auto_20150224_1356.py
marketpulse/main/migrations/0009_auto_20150224_1356.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations

from django_countries import countries


CARRIERS = {
    'Telefonica Digital': [{'Movistar': ['Spain', 'Colombia', 'Venezuela', 'Peru',
                                         'Uruguay', 'Mexico', 'Chile', 'El Salvador',
                                         'Nicaragua', 'Guatemala', 'Costa Rica']},
                           {'Vivo': ['Brazil']},
                           {'O2': ['Germany']}],
    'Deutsche Telekom': [{'T-Mobile': ['Poland', 'Czech Republic', 'Macedonia']},
                         {'Congstar': ['Germany']},
                         {'Telekom': ['Hungary']},
                         {'Cosmote': ['Greece']}],
    'Telenor': [{'Telenor': ['Hugnary', 'Serbia', 'Montenegro']},
                {'Grameenphone': ['Bangladesh']}],
    'Telecom Italia': [{'TIM': ['Italy']}],
    'America Movil': [{'Telcel': ['Mexico']}],
    'Retailer': [{'E.Leclerc': ['France']}],
    'Snapdeal': [{'Spice': ['India']}],
    'JB Hifi': [{'ZTE': ['Australia']}],
    'Alcatel OneTouch': [{'TCL': ['India']}],
    'www.flicpart.com': [{'ZEN Mobil': ['India']}],
    'Megafon': [{'Megafon': ['Russia']}],
    'Cherry Mobile': [{'Cherry Mobile': ['Philippines']}],
    'Banglalink': [{'Banglalink': ['Bangladesh']}],
    'KDDI': [{'KDDI': ['Japan']}]
}

COUNTRIES_DICT = {v: k for k, v in countries}


def remove_carriers(apps, schema_editor):
    Carrier = apps.get_model('main', 'Carrier')
    Carrier.objects.all().delete()


def add_carriers(apps, schema_editor):
    Carrier = apps.get_model('main', 'Carrier')
    for parent_operator, carriers in CARRIERS.items():
        for carrier_data in carriers:
            for carrier, carrier_countries in carrier_data.items():
                for carrier_country in carrier_countries:
                    if carrier_country in COUNTRIES_DICT:
                        Carrier.objects.create(parent_operator=parent_operator,
                                               name=carrier,
                                               country=COUNTRIES_DICT[carrier_country])
                        continue
    Carrier.objects.create(parent_operator='Other', name='Other')


def backwards_method(apps, schema_editor):
    pass


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0008_auto_20150220_1830'),
    ]

    operations = [
        migrations.RunPython(remove_carriers, backwards_method),
        migrations.RunPython(add_carriers, remove_carriers),
    ]
Add a list of carriers in the app.
Add a list of carriers in the app.
Python
mpl-2.0
johngian/marketpulse,mozilla/marketpulse,akatsoulas/marketpulse,johngian/marketpulse,akatsoulas/marketpulse,mozilla/marketpulse,akatsoulas/marketpulse,mozilla/marketpulse,johngian/marketpulse,mozilla/marketpulse,akatsoulas/marketpulse,johngian/marketpulse
Add a list of carriers in the app.
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations

from django_countries import countries


CARRIERS = {
    'Telefonica Digital': [{'Movistar': ['Spain', 'Colombia', 'Venezuela', 'Peru',
                                         'Uruguay', 'Mexico', 'Chile', 'El Salvador',
                                         'Nicaragua', 'Guatemala', 'Costa Rica']},
                           {'Vivo': ['Brazil']},
                           {'O2': ['Germany']}],
    'Deutsche Telekom': [{'T-Mobile': ['Poland', 'Czech Republic', 'Macedonia']},
                         {'Congstar': ['Germany']},
                         {'Telekom': ['Hungary']},
                         {'Cosmote': ['Greece']}],
    'Telenor': [{'Telenor': ['Hugnary', 'Serbia', 'Montenegro']},
                {'Grameenphone': ['Bangladesh']}],
    'Telecom Italia': [{'TIM': ['Italy']}],
    'America Movil': [{'Telcel': ['Mexico']}],
    'Retailer': [{'E.Leclerc': ['France']}],
    'Snapdeal': [{'Spice': ['India']}],
    'JB Hifi': [{'ZTE': ['Australia']}],
    'Alcatel OneTouch': [{'TCL': ['India']}],
    'www.flicpart.com': [{'ZEN Mobil': ['India']}],
    'Megafon': [{'Megafon': ['Russia']}],
    'Cherry Mobile': [{'Cherry Mobile': ['Philippines']}],
    'Banglalink': [{'Banglalink': ['Bangladesh']}],
    'KDDI': [{'KDDI': ['Japan']}]
}

COUNTRIES_DICT = {v: k for k, v in countries}


def remove_carriers(apps, schema_editor):
    Carrier = apps.get_model('main', 'Carrier')
    Carrier.objects.all().delete()


def add_carriers(apps, schema_editor):
    Carrier = apps.get_model('main', 'Carrier')
    for parent_operator, carriers in CARRIERS.items():
        for carrier_data in carriers:
            for carrier, carrier_countries in carrier_data.items():
                for carrier_country in carrier_countries:
                    if carrier_country in COUNTRIES_DICT:
                        Carrier.objects.create(parent_operator=parent_operator,
                                               name=carrier,
                                               country=COUNTRIES_DICT[carrier_country])
                        continue
    Carrier.objects.create(parent_operator='Other', name='Other')


def backwards_method(apps, schema_editor):
    pass


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0008_auto_20150220_1830'),
    ]

    operations = [
        migrations.RunPython(remove_carriers, backwards_method),
        migrations.RunPython(add_carriers, remove_carriers),
    ]
<commit_before><commit_msg>Add a list of carriers in the app.<commit_after>
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations

from django_countries import countries


CARRIERS = {
    'Telefonica Digital': [{'Movistar': ['Spain', 'Colombia', 'Venezuela', 'Peru',
                                         'Uruguay', 'Mexico', 'Chile', 'El Salvador',
                                         'Nicaragua', 'Guatemala', 'Costa Rica']},
                           {'Vivo': ['Brazil']},
                           {'O2': ['Germany']}],
    'Deutsche Telekom': [{'T-Mobile': ['Poland', 'Czech Republic', 'Macedonia']},
                         {'Congstar': ['Germany']},
                         {'Telekom': ['Hungary']},
                         {'Cosmote': ['Greece']}],
    'Telenor': [{'Telenor': ['Hugnary', 'Serbia', 'Montenegro']},
                {'Grameenphone': ['Bangladesh']}],
    'Telecom Italia': [{'TIM': ['Italy']}],
    'America Movil': [{'Telcel': ['Mexico']}],
    'Retailer': [{'E.Leclerc': ['France']}],
    'Snapdeal': [{'Spice': ['India']}],
    'JB Hifi': [{'ZTE': ['Australia']}],
    'Alcatel OneTouch': [{'TCL': ['India']}],
    'www.flicpart.com': [{'ZEN Mobil': ['India']}],
    'Megafon': [{'Megafon': ['Russia']}],
    'Cherry Mobile': [{'Cherry Mobile': ['Philippines']}],
    'Banglalink': [{'Banglalink': ['Bangladesh']}],
    'KDDI': [{'KDDI': ['Japan']}]
}

COUNTRIES_DICT = {v: k for k, v in countries}


def remove_carriers(apps, schema_editor):
    Carrier = apps.get_model('main', 'Carrier')
    Carrier.objects.all().delete()


def add_carriers(apps, schema_editor):
    Carrier = apps.get_model('main', 'Carrier')
    for parent_operator, carriers in CARRIERS.items():
        for carrier_data in carriers:
            for carrier, carrier_countries in carrier_data.items():
                for carrier_country in carrier_countries:
                    if carrier_country in COUNTRIES_DICT:
                        Carrier.objects.create(parent_operator=parent_operator,
                                               name=carrier,
                                               country=COUNTRIES_DICT[carrier_country])
                        continue
    Carrier.objects.create(parent_operator='Other', name='Other')


def backwards_method(apps, schema_editor):
    pass


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0008_auto_20150220_1830'),
    ]

    operations = [
        migrations.RunPython(remove_carriers, backwards_method),
        migrations.RunPython(add_carriers, remove_carriers),
    ]
Add a list of carriers in the app.
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations

from django_countries import countries


CARRIERS = {
    'Telefonica Digital': [{'Movistar': ['Spain', 'Colombia', 'Venezuela', 'Peru',
                                         'Uruguay', 'Mexico', 'Chile', 'El Salvador',
                                         'Nicaragua', 'Guatemala', 'Costa Rica']},
                           {'Vivo': ['Brazil']},
                           {'O2': ['Germany']}],
    'Deutsche Telekom': [{'T-Mobile': ['Poland', 'Czech Republic', 'Macedonia']},
                         {'Congstar': ['Germany']},
                         {'Telekom': ['Hungary']},
                         {'Cosmote': ['Greece']}],
    'Telenor': [{'Telenor': ['Hugnary', 'Serbia', 'Montenegro']},
                {'Grameenphone': ['Bangladesh']}],
    'Telecom Italia': [{'TIM': ['Italy']}],
    'America Movil': [{'Telcel': ['Mexico']}],
    'Retailer': [{'E.Leclerc': ['France']}],
    'Snapdeal': [{'Spice': ['India']}],
    'JB Hifi': [{'ZTE': ['Australia']}],
    'Alcatel OneTouch': [{'TCL': ['India']}],
    'www.flicpart.com': [{'ZEN Mobil': ['India']}],
    'Megafon': [{'Megafon': ['Russia']}],
    'Cherry Mobile': [{'Cherry Mobile': ['Philippines']}],
    'Banglalink': [{'Banglalink': ['Bangladesh']}],
    'KDDI': [{'KDDI': ['Japan']}]
}

COUNTRIES_DICT = {v: k for k, v in countries}


def remove_carriers(apps, schema_editor):
    Carrier = apps.get_model('main', 'Carrier')
    Carrier.objects.all().delete()


def add_carriers(apps, schema_editor):
    Carrier = apps.get_model('main', 'Carrier')
    for parent_operator, carriers in CARRIERS.items():
        for carrier_data in carriers:
            for carrier, carrier_countries in carrier_data.items():
                for carrier_country in carrier_countries:
                    if carrier_country in COUNTRIES_DICT:
                        Carrier.objects.create(parent_operator=parent_operator,
                                               name=carrier,
                                               country=COUNTRIES_DICT[carrier_country])
                        continue
    Carrier.objects.create(parent_operator='Other', name='Other')


def backwards_method(apps, schema_editor):
    pass


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0008_auto_20150220_1830'),
    ]

    operations = [
        migrations.RunPython(remove_carriers, backwards_method),
        migrations.RunPython(add_carriers, remove_carriers),
    ]
<commit_before><commit_msg>Add a list of carriers in the app.<commit_after># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations from django_countries import countries CARRIERS = { 'Telefonica Digital': [{'Movistar': ['Spain', 'Colombia', 'Venezuela', 'Peru', 'Uruguay', 'Mexico', 'Chile', 'El Salvador', 'Nicaragua', 'Guatemala', 'Costa Rica']}, {'Vivo': ['Brazil']}, {'O2': ['Germany']}], 'Deutsche Telekom': [{'T-Mobile': ['Poland', 'Czech Republic', 'Macedonia']}, {'Congstar': ['Germany']}, {'Telekom': ['Hungary']}, {'Cosmote': ['Greece']}], 'Telenor': [{'Telenor': ['Hungary', 'Serbia', 'Montenegro']}, {'Grameenphone': ['Bangladesh']}], 'Telecom Italia': [{'TIM': ['Italy']}], 'America Movil': [{'Telcel': ['Mexico']}], 'Retailer': [{'E.Leclerc': ['France']}], 'Snapdeal': [{'Spice': ['India']}], 'JB Hifi': [{'ZTE': ['Australia']}], 'Alcatel OneTouch': [{'TCL': ['India']}], 'www.flicpart.com': [{'ZEN Mobil': ['India']}], 'Megafon': [{'Megafon': ['Russia']}], 'Cherry Mobile': [{'Cherry Mobile': ['Philippines']}], 'Banglalink': [{'Banglalink': ['Bangladesh']}], 'KDDI': [{'KDDI': ['Japan']}] } COUNTRIES_DICT = {v: k for k, v in countries} def remove_carriers(apps, schema_editor): Carrier = apps.get_model('main', 'Carrier') Carrier.objects.all().delete() def add_carriers(apps, schema_editor): Carrier = apps.get_model('main', 'Carrier') for parent_operator, carriers in CARRIERS.items(): for carrier_data in carriers: for carrier, carrier_countries in carrier_data.items(): for carrier_country in carrier_countries: if carrier_country in COUNTRIES_DICT: Carrier.objects.create(parent_operator=parent_operator, name=carrier, country=COUNTRIES_DICT[carrier_country]) continue Carrier.objects.create(parent_operator='Other', name='Other') def backwards_method(apps, schema_editor): pass class Migration(migrations.Migration): dependencies = [ ('main', '0008_auto_20150220_1830'), ] operations = [ migrations.RunPython(remove_carriers, backwards_method), migrations.RunPython(add_carriers, remove_carriers), ]
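A note on the carriers record above: it follows Django's standard data-migration pattern, in which the model is looked up through apps.get_model() so the function operates on the historical schema rather than on the live models module, and each RunPython operation pairs a forward function with a reverse one. A minimal sketch of that pattern follows; the model names mirror the record, but the '0001_initial' dependency and the 'Example' row are placeholders, not taken from the commit:

from django.db import migrations

def forwards(apps, schema_editor):
    # apps.get_model returns the historical model state, never the live class.
    Carrier = apps.get_model('main', 'Carrier')
    Carrier.objects.create(parent_operator='Example', name='Example')

def backwards(apps, schema_editor):
    Carrier = apps.get_model('main', 'Carrier')
    Carrier.objects.filter(name='Example').delete()

class Migration(migrations.Migration):
    dependencies = [('main', '0001_initial')]  # placeholder dependency
    operations = [migrations.RunPython(forwards, backwards)]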
ad7187dc6b24be4f41df2b8750dbe450fa597550
migrations/versions/530c22761e27_fix_column_lengths.py
migrations/versions/530c22761e27_fix_column_lengths.py
# -*- coding: utf-8 -*- """Fix column lengths Revision ID: 530c22761e27 Revises: e2b28adfa135 Create Date: 2020-04-20 16:19:22.597712 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '530c22761e27' down_revision = 'e2b28adfa135' branch_labels = None depends_on = None def upgrade(): op.alter_column( 'profile', 'name', existing_type=sa.Unicode(250), type_=sa.Unicode(63) ) op.alter_column( 'user', 'timezone', existing_type=sa.Unicode(40), type_=sa.Unicode(50) ) def downgrade(): op.alter_column( 'user', 'timezone', existing_type=sa.Unicode(50), type_=sa.Unicode(40) ) op.alter_column( 'profile', 'name', existing_type=sa.Unicode(63), type_=sa.Unicode(250) )
Fix column lengths to match schema
Fix column lengths to match schema
Python
agpl-3.0
hasgeek/funnel,hasgeek/funnel,hasgeek/funnel,hasgeek/funnel,hasgeek/funnel
Fix column lengths to match schema
# -*- coding: utf-8 -*- """Fix column lengths Revision ID: 530c22761e27 Revises: e2b28adfa135 Create Date: 2020-04-20 16:19:22.597712 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '530c22761e27' down_revision = 'e2b28adfa135' branch_labels = None depends_on = None def upgrade(): op.alter_column( 'profile', 'name', existing_type=sa.Unicode(250), type_=sa.Unicode(63) ) op.alter_column( 'user', 'timezone', existing_type=sa.Unicode(40), type_=sa.Unicode(50) ) def downgrade(): op.alter_column( 'user', 'timezone', existing_type=sa.Unicode(50), type_=sa.Unicode(40) ) op.alter_column( 'profile', 'name', existing_type=sa.Unicode(63), type_=sa.Unicode(250) )
<commit_before><commit_msg>Fix column lengths to match schema<commit_after>
# -*- coding: utf-8 -*- """Fix column lengths Revision ID: 530c22761e27 Revises: e2b28adfa135 Create Date: 2020-04-20 16:19:22.597712 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '530c22761e27' down_revision = 'e2b28adfa135' branch_labels = None depends_on = None def upgrade(): op.alter_column( 'profile', 'name', existing_type=sa.Unicode(250), type_=sa.Unicode(63) ) op.alter_column( 'user', 'timezone', existing_type=sa.Unicode(40), type_=sa.Unicode(50) ) def downgrade(): op.alter_column( 'user', 'timezone', existing_type=sa.Unicode(50), type_=sa.Unicode(40) ) op.alter_column( 'profile', 'name', existing_type=sa.Unicode(63), type_=sa.Unicode(250) )
Fix column lengths to match schema# -*- coding: utf-8 -*- """Fix column lengths Revision ID: 530c22761e27 Revises: e2b28adfa135 Create Date: 2020-04-20 16:19:22.597712 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '530c22761e27' down_revision = 'e2b28adfa135' branch_labels = None depends_on = None def upgrade(): op.alter_column( 'profile', 'name', existing_type=sa.Unicode(250), type_=sa.Unicode(63) ) op.alter_column( 'user', 'timezone', existing_type=sa.Unicode(40), type_=sa.Unicode(50) ) def downgrade(): op.alter_column( 'user', 'timezone', existing_type=sa.Unicode(50), type_=sa.Unicode(40) ) op.alter_column( 'profile', 'name', existing_type=sa.Unicode(63), type_=sa.Unicode(250) )
<commit_before><commit_msg>Fix column lengths to match schema<commit_after># -*- coding: utf-8 -*- """Fix column lengths Revision ID: 530c22761e27 Revises: e2b28adfa135 Create Date: 2020-04-20 16:19:22.597712 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '530c22761e27' down_revision = 'e2b28adfa135' branch_labels = None depends_on = None def upgrade(): op.alter_column( 'profile', 'name', existing_type=sa.Unicode(250), type_=sa.Unicode(63) ) op.alter_column( 'user', 'timezone', existing_type=sa.Unicode(40), type_=sa.Unicode(50) ) def downgrade(): op.alter_column( 'user', 'timezone', existing_type=sa.Unicode(50), type_=sa.Unicode(40) ) op.alter_column( 'profile', 'name', existing_type=sa.Unicode(63), type_=sa.Unicode(250) )
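A note on the Alembic record above: upgrade() and downgrade() are exact mirrors, and every alter_column() call passes existing_type alongside type_, which Alembic needs on backends such as MySQL to render a complete ALTER statement. One thing the commit does not address is that shrinking profile.name from Unicode(250) to Unicode(63) can fail or silently truncate if longer values already exist. A hedged sketch of a pre-flight check; the guard is an illustrative addition, not part of the commit:

from alembic import op
import sqlalchemy as sa

def upgrade():
    conn = op.get_bind()
    # Abort before shrinking the column if any existing value would no longer fit.
    too_long = conn.execute(
        sa.text("SELECT COUNT(*) FROM profile WHERE LENGTH(name) > 63")
    ).scalar()
    if too_long:
        raise RuntimeError("profile.name has %d values longer than 63 chars" % too_long)
    op.alter_column('profile', 'name', existing_type=sa.Unicode(250), type_=sa.Unicode(63))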
60a228c8d530e11b10d44747c87d6585e45d8ab6
play.py
play.py
#!/usr/bin/env python3 import os import csv import cv2 import numpy as np import sklearn import copy import random import keras import tensorflow as tf from keras.preprocessing import image from keras.models import Model, Sequential from keras.layers import Flatten, Dense, Dropout from keras.layers.convolutional import Conv2D from keras.layers.core import Lambda from sklearn.model_selection import train_test_split from skimage import draw from functools import lru_cache import matplotlib.pyplot as plt from helper import * # Take an array of boards, and array of who won - 0 if computer, 1 if human model = None def makeModel(): global model if model != None: return inputs = keras.layers.Input(shape=(2,3,3)) output = Flatten()(inputs) output = Dense(100, activation='relu')(inputs) output = Dropout(0.5)(output) output = Dense(50, activation='relu')(output) output = Dropout(0.5)(output) output = Dense(20, activation='relu')(output) output = Dropout(0.5)(output) output = Dense(1, activation='relu', use_bias=False)(output) model = Model(inputs=inputs, outputs=output) tbCallBack = keras.callbacks.TensorBoard( log_dir='./log', histogram_freq=1, write_graph=True, write_images=True, embeddings_freq=1, embeddings_layer_names=None, embeddings_metadata=None) checkpointCallback = keras.callbacks.ModelCheckpoint( 'model_running.h5', monitor='val_loss', verbose=1, save_best_only=True, save_weights_only=False, mode='auto', period=1) reduce_lr = keras.callbacks.ReduceLROnPlateau( monitor='val_loss', factor=0.2, patience=5, min_lr=0.0001) model.compile(loss='mse', optimizer=keras.optimizers.Adam(lr=0.001)) from keras.models import load_model #model = load_weights('model_running.h5') boardgames = [] whowon = [] def train(boardgames, whowon): global model makeModel() model.fit(boardgames, whowon, epochs=100, validation_split=0.2, shuffle=True, verbose=1, callbacks=[tbCallBack, checkpointCallback, reduce_lr]) # board[0,:,:] is for computer player. 0 if there's no piece and 1 if there is # board[1,:,:] is for other player. 0 if there's no piece and 1 if there is def find_next_best_move(board): global model
Add initial start - doesn't work
Add initial start - doesn't work
Python
mit
johnflux/deep-learning-tictactoe
Add initial start - doesn't work
#!/usr/bin/env python3 import os import csv import cv2 import numpy as np import sklearn import copy import random import keras import tensorflow as tf from keras.preprocessing import image from keras.models import Model, Sequential from keras.layers import Flatten, Dense, Dropout from keras.layers.convolutional import Conv2D from keras.layers.core import Lambda from sklearn.model_selection import train_test_split from skimage import draw from functools import lru_cache import matplotlib.pyplot as plt from helper import * # Take an array of boards, and array of who won - 0 if computer, 1 if human model = None def makeModel(): global model if model != None: return inputs = keras.layers.Input(shape=(2,3,3)) output = Flatten()(inputs) output = Dense(100, activation='relu')(inputs) output = Dropout(0.5)(output) output = Dense(50, activation='relu')(output) output = Dropout(0.5)(output) output = Dense(20, activation='relu')(output) output = Dropout(0.5)(output) output = Dense(1, activation='relu', use_bias=False)(output) model = Model(inputs=inputs, outputs=output) tbCallBack = keras.callbacks.TensorBoard( log_dir='./log', histogram_freq=1, write_graph=True, write_images=True, embeddings_freq=1, embeddings_layer_names=None, embeddings_metadata=None) checkpointCallback = keras.callbacks.ModelCheckpoint( 'model_running.h5', monitor='val_loss', verbose=1, save_best_only=True, save_weights_only=False, mode='auto', period=1) reduce_lr = keras.callbacks.ReduceLROnPlateau( monitor='val_loss', factor=0.2, patience=5, min_lr=0.0001) model.compile(loss='mse', optimizer=keras.optimizers.Adam(lr=0.001)) from keras.models import load_model #model = load_weights('model_running.h5') boardgames = [] whowon = [] def train(boardgames, whowon): global model makeModel() model.fit(boardgames, whowon, epochs=100, validation_split=0.2, shuffle=True, verbose=1, callbacks=[tbCallBack, checkpointCallback, reduce_lr]) # board[0,:,:] is for computer player. 0 if there's no piece and 1 if there is # board[1,:,:] is for other player. 0 if there's no piece and 1 if there is def find_next_best_move(board): global model
<commit_before><commit_msg>Add initial start - doesn't work<commit_after>
#!/usr/bin/env python3 import os import csv import cv2 import numpy as np import sklearn import copy import random import keras import tensorflow as tf from keras.preprocessing import image from keras.models import Model, Sequential from keras.layers import Flatten, Dense, Dropout from keras.layers.convolutional import Conv2D from keras.layers.core import Lambda from sklearn.model_selection import train_test_split from skimage import draw from functools import lru_cache import matplotlib.pyplot as plt from helper import * # Take an array of boards, and array of who won - 0 if computer, 1 if human model = None def makeModel(): global model if model != None: return inputs = keras.layers.Input(shape=(2,3,3)) output = Flatten()(inputs) output = Dense(100, activation='relu')(inputs) output = Dropout(0.5)(output) output = Dense(50, activation='relu')(output) output = Dropout(0.5)(output) output = Dense(20, activation='relu')(output) output = Dropout(0.5)(output) output = Dense(1, activation='relu', use_bias=False)(output) model = Model(inputs=inputs, outputs=output) tbCallBack = keras.callbacks.TensorBoard( log_dir='./log', histogram_freq=1, write_graph=True, write_images=True, embeddings_freq=1, embeddings_layer_names=None, embeddings_metadata=None) checkpointCallback = keras.callbacks.ModelCheckpoint( 'model_running.h5', monitor='val_loss', verbose=1, save_best_only=True, save_weights_only=False, mode='auto', period=1) reduce_lr = keras.callbacks.ReduceLROnPlateau( monitor='val_loss', factor=0.2, patience=5, min_lr=0.0001) model.compile(loss='mse', optimizer=keras.optimizers.Adam(lr=0.001)) from keras.models import load_model #model = load_weights('model_running.h5') boardgames = [] whowon = [] def train(boardgames, whowon): global model makeModel() model.fit(boardgames, whowon, epochs=100, validation_split=0.2, shuffle=True, verbose=1, callbacks=[tbCallBack, checkpointCallback, reduce_lr]) # board[0,:,:] is for computer player. 0 if there's no piece and 1 if there is # board[1,:,:] is for other player. 0 if there's no piece and 1 if there is def find_next_best_move(board): global model
Add initial start - doesn't work#!/usr/bin/env python3 import os import csv import cv2 import numpy as np import sklearn import copy import random import keras import tensorflow as tf from keras.preprocessing import image from keras.models import Model, Sequential from keras.layers import Flatten, Dense, Dropout from keras.layers.convolutional import Conv2D from keras.layers.core import Lambda from sklearn.model_selection import train_test_split from skimage import draw from functools import lru_cache import matplotlib.pyplot as plt from helper import * # Take an array of boards, and array of who won - 0 if computer, 1 if human model = None def makeModel(): global model if model != None: return inputs = keras.layers.Input(shape=(2,3,3)) output = Flatten()(inputs) output = Dense(100, activation='relu')(inputs) output = Dropout(0.5)(output) output = Dense(50, activation='relu')(output) output = Dropout(0.5)(output) output = Dense(20, activation='relu')(output) output = Dropout(0.5)(output) output = Dense(1, activation='relu', use_bias=False)(output) model = Model(inputs=inputs, outputs=output) tbCallBack = keras.callbacks.TensorBoard( log_dir='./log', histogram_freq=1, write_graph=True, write_images=True, embeddings_freq=1, embeddings_layer_names=None, embeddings_metadata=None) checkpointCallback = keras.callbacks.ModelCheckpoint( 'model_running.h5', monitor='val_loss', verbose=1, save_best_only=True, save_weights_only=False, mode='auto', period=1) reduce_lr = keras.callbacks.ReduceLROnPlateau( monitor='val_loss', factor=0.2, patience=5, min_lr=0.0001) model.compile(loss='mse', optimizer=keras.optimizers.Adam(lr=0.001)) from keras.models import load_model #model = load_weights('model_running.h5') boardgames = [] whowon = [] def train(boardgames, whowon): global model makeModel() model.fit(boardgames, whowon, epochs=100, validation_split=0.2, shuffle=True, verbose=1, callbacks=[tbCallBack, checkpointCallback, reduce_lr]) # board[0,:,:] is for computer player. 0 if there's no piece and 1 if there is # board[1,:,:] is for other player. 0 if there's no piece and 1 if there is def find_next_best_move(board): global model
<commit_before><commit_msg>Add initial start - doesn't work<commit_after>#!/usr/bin/env python3 import os import csv import cv2 import numpy as np import sklearn import copy import random import keras import tensorflow as tf from keras.preprocessing import image from keras.models import Model, Sequential from keras.layers import Flatten, Dense, Dropout from keras.layers.convolutional import Conv2D from keras.layers.core import Lambda from sklearn.model_selection import train_test_split from skimage import draw from functools import lru_cache import matplotlib.pyplot as plt from helper import * # Take an array of boards, and array of who won - 0 if computer, 1 if human model = None def makeModel(): global model if model != None: return inputs = keras.layers.Input(shape=(2,3,3)) output = Flatten()(inputs) output = Dense(100, activation='relu')(inputs) output = Dropout(0.5)(output) output = Dense(50, activation='relu')(output) output = Dropout(0.5)(output) output = Dense(20, activation='relu')(output) output = Dropout(0.5)(output) output = Dense(1, activation='relu', use_bias=False)(output) model = Model(inputs=inputs, outputs=output) tbCallBack = keras.callbacks.TensorBoard( log_dir='./log', histogram_freq=1, write_graph=True, write_images=True, embeddings_freq=1, embeddings_layer_names=None, embeddings_metadata=None) checkpointCallback = keras.callbacks.ModelCheckpoint( 'model_running.h5', monitor='val_loss', verbose=1, save_best_only=True, save_weights_only=False, mode='auto', period=1) reduce_lr = keras.callbacks.ReduceLROnPlateau( monitor='val_loss', factor=0.2, patience=5, min_lr=0.0001) model.compile(loss='mse', optimizer=keras.optimizers.Adam(lr=0.001)) from keras.models import load_model #model = load_weights('model_running.h5') boardgames = [] whowon = [] def train(boardgames, whowon): global model makeModel() model.fit(boardgames, whowon, epochs=100, validation_split=0.2, shuffle=True, verbose=1, callbacks=[tbCallBack, checkpointCallback, reduce_lr]) # board[0,:,:] is for computer player. 0 if there's no piece and 1 if there is # board[1,:,:] is for other player. 0 if there's no piece and 1 if there is def find_next_best_move(board): global model
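A note on the play.py record above: its own commit message says the code doesn't work, and two problems are visible in the snippet. In makeModel(), the Flatten layer's output is computed but discarded, because the first Dense layer is applied to inputs rather than to output; and the callbacks (tbCallBack, checkpointCallback, reduce_lr) are local to makeModel() yet referenced in train(), which would raise a NameError. A sketch of the presumably intended wiring for the first issue; this is an assumption about intent, not the committed code:

import keras
from keras.layers import Flatten, Dense

inputs = keras.layers.Input(shape=(2, 3, 3))
output = Flatten()(inputs)                       # flatten the 2x3x3 board encoding
output = Dense(100, activation='relu')(output)   # consume the flattened tensor, not `inputs`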
4ce3502e1623ca24e43e01e4c580ee327e6192fa
django_extensions/management/commands/generate_secret_key.py
django_extensions/management/commands/generate_secret_key.py
# -*- coding: utf-8 -*- from random import choice from django.core.management.base import BaseCommand from django_extensions.management.utils import signalcommand class Command(BaseCommand): help = "Generates a new SECRET_KEY that can be used in a project settings file." requires_system_checks = False @signalcommand def handle(self, *args, **options): return ''.join([choice('abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50)])
# -*- coding: utf-8 -*- from random import choice from django.core.management.base import BaseCommand from django.core.management.utils import get_random_secret_key from django_extensions.management.utils import signalcommand class Command(BaseCommand): help = "Generates a new SECRET_KEY that can be used in a project settings file." requires_system_checks = False @signalcommand def handle(self, *args, **options): return get_random_secret_key()
Use same algo to generate SECRET_KEY as Django
Use same algo to generate SECRET_KEY as Django Using random from standard library is not cryptographically secure.
Python
mit
haakenlid/django-extensions,haakenlid/django-extensions,django-extensions/django-extensions,linuxmaniac/django-extensions,linuxmaniac/django-extensions,django-extensions/django-extensions,haakenlid/django-extensions,django-extensions/django-extensions,linuxmaniac/django-extensions
# -*- coding: utf-8 -*- from random import choice from django.core.management.base import BaseCommand from django_extensions.management.utils import signalcommand class Command(BaseCommand): help = "Generates a new SECRET_KEY that can be used in a project settings file." requires_system_checks = False @signalcommand def handle(self, *args, **options): return ''.join([choice('abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50)]) Use same algo to generate SECRET_KEY as Django Using random from standard library is not cryptographically secure.
# -*- coding: utf-8 -*- from random import choice from django.core.management.base import BaseCommand from django.core.management.utils import get_random_secret_key from django_extensions.management.utils import signalcommand class Command(BaseCommand): help = "Generates a new SECRET_KEY that can be used in a project settings file." requires_system_checks = False @signalcommand def handle(self, *args, **options): return get_random_secret_key()
<commit_before># -*- coding: utf-8 -*- from random import choice from django.core.management.base import BaseCommand from django_extensions.management.utils import signalcommand class Command(BaseCommand): help = "Generates a new SECRET_KEY that can be used in a project settings file." requires_system_checks = False @signalcommand def handle(self, *args, **options): return ''.join([choice('abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50)]) <commit_msg>Use same algo to generate SECRET_KEY as Django Using random from standard library is not cryptographically secure.<commit_after>
# -*- coding: utf-8 -*- from random import choice from django.core.management.base import BaseCommand from django.core.management.utils import get_random_secret_key from django_extensions.management.utils import signalcommand class Command(BaseCommand): help = "Generates a new SECRET_KEY that can be used in a project settings file." requires_system_checks = False @signalcommand def handle(self, *args, **options): return get_random_secret_key()
# -*- coding: utf-8 -*- from random import choice from django.core.management.base import BaseCommand from django_extensions.management.utils import signalcommand class Command(BaseCommand): help = "Generates a new SECRET_KEY that can be used in a project settings file." requires_system_checks = False @signalcommand def handle(self, *args, **options): return ''.join([choice('abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50)]) Use same algo to generate SECRET_KEY as Django Using random from standard library is not cryptographically secure.# -*- coding: utf-8 -*- from random import choice from django.core.management.base import BaseCommand from django.core.management.utils import get_random_secret_key from django_extensions.management.utils import signalcommand class Command(BaseCommand): help = "Generates a new SECRET_KEY that can be used in a project settings file." requires_system_checks = False @signalcommand def handle(self, *args, **options): return get_random_secret_key()
<commit_before># -*- coding: utf-8 -*- from random import choice from django.core.management.base import BaseCommand from django_extensions.management.utils import signalcommand class Command(BaseCommand): help = "Generates a new SECRET_KEY that can be used in a project settings file." requires_system_checks = False @signalcommand def handle(self, *args, **options): return ''.join([choice('abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50)]) <commit_msg>Use same algo to generate SECRET_KEY as Django Using random from standard library is not cryptographically secure.<commit_after># -*- coding: utf-8 -*- from random import choice from django.core.management.base import BaseCommand from django.core.management.utils import get_random_secret_key from django_extensions.management.utils import signalcommand class Command(BaseCommand): help = "Generates a new SECRET_KEY that can be used in a project settings file." requires_system_checks = False @signalcommand def handle(self, *args, **options): return get_random_secret_key()
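A note on the secret-key record above: django.core.management.utils.get_random_secret_key() returns a 50-character key drawn from a cryptographically secure random source, which is exactly what the commit message asks for; the rewritten module does, however, keep a now-unused `from random import choice` import. Typical standalone usage of the helper, using the standard Django API rather than anything added here:

from django.core.management.utils import get_random_secret_key

# Prints a key suitable for SECRET_KEY in a Django settings file.
print(get_random_secret_key())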
6fcffc327b641381ee960343adebb80688e90892
examples/svm/plot_weighted_classes.py
examples/svm/plot_weighted_classes.py
""" ================================================ SVM: Separating hyperplane with weighted classes ================================================ """ import numpy as np import pylab as pl from scikits.learn import svm # we create 40 separable points np.random.seed(0) nsamples_1 = 1000 nsamples_2 = 100 X = np.r_[1.5*np.random.randn(nsamples_1, 2), 0.5*np.random.randn(nsamples_2, 2) + [2, 2]] Y = [0]*(nsamples_1) + [1]*(nsamples_2) # fit the model and get the separating hyperplane clf = svm.SVC(kernel='linear') clf.fit(X, Y) w = clf.coef_[0] a = -w[0]/w[1] xx = np.linspace(-5, 5) yy = a*xx - (clf.intercept_[0])/w[1] # get the separating hyperplane using weighted classes wclf = svm.SVC(kernel='linear') wclf.fit(X, Y, {1: 10}) ww = wclf.coef_[0] wa = -ww[0]/ww[1] wyy = wa*xx - (wclf.intercept_[0])/ww[1] # plot separating hyperplanes and samples pl.set_cmap(pl.cm.Paired) pl.plot(xx, yy, 'k-') pl.plot(xx, wyy, 'k--') pl.scatter(X[:,0], X[:,1], c=Y) pl.axis('tight') pl.show()
Add an example of svm using weighted classes
Add an example of svm using weighted classes
Python
bsd-3-clause
davidgbe/scikit-learn,xuewei4d/scikit-learn,UNR-AERIAL/scikit-learn,siutanwong/scikit-learn,florian-f/sklearn,tmhm/scikit-learn,aminert/scikit-learn,gotomypc/scikit-learn,arjoly/scikit-learn,thientu/scikit-learn,cwu2011/scikit-learn,untom/scikit-learn,JPFrancoia/scikit-learn,thilbern/scikit-learn,mehdidc/scikit-learn,theoryno3/scikit-learn,kylerbrown/scikit-learn,jayflo/scikit-learn,jorge2703/scikit-learn,shenzebang/scikit-learn,ZenDevelopmentSystems/scikit-learn,abimannans/scikit-learn,yanlend/scikit-learn,sergeyf/scikit-learn,espg/scikit-learn,zorroblue/scikit-learn,iismd17/scikit-learn,PrashntS/scikit-learn,Windy-Ground/scikit-learn,joshloyal/scikit-learn,nhejazi/scikit-learn,saiwing-yeung/scikit-learn,ldirer/scikit-learn,pnedunuri/scikit-learn,NunoEdgarGub1/scikit-learn,krez13/scikit-learn,simon-pepin/scikit-learn,nvoron23/scikit-learn,3manuek/scikit-learn,andaag/scikit-learn,rvraghav93/scikit-learn,PatrickChrist/scikit-learn,Fireblend/scikit-learn,kmike/scikit-learn,sinhrks/scikit-learn,plissonf/scikit-learn,IshankGulati/scikit-learn,arahuja/scikit-learn,costypetrisor/scikit-learn,simon-pepin/scikit-learn,stylianos-kampakis/scikit-learn,jmschrei/scikit-learn,YinongLong/scikit-learn,tawsifkhan/scikit-learn,pypot/scikit-learn,yunfeilu/scikit-learn,ZenDevelopmentSystems/scikit-learn,andaag/scikit-learn,jblackburne/scikit-learn,JeanKossaifi/scikit-learn,victorbergelin/scikit-learn,mattilyra/scikit-learn,abimannans/scikit-learn,pkruskal/scikit-learn,jaidevd/scikit-learn,shahankhatch/scikit-learn,kashif/scikit-learn,Jimmy-Morzaria/scikit-learn,bnaul/scikit-learn,bikong2/scikit-learn,vermouthmjl/scikit-learn,florian-f/sklearn,jakirkham/scikit-learn,mlyundin/scikit-learn,depet/scikit-learn,treycausey/scikit-learn,carrillo/scikit-learn,anurag313/scikit-learn,NunoEdgarGub1/scikit-learn,walterreade/scikit-learn,mjudsp/Tsallis,roxyboy/scikit-learn,xzh86/scikit-learn,arahuja/scikit-learn,hugobowne/scikit-learn,anurag313/scikit-learn,rajat1994/scikit-learn,lbishal/scikit-learn,hugobowne/scikit-learn,MechCoder/scikit-learn,nmayorov/scikit-learn,rohanp/scikit-learn,walterreade/scikit-learn,espg/scikit-learn,PatrickOReilly/scikit-learn,ilyes14/scikit-learn,altairpearl/scikit-learn,manhhomienbienthuy/scikit-learn,wazeerzulfikar/scikit-learn,DonBeo/scikit-learn,sergeyf/scikit-learn,akionakamura/scikit-learn,mjgrav2001/scikit-learn,michigraber/scikit-learn,Adai0808/scikit-learn,Garrett-R/scikit-learn,russel1237/scikit-learn,spallavolu/scikit-learn,schets/scikit-learn,Titan-C/scikit-learn,herilalaina/scikit-learn,moutai/scikit-learn,CVML/scikit-learn,cybernet14/scikit-learn,rahuldhote/scikit-learn,gclenaghan/scikit-learn,mhdella/scikit-learn,davidgbe/scikit-learn,CVML/scikit-learn,xyguo/scikit-learn,eickenberg/scikit-learn,beepee14/scikit-learn,sarahgrogan/scikit-learn,robin-lai/scikit-learn,ChanderG/scikit-learn,carrillo/scikit-learn,xiaoxiamii/scikit-learn,depet/scikit-learn,cwu2011/scikit-learn,poryfly/scikit-learn,giorgiop/scikit-learn,liangz0707/scikit-learn,vshtanko/scikit-learn,walterreade/scikit-learn,kjung/scikit-learn,wanggang3333/scikit-learn,krez13/scikit-learn,mattgiguere/scikit-learn,imaculate/scikit-learn,NunoEdgarGub1/scikit-learn,ZenDevelopmentSystems/scikit-learn,dsquareindia/scikit-learn,aabadie/scikit-learn,pompiduskus/scikit-learn,lenovor/scikit-learn,treycausey/scikit-learn,q1ang/scikit-learn,dhruv13J/scikit-learn,spallavolu/scikit-learn,pnedunuri/scikit-learn,ssaeger/scikit-learn,Lawrence-Liu/scikit-learn,liyu1990/sklearn,mhdella/scikit-learn,pythonvietnam/scikit-learn,appapantula/sc
ikit-learn,CVML/scikit-learn,terkkila/scikit-learn,nelson-liu/scikit-learn,sgenoud/scikit-learn,samuel1208/scikit-learn,Barmaley-exe/scikit-learn,manashmndl/scikit-learn,ningchi/scikit-learn,jorik041/scikit-learn,scikit-learn/scikit-learn,ndingwall/scikit-learn,evgchz/scikit-learn,vinayak-mehta/scikit-learn,ycaihua/scikit-learn,hsuantien/scikit-learn,rohanp/scikit-learn,ndingwall/scikit-learn,samzhang111/scikit-learn,manashmndl/scikit-learn,maheshakya/scikit-learn,gotomypc/scikit-learn,giorgiop/scikit-learn,xwolf12/scikit-learn,rishikksh20/scikit-learn,mjgrav2001/scikit-learn,procoder317/scikit-learn,Sentient07/scikit-learn,sergeyf/scikit-learn,abhishekgahlot/scikit-learn,loli/sklearn-ensembletrees,ominux/scikit-learn,NelisVerhoef/scikit-learn,phdowling/scikit-learn,madjelan/scikit-learn,xiaoxiamii/scikit-learn,dsullivan7/scikit-learn,ChanChiChoi/scikit-learn,vivekmishra1991/scikit-learn,mrshu/scikit-learn,jzt5132/scikit-learn,giorgiop/scikit-learn,sarahgrogan/scikit-learn,nelson-liu/scikit-learn,nesterione/scikit-learn,RPGOne/scikit-learn,hdmetor/scikit-learn,belltailjp/scikit-learn,alexsavio/scikit-learn,yunfeilu/scikit-learn,macks22/scikit-learn,procoder317/scikit-learn,qifeigit/scikit-learn,scikit-learn/scikit-learn,murali-munna/scikit-learn,btabibian/scikit-learn,fredhusser/scikit-learn,lucidfrontier45/scikit-learn,zorojean/scikit-learn,jpautom/scikit-learn,terkkila/scikit-learn,jereze/scikit-learn,gotomypc/scikit-learn,trungnt13/scikit-learn,lin-credible/scikit-learn,NelisVerhoef/scikit-learn,adamgreenhall/scikit-learn,carrillo/scikit-learn,mayblue9/scikit-learn,Windy-Ground/scikit-learn,rohanp/scikit-learn,equialgo/scikit-learn,deepesch/scikit-learn,depet/scikit-learn,mfjb/scikit-learn,shusenl/scikit-learn,schets/scikit-learn,massmutual/scikit-learn,liberatorqjw/scikit-learn,samuel1208/scikit-learn,joshloyal/scikit-learn,Fireblend/scikit-learn,r-mart/scikit-learn,JPFrancoia/scikit-learn,dingocuster/scikit-learn,anurag313/scikit-learn,mjudsp/Tsallis,shahankhatch/scikit-learn,beepee14/scikit-learn,jmschrei/scikit-learn,rsivapr/scikit-learn,olologin/scikit-learn,shahankhatch/scikit-learn,jorik041/scikit-learn,Akshay0724/scikit-learn,kaichogami/scikit-learn,mayblue9/scikit-learn,chrsrds/scikit-learn,mrshu/scikit-learn,fzalkow/scikit-learn,wazeerzulfikar/scikit-learn,zhenv5/scikit-learn,nvoron23/scikit-learn,shyamalschandra/scikit-learn,belltailjp/scikit-learn,jakobworldpeace/scikit-learn,r-mart/scikit-learn,murali-munna/scikit-learn,ngoix/OCRF,aflaxman/scikit-learn,jpautom/scikit-learn,michigraber/scikit-learn,hainm/scikit-learn,adamgreenhall/scikit-learn,jakobworldpeace/scikit-learn,maheshakya/scikit-learn,potash/scikit-learn,smartscheduling/scikit-learn-categorical-tree,rajat1994/scikit-learn,lin-credible/scikit-learn,quheng/scikit-learn,macks22/scikit-learn,zaxtax/scikit-learn,sgenoud/scikit-learn,ElDeveloper/scikit-learn,loli/sklearn-ensembletrees,lbishal/scikit-learn,AlexRobson/scikit-learn,Srisai85/scikit-learn,simon-pepin/scikit-learn,zaxtax/scikit-learn,jakirkham/scikit-learn,fyffyt/scikit-learn,nvoron23/scikit-learn,jorik041/scikit-learn,costypetrisor/scikit-learn,ishanic/scikit-learn,Jimmy-Morzaria/scikit-learn,mikebenfield/scikit-learn,rajat1994/scikit-learn,0asa/scikit-learn,herilalaina/scikit-learn,ngoix/OCRF,terkkila/scikit-learn,mugizico/scikit-learn,zorroblue/scikit-learn,ilyes14/scikit-learn,elkingtonmcb/scikit-learn,xavierwu/scikit-learn,ankurankan/scikit-learn,harshaneelhg/scikit-learn,jorik041/scikit-learn,maheshakya/scikit-learn,Sentient07/scikit-learn,ilyes14/scikit
-learn,bthirion/scikit-learn,vigilv/scikit-learn,AnasGhrab/scikit-learn,wazeerzulfikar/scikit-learn,RayMick/scikit-learn,marcocaccin/scikit-learn,LohithBlaze/scikit-learn,gotomypc/scikit-learn,jlegendary/scikit-learn,kashif/scikit-learn,costypetrisor/scikit-learn,Obus/scikit-learn,lazywei/scikit-learn,xavierwu/scikit-learn,ssaeger/scikit-learn,CforED/Machine-Learning,rishikksh20/scikit-learn,0asa/scikit-learn,liangz0707/scikit-learn,IndraVikas/scikit-learn,JeanKossaifi/scikit-learn,tmhm/scikit-learn,sonnyhu/scikit-learn,liberatorqjw/scikit-learn,xavierwu/scikit-learn,jpautom/scikit-learn,betatim/scikit-learn,Djabbz/scikit-learn,kevin-intel/scikit-learn,Vimos/scikit-learn,JeanKossaifi/scikit-learn,ogrisel/scikit-learn,harshaneelhg/scikit-learn,nesterione/scikit-learn,icdishb/scikit-learn,sanketloke/scikit-learn,sinhrks/scikit-learn,fredhusser/scikit-learn,bikong2/scikit-learn,glennq/scikit-learn,IssamLaradji/scikit-learn,elkingtonmcb/scikit-learn,kashif/scikit-learn,henridwyer/scikit-learn,bnaul/scikit-learn,Vimos/scikit-learn,fzalkow/scikit-learn,fengzhyuan/scikit-learn,RachitKansal/scikit-learn,HolgerPeters/scikit-learn,Adai0808/scikit-learn,hsiaoyi0504/scikit-learn,jmetzen/scikit-learn,lenovor/scikit-learn,Aasmi/scikit-learn,deepesch/scikit-learn,ZENGXH/scikit-learn,Titan-C/scikit-learn,nrhine1/scikit-learn,Vimos/scikit-learn,RachitKansal/scikit-learn,victorbergelin/scikit-learn,wanggang3333/scikit-learn,dsullivan7/scikit-learn,rsivapr/scikit-learn,elkingtonmcb/scikit-learn,ldirer/scikit-learn,akionakamura/scikit-learn,shenzebang/scikit-learn,pompiduskus/scikit-learn,mwv/scikit-learn,jkarnows/scikit-learn,NelisVerhoef/scikit-learn,sumspr/scikit-learn,Windy-Ground/scikit-learn,rahul-c1/scikit-learn,IssamLaradji/scikit-learn,phdowling/scikit-learn,simon-pepin/scikit-learn,fbagirov/scikit-learn,terkkila/scikit-learn,billy-inn/scikit-learn,herilalaina/scikit-learn,henridwyer/scikit-learn,ashhher3/scikit-learn,elkingtonmcb/scikit-learn,treycausey/scikit-learn,zaxtax/scikit-learn,ldirer/scikit-learn,Djabbz/scikit-learn,mattgiguere/scikit-learn,evgchz/scikit-learn,IndraVikas/scikit-learn,shenzebang/scikit-learn,victorbergelin/scikit-learn,cainiaocome/scikit-learn,cainiaocome/scikit-learn,ltiao/scikit-learn,jereze/scikit-learn,nikitasingh981/scikit-learn,h2educ/scikit-learn,PatrickOReilly/scikit-learn,anntzer/scikit-learn,themrmax/scikit-learn,tosolveit/scikit-learn,jzt5132/scikit-learn,larsmans/scikit-learn,abhishekkrthakur/scikit-learn,khkaminska/scikit-learn,mblondel/scikit-learn,billy-inn/scikit-learn,DonBeo/scikit-learn,zhenv5/scikit-learn,nhejazi/scikit-learn,rsivapr/scikit-learn,AnasGhrab/scikit-learn,lucidfrontier45/scikit-learn,sanketloke/scikit-learn,andrewnc/scikit-learn,bhargav/scikit-learn,shikhardb/scikit-learn,mugizico/scikit-learn,tmhm/scikit-learn,aminert/scikit-learn,lesteve/scikit-learn,ningchi/scikit-learn,jereze/scikit-learn,mfjb/scikit-learn,altairpearl/scikit-learn,manashmndl/scikit-learn,deepesch/scikit-learn,shangwuhencc/scikit-learn,eickenberg/scikit-learn,ChanChiChoi/scikit-learn,untom/scikit-learn,icdishb/scikit-learn,robin-lai/scikit-learn,kagayakidan/scikit-learn,russel1237/scikit-learn,jayflo/scikit-learn,Jimmy-Morzaria/scikit-learn,Myasuka/scikit-learn,themrmax/scikit-learn,BiaDarkia/scikit-learn,etkirsch/scikit-learn,mblondel/scikit-learn,quheng/scikit-learn,ivannz/scikit-learn,r-mart/scikit-learn,JsNoNo/scikit-learn,theoryno3/scikit-learn,RayMick/scikit-learn,shyamalschandra/scikit-learn,arabenjamin/scikit-learn,imaculate/scikit-learn,ashhher3/scikit-learn,huzq/s
cikit-learn,ycaihua/scikit-learn,heli522/scikit-learn,shyamalschandra/scikit-learn,mattilyra/scikit-learn,0x0all/scikit-learn,thilbern/scikit-learn,kmike/scikit-learn,idlead/scikit-learn,djgagne/scikit-learn,nesterione/scikit-learn,wzbozon/scikit-learn,mwv/scikit-learn,vibhorag/scikit-learn,ky822/scikit-learn,andrewnc/scikit-learn,vinayak-mehta/scikit-learn,kaichogami/scikit-learn,joernhees/scikit-learn,jaidevd/scikit-learn,ashhher3/scikit-learn,nikitasingh981/scikit-learn,khkaminska/scikit-learn,tawsifkhan/scikit-learn,ishanic/scikit-learn,MartinSavc/scikit-learn,kylerbrown/scikit-learn,xubenben/scikit-learn,yyjiang/scikit-learn,aetilley/scikit-learn,manhhomienbienthuy/scikit-learn,wzbozon/scikit-learn,sinhrks/scikit-learn,fyffyt/scikit-learn,Nyker510/scikit-learn,ngoix/OCRF,etkirsch/scikit-learn,BiaDarkia/scikit-learn,gclenaghan/scikit-learn,vshtanko/scikit-learn,q1ang/scikit-learn,kaichogami/scikit-learn,idlead/scikit-learn,0x0all/scikit-learn,abimannans/scikit-learn,ltiao/scikit-learn,quheng/scikit-learn,fabioticconi/scikit-learn,abhishekkrthakur/scikit-learn,0asa/scikit-learn,florian-f/sklearn,smartscheduling/scikit-learn-categorical-tree,madjelan/scikit-learn,Akshay0724/scikit-learn,LohithBlaze/scikit-learn,poryfly/scikit-learn,yanlend/scikit-learn,Myasuka/scikit-learn,ssaeger/scikit-learn,ogrisel/scikit-learn,jm-begon/scikit-learn,shangwuhencc/scikit-learn,equialgo/scikit-learn,UNR-AERIAL/scikit-learn,madjelan/scikit-learn,eg-zhang/scikit-learn,hitszxp/scikit-learn,dsquareindia/scikit-learn,bikong2/scikit-learn,Obus/scikit-learn,hrjn/scikit-learn,devanshdalal/scikit-learn,ClimbsRocks/scikit-learn,RachitKansal/scikit-learn,tomlof/scikit-learn,pkruskal/scikit-learn,huzq/scikit-learn,RomainBrault/scikit-learn,RayMick/scikit-learn,zuku1985/scikit-learn,tosolveit/scikit-learn,0x0all/scikit-learn,belltailjp/scikit-learn,shangwuhencc/scikit-learn,alexeyum/scikit-learn,wlamond/scikit-learn,tosolveit/scikit-learn,loli/sklearn-ensembletrees,btabibian/scikit-learn,AlexRobson/scikit-learn,nhejazi/scikit-learn,mojoboss/scikit-learn,saiwing-yeung/scikit-learn,hainm/scikit-learn,Windy-Ground/scikit-learn,IssamLaradji/scikit-learn,rohanp/scikit-learn,phdowling/scikit-learn,jzt5132/scikit-learn,466152112/scikit-learn,cauchycui/scikit-learn,eg-zhang/scikit-learn,f3r/scikit-learn,vermouthmjl/scikit-learn,roxyboy/scikit-learn,vermouthmjl/scikit-learn,frank-tancf/scikit-learn,hlin117/scikit-learn,lucidfrontier45/scikit-learn,fabianp/scikit-learn,rrohan/scikit-learn,heli522/scikit-learn,Obus/scikit-learn,davidgbe/scikit-learn,abhishekgahlot/scikit-learn,3manuek/scikit-learn,tawsifkhan/scikit-learn,krez13/scikit-learn,bigdataelephants/scikit-learn,B3AU/waveTree,petosegan/scikit-learn,pratapvardhan/scikit-learn,imaculate/scikit-learn,wazeerzulfikar/scikit-learn,xyguo/scikit-learn,pythonvietnam/scikit-learn,xuewei4d/scikit-learn,hainm/scikit-learn,petosegan/scikit-learn,eickenberg/scikit-learn,smartscheduling/scikit-learn-categorical-tree,zihua/scikit-learn,DSLituiev/scikit-learn,liyu1990/sklearn,manashmndl/scikit-learn,loli/semisupervisedforests,hrjn/scikit-learn,Clyde-fare/scikit-learn,anntzer/scikit-learn,lesteve/scikit-learn,tomlof/scikit-learn,thientu/scikit-learn,lazywei/scikit-learn,espg/scikit-learn,arabenjamin/scikit-learn,raghavrv/scikit-learn,Nyker510/scikit-learn,sumspr/scikit-learn,depet/scikit-learn,IshankGulati/scikit-learn,Achuth17/scikit-learn,rexshihaoren/scikit-learn,PatrickOReilly/scikit-learn,idlead/scikit-learn,cdegroc/scikit-learn,JosmanPS/scikit-learn,carrillo/scikit-learn,alvarofierr
oclavero/scikit-learn,MartinDelzant/scikit-learn,ycaihua/scikit-learn,petosegan/scikit-learn,cl4rke/scikit-learn,olologin/scikit-learn,RayMick/scikit-learn,RomainBrault/scikit-learn,jakirkham/scikit-learn,plissonf/scikit-learn,AlexandreAbraham/scikit-learn,Barmaley-exe/scikit-learn,cauchycui/scikit-learn,vigilv/scikit-learn,pkruskal/scikit-learn,xwolf12/scikit-learn,maheshakya/scikit-learn,jaidevd/scikit-learn,jjx02230808/project0223,jjx02230808/project0223,zihua/scikit-learn,pratapvardhan/scikit-learn,mugizico/scikit-learn,Titan-C/scikit-learn,nikitasingh981/scikit-learn,trungnt13/scikit-learn,mxjl620/scikit-learn,aewhatley/scikit-learn,moutai/scikit-learn,massmutual/scikit-learn,jjx02230808/project0223,cwu2011/scikit-learn,pv/scikit-learn,alexsavio/scikit-learn,JsNoNo/scikit-learn,hsiaoyi0504/scikit-learn,arjoly/scikit-learn,lesteve/scikit-learn,ChanderG/scikit-learn,chrsrds/scikit-learn,ssaeger/scikit-learn,cauchycui/scikit-learn,jseabold/scikit-learn,CforED/Machine-Learning,vybstat/scikit-learn,Nyker510/scikit-learn,billy-inn/scikit-learn,cl4rke/scikit-learn,potash/scikit-learn,loli/sklearn-ensembletrees,pratapvardhan/scikit-learn,yanlend/scikit-learn,Srisai85/scikit-learn,nmayorov/scikit-learn,zaxtax/scikit-learn,khkaminska/scikit-learn,kjung/scikit-learn,B3AU/waveTree,Akshay0724/scikit-learn,AnasGhrab/scikit-learn,jmetzen/scikit-learn,waterponey/scikit-learn,mwv/scikit-learn,Djabbz/scikit-learn,hsuantien/scikit-learn,jmetzen/scikit-learn,cainiaocome/scikit-learn,toastedcornflakes/scikit-learn,jlegendary/scikit-learn,zorojean/scikit-learn,mjudsp/Tsallis,billy-inn/scikit-learn,pianomania/scikit-learn,yonglehou/scikit-learn,fabioticconi/scikit-learn,jblackburne/scikit-learn,voxlol/scikit-learn,HolgerPeters/scikit-learn,Lawrence-Liu/scikit-learn,MohammedWasim/scikit-learn,nelson-liu/scikit-learn,xavierwu/scikit-learn,robin-lai/scikit-learn,meduz/scikit-learn,jjx02230808/project0223,alexsavio/scikit-learn,ZENGXH/scikit-learn,xubenben/scikit-learn,kmike/scikit-learn,mblondel/scikit-learn,jm-begon/scikit-learn,vshtanko/scikit-learn,LohithBlaze/scikit-learn,trungnt13/scikit-learn,henridwyer/scikit-learn,ahoyosid/scikit-learn,ElDeveloper/scikit-learn,amueller/scikit-learn,spallavolu/scikit-learn,thientu/scikit-learn,anirudhjayaraman/scikit-learn,cl4rke/scikit-learn,iismd17/scikit-learn,MatthieuBizien/scikit-learn,sonnyhu/scikit-learn,robbymeals/scikit-learn,ilo10/scikit-learn,russel1237/scikit-learn,jakirkham/scikit-learn,IndraVikas/scikit-learn,tomlof/scikit-learn,Srisai85/scikit-learn,bthirion/scikit-learn,rrohan/scikit-learn,andaag/scikit-learn,iismd17/scikit-learn,mxjl620/scikit-learn,ivannz/scikit-learn,xzh86/scikit-learn,henrykironde/scikit-learn,yyjiang/scikit-learn,IndraVikas/scikit-learn,huzq/scikit-learn,eg-zhang/scikit-learn,B3AU/waveTree,toastedcornflakes/scikit-learn,glemaitre/scikit-learn,kevin-intel/scikit-learn,JosmanPS/scikit-learn,clemkoa/scikit-learn,heli522/scikit-learn,rahul-c1/scikit-learn,beepee14/scikit-learn,voxlol/scikit-learn,clemkoa/scikit-learn,shenzebang/scikit-learn,bthirion/scikit-learn,rexshihaoren/scikit-learn,marcocaccin/scikit-learn,harshaneelhg/scikit-learn,ChanChiChoi/scikit-learn,h2educ/scikit-learn,rvraghav93/scikit-learn,mfjb/scikit-learn,marcocaccin/scikit-learn,henrykironde/scikit-learn,dsullivan7/scikit-learn,cwu2011/scikit-learn,devanshdalal/scikit-learn,glouppe/scikit-learn,PrashntS/scikit-learn,voxlol/scikit-learn,samuel1208/scikit-learn,bnaul/scikit-learn,mikebenfield/scikit-learn,betatim/scikit-learn,rishikksh20/scikit-learn,AlexanderFabisch/sci
kit-learn,mattgiguere/scikit-learn,victorbergelin/scikit-learn,samuel1208/scikit-learn,khkaminska/scikit-learn,AIML/scikit-learn,glouppe/scikit-learn,ycaihua/scikit-learn,pnedunuri/scikit-learn,larsmans/scikit-learn,glouppe/scikit-learn,vibhorag/scikit-learn,anntzer/scikit-learn,bnaul/scikit-learn,Barmaley-exe/scikit-learn,yunfeilu/scikit-learn,vigilv/scikit-learn,tmhm/scikit-learn,chrisburr/scikit-learn,glemaitre/scikit-learn,alexeyum/scikit-learn,devanshdalal/scikit-learn,yask123/scikit-learn,AIML/scikit-learn,NunoEdgarGub1/scikit-learn,rrohan/scikit-learn,wlamond/scikit-learn,rsivapr/scikit-learn,Adai0808/scikit-learn,NelisVerhoef/scikit-learn,RachitKansal/scikit-learn,poryfly/scikit-learn,fbagirov/scikit-learn,etkirsch/scikit-learn,vigilv/scikit-learn,jseabold/scikit-learn,quheng/scikit-learn,liangz0707/scikit-learn,B3AU/waveTree,hitszxp/scikit-learn,hugobowne/scikit-learn,dhruv13J/scikit-learn,frank-tancf/scikit-learn,rahul-c1/scikit-learn,joernhees/scikit-learn,fbagirov/scikit-learn,PatrickOReilly/scikit-learn,JeanKossaifi/scikit-learn,Sentient07/scikit-learn,huzq/scikit-learn,luo66/scikit-learn,wzbozon/scikit-learn,Adai0808/scikit-learn,AIML/scikit-learn,meduz/scikit-learn,zorroblue/scikit-learn,Fireblend/scikit-learn,andrewnc/scikit-learn,aetilley/scikit-learn,wlamond/scikit-learn,ycaihua/scikit-learn,hrjn/scikit-learn,bigdataelephants/scikit-learn,Clyde-fare/scikit-learn,spallavolu/scikit-learn,lazywei/scikit-learn,abhishekgahlot/scikit-learn,vybstat/scikit-learn,glemaitre/scikit-learn,justincassidy/scikit-learn,jakobworldpeace/scikit-learn,ZENGXH/scikit-learn,justincassidy/scikit-learn,pv/scikit-learn,pompiduskus/scikit-learn,hlin117/scikit-learn,appapantula/scikit-learn,waterponey/scikit-learn,q1ang/scikit-learn,fzalkow/scikit-learn,shikhardb/scikit-learn,jorge2703/scikit-learn,petosegan/scikit-learn,olologin/scikit-learn,themrmax/scikit-learn,cdegroc/scikit-learn,RPGOne/scikit-learn,fredhusser/scikit-learn,PrashntS/scikit-learn,mhue/scikit-learn,MechCoder/scikit-learn,ChanderG/scikit-learn,fengzhyuan/scikit-learn,sumspr/scikit-learn,fabianp/scikit-learn,cdegroc/scikit-learn,icdishb/scikit-learn,mehdidc/scikit-learn,hitszxp/scikit-learn,TomDLT/scikit-learn,pypot/scikit-learn,chrisburr/scikit-learn,mlyundin/scikit-learn,kmike/scikit-learn,mhdella/scikit-learn,pythonvietnam/scikit-learn,Nyker510/scikit-learn,loli/semisupervisedforests,akionakamura/scikit-learn,smartscheduling/scikit-learn-categorical-tree,sgenoud/scikit-learn,LiaoPan/scikit-learn,robin-lai/scikit-learn,tdhopper/scikit-learn,nmayorov/scikit-learn,bhargav/scikit-learn,hitszxp/scikit-learn,xwolf12/scikit-learn,frank-tancf/scikit-learn,aflaxman/scikit-learn,jblackburne/scikit-learn,alexeyum/scikit-learn,aflaxman/scikit-learn,vybstat/scikit-learn,bhargav/scikit-learn,samzhang111/scikit-learn,marcocaccin/scikit-learn,mojoboss/scikit-learn,bthirion/scikit-learn,jkarnows/scikit-learn,aewhatley/scikit-learn,hsuantien/scikit-learn,bikong2/scikit-learn,mlyundin/scikit-learn,LiaoPan/scikit-learn,sergeyf/scikit-learn,equialgo/scikit-learn,lenovor/scikit-learn,aetilley/scikit-learn,fengzhyuan/scikit-learn,zihua/scikit-learn,ky822/scikit-learn,nesterione/scikit-learn,Myasuka/scikit-learn,fabioticconi/scikit-learn,macks22/scikit-learn,mugizico/scikit-learn,mattilyra/scikit-learn,lesteve/scikit-learn,zuku1985/scikit-learn,liangz0707/scikit-learn,Aasmi/scikit-learn,BiaDarkia/scikit-learn,PatrickChrist/scikit-learn,hsiaoyi0504/scikit-learn,BiaDarkia/scikit-learn,manhhomienbienthuy/scikit-learn,deepesch/scikit-learn,loli/semisupervise
dforests,DSLituiev/scikit-learn,ilo10/scikit-learn,equialgo/scikit-learn,zuku1985/scikit-learn,vivekmishra1991/scikit-learn,ogrisel/scikit-learn,ahoyosid/scikit-learn,shusenl/scikit-learn,icdishb/scikit-learn,yask123/scikit-learn,r-mart/scikit-learn,CforED/Machine-Learning,yonglehou/scikit-learn,ngoix/OCRF,toastedcornflakes/scikit-learn,AlexandreAbraham/scikit-learn,stylianos-kampakis/scikit-learn,ltiao/scikit-learn,Achuth17/scikit-learn,UNR-AERIAL/scikit-learn,eg-zhang/scikit-learn,mehdidc/scikit-learn,yyjiang/scikit-learn,abhishekgahlot/scikit-learn,eickenberg/scikit-learn,liberatorqjw/scikit-learn,AlexanderFabisch/scikit-learn,AlexanderFabisch/scikit-learn,Clyde-fare/scikit-learn,idlead/scikit-learn,MohammedWasim/scikit-learn,ominux/scikit-learn,siutanwong/scikit-learn,chrisburr/scikit-learn,lucidfrontier45/scikit-learn,yanlend/scikit-learn,fabioticconi/scikit-learn,tosolveit/scikit-learn,betatim/scikit-learn,AlexanderFabisch/scikit-learn,MartinSavc/scikit-learn,aabadie/scikit-learn,bigdataelephants/scikit-learn,fengzhyuan/scikit-learn,potash/scikit-learn,mehdidc/scikit-learn,nomadcube/scikit-learn,waterponey/scikit-learn,mojoboss/scikit-learn,fredhusser/scikit-learn,chrisburr/scikit-learn,nrhine1/scikit-learn,joshloyal/scikit-learn,JosmanPS/scikit-learn,fabianp/scikit-learn,yask123/scikit-learn,shikhardb/scikit-learn,dhruv13J/scikit-learn,RomainBrault/scikit-learn,pnedunuri/scikit-learn,raghavrv/scikit-learn,btabibian/scikit-learn,LohithBlaze/scikit-learn,themrmax/scikit-learn,dsquareindia/scikit-learn,kagayakidan/scikit-learn,LiaoPan/scikit-learn,arabenjamin/scikit-learn,ahoyosid/scikit-learn,0x0all/scikit-learn,alvarofierroclavero/scikit-learn,Achuth17/scikit-learn,saiwing-yeung/scikit-learn,vybstat/scikit-learn,scikit-learn/scikit-learn,Achuth17/scikit-learn,RomainBrault/scikit-learn,Obus/scikit-learn,Srisai85/scikit-learn,nvoron23/scikit-learn,vortex-ape/scikit-learn,yunfeilu/scikit-learn,ilo10/scikit-learn,djgagne/scikit-learn,mhue/scikit-learn,waterponey/scikit-learn,madjelan/scikit-learn,qifeigit/scikit-learn,Fireblend/scikit-learn,shyamalschandra/scikit-learn,UNR-AERIAL/scikit-learn,f3r/scikit-learn,zorroblue/scikit-learn,hsuantien/scikit-learn,arjoly/scikit-learn,hlin117/scikit-learn,vshtanko/scikit-learn,DonBeo/scikit-learn,mrshu/scikit-learn,ankurankan/scikit-learn,pianomania/scikit-learn,vivekmishra1991/scikit-learn,HolgerPeters/scikit-learn,YinongLong/scikit-learn,mhdella/scikit-learn,treycausey/scikit-learn,ZenDevelopmentSystems/scikit-learn,kmike/scikit-learn,nmayorov/scikit-learn,justincassidy/scikit-learn,betatim/scikit-learn,IshankGulati/scikit-learn,aflaxman/scikit-learn,alexeyum/scikit-learn,mfjb/scikit-learn,HolgerPeters/scikit-learn,sonnyhu/scikit-learn,voxlol/scikit-learn,krez13/scikit-learn,B3AU/waveTree,mayblue9/scikit-learn,devanshdalal/scikit-learn,vinayak-mehta/scikit-learn,TomDLT/scikit-learn,vermouthmjl/scikit-learn,potash/scikit-learn,lbishal/scikit-learn,robbymeals/scikit-learn,kjung/scikit-learn,lenovor/scikit-learn,adamgreenhall/scikit-learn,ashhher3/scikit-learn,alvarofierroclavero/scikit-learn,aabadie/scikit-learn,cauchycui/scikit-learn,michigraber/scikit-learn,sanketloke/scikit-learn,jseabold/scikit-learn,mikebenfield/scikit-learn,jm-begon/scikit-learn,lbishal/scikit-learn,dingocuster/scikit-learn,theoryno3/scikit-learn,rexshihaoren/scikit-learn,yask123/scikit-learn,amueller/scikit-learn,wanggang3333/scikit-learn,AlexRobson/scikit-learn,joernhees/scikit-learn,ivannz/scikit-learn,florian-f/sklearn,aminert/scikit-learn,Aasmi/scikit-learn,cybernet14/sci
kit-learn,466152112/scikit-learn,iismd17/scikit-learn,PatrickChrist/scikit-learn,hsiaoyi0504/scikit-learn,JosmanPS/scikit-learn,justincassidy/scikit-learn,abhishekgahlot/scikit-learn,JsNoNo/scikit-learn,JPFrancoia/scikit-learn,arahuja/scikit-learn,glennq/scikit-learn,jorge2703/scikit-learn,gclenaghan/scikit-learn,zorojean/scikit-learn,davidgbe/scikit-learn,sanketloke/scikit-learn,akionakamura/scikit-learn,huobaowangxi/scikit-learn,aewhatley/scikit-learn,jereze/scikit-learn,chrsrds/scikit-learn,kevin-intel/scikit-learn,ephes/scikit-learn,sinhrks/scikit-learn,Jimmy-Morzaria/scikit-learn,nomadcube/scikit-learn,mjgrav2001/scikit-learn,sonnyhu/scikit-learn,shangwuhencc/scikit-learn,sarahgrogan/scikit-learn,glennq/scikit-learn,appapantula/scikit-learn,hdmetor/scikit-learn,ltiao/scikit-learn,ndingwall/scikit-learn,Myasuka/scikit-learn,anirudhjayaraman/scikit-learn,mblondel/scikit-learn,Djabbz/scikit-learn,luo66/scikit-learn,ahoyosid/scikit-learn,AnasGhrab/scikit-learn,ephes/scikit-learn,schets/scikit-learn,anirudhjayaraman/scikit-learn,MartinDelzant/scikit-learn,pianomania/scikit-learn,liyu1990/sklearn,lin-credible/scikit-learn,aminert/scikit-learn,belltailjp/scikit-learn,tdhopper/scikit-learn,moutai/scikit-learn,cl4rke/scikit-learn,yyjiang/scikit-learn,rahuldhote/scikit-learn,Vimos/scikit-learn,treycausey/scikit-learn,kjung/scikit-learn,TomDLT/scikit-learn,procoder317/scikit-learn,henrykironde/scikit-learn,Garrett-R/scikit-learn,thientu/scikit-learn,ningchi/scikit-learn,xzh86/scikit-learn,djgagne/scikit-learn,dsullivan7/scikit-learn,macks22/scikit-learn,hdmetor/scikit-learn,bigdataelephants/scikit-learn,samzhang111/scikit-learn,fyffyt/scikit-learn,ominux/scikit-learn,MohammedWasim/scikit-learn,qifeigit/scikit-learn,evgchz/scikit-learn,gclenaghan/scikit-learn,pythonvietnam/scikit-learn,ky822/scikit-learn,jzt5132/scikit-learn,lazywei/scikit-learn,trungnt13/scikit-learn,aewhatley/scikit-learn,vortex-ape/scikit-learn,MartinSavc/scikit-learn,arjoly/scikit-learn,mjudsp/Tsallis,anurag313/scikit-learn,vortex-ape/scikit-learn,CforED/Machine-Learning,pratapvardhan/scikit-learn,IssamLaradji/scikit-learn,stylianos-kampakis/scikit-learn,anntzer/scikit-learn,fyffyt/scikit-learn,nomadcube/scikit-learn,bhargav/scikit-learn,ngoix/OCRF,mhue/scikit-learn,massmutual/scikit-learn,fbagirov/scikit-learn,depet/scikit-learn,3manuek/scikit-learn,anirudhjayaraman/scikit-learn,beepee14/scikit-learn,xubenben/scikit-learn,thilbern/scikit-learn,ankurankan/scikit-learn,siutanwong/scikit-learn,andaag/scikit-learn,larsmans/scikit-learn,ivannz/scikit-learn,abimannans/scikit-learn,ominux/scikit-learn,larsmans/scikit-learn,henrykironde/scikit-learn,rvraghav93/scikit-learn,russel1237/scikit-learn,glennq/scikit-learn,alvarofierroclavero/scikit-learn,ChanChiChoi/scikit-learn,jseabold/scikit-learn,hlin117/scikit-learn,lucidfrontier45/scikit-learn,ilo10/scikit-learn,arabenjamin/scikit-learn,mrshu/scikit-learn,LiaoPan/scikit-learn,schets/scikit-learn,mikebenfield/scikit-learn,RPGOne/scikit-learn,henridwyer/scikit-learn,dhruv13J/scikit-learn,mhue/scikit-learn,florian-f/sklearn,trankmichael/scikit-learn,zhenv5/scikit-learn,raghavrv/scikit-learn,evgchz/scikit-learn,moutai/scikit-learn,adamgreenhall/scikit-learn,aetilley/scikit-learn,pypot/scikit-learn,Aasmi/scikit-learn,clemkoa/scikit-learn,mwv/scikit-learn,harshaneelhg/scikit-learn,lin-credible/scikit-learn,ky822/scikit-learn,joshloyal/scikit-learn,OshynSong/scikit-learn,saiwing-yeung/scikit-learn,liberatorqjw/scikit-learn,jkarnows/scikit-learn,altairpearl/scikit-learn,roxyboy/scikit-learn,
etkirsch/scikit-learn,robbymeals/scikit-learn,loli/sklearn-ensembletrees,roxyboy/scikit-learn,clemkoa/scikit-learn,murali-munna/scikit-learn,rexshihaoren/scikit-learn,MatthieuBizien/scikit-learn,MechCoder/scikit-learn,poryfly/scikit-learn,xiaoxiamii/scikit-learn,rahul-c1/scikit-learn,JsNoNo/scikit-learn,Garrett-R/scikit-learn,rajat1994/scikit-learn,shahankhatch/scikit-learn,imaculate/scikit-learn,xuewei4d/scikit-learn,jmetzen/scikit-learn,Garrett-R/scikit-learn,mattgiguere/scikit-learn,andrewnc/scikit-learn,sgenoud/scikit-learn,alexsavio/scikit-learn,untom/scikit-learn,Lawrence-Liu/scikit-learn,huobaowangxi/scikit-learn,rvraghav93/scikit-learn,Sentient07/scikit-learn,nhejazi/scikit-learn,mayblue9/scikit-learn,dingocuster/scikit-learn,rahuldhote/scikit-learn,mattilyra/scikit-learn,3manuek/scikit-learn,mxjl620/scikit-learn,aabadie/scikit-learn,shusenl/scikit-learn,stylianos-kampakis/scikit-learn,rsivapr/scikit-learn,YinongLong/scikit-learn,scikit-learn/scikit-learn,MartinDelzant/scikit-learn,ogrisel/scikit-learn,luo66/scikit-learn,jlegendary/scikit-learn,pkruskal/scikit-learn,nomadcube/scikit-learn,OshynSong/scikit-learn,wlamond/scikit-learn,0x0all/scikit-learn,hitszxp/scikit-learn,wanggang3333/scikit-learn,nelson-liu/scikit-learn,PatrickChrist/scikit-learn,pv/scikit-learn,joernhees/scikit-learn,Clyde-fare/scikit-learn,Lawrence-Liu/scikit-learn,466152112/scikit-learn,massmutual/scikit-learn,h2educ/scikit-learn,dingocuster/scikit-learn,samzhang111/scikit-learn,tdhopper/scikit-learn,TomDLT/scikit-learn,ldirer/scikit-learn,ZENGXH/scikit-learn,xiaoxiamii/scikit-learn,ephes/scikit-learn,abhishekkrthakur/scikit-learn,ChanderG/scikit-learn,AIML/scikit-learn,hrjn/scikit-learn,kylerbrown/scikit-learn,OshynSong/scikit-learn,ClimbsRocks/scikit-learn,robbymeals/scikit-learn,zhenv5/scikit-learn,ningchi/scikit-learn,IshankGulati/scikit-learn,tawsifkhan/scikit-learn,manhhomienbienthuy/scikit-learn,xwolf12/scikit-learn,larsmans/scikit-learn,fabianp/scikit-learn,mrshu/scikit-learn,zuku1985/scikit-learn,mxjl620/scikit-learn,jayflo/scikit-learn,glemaitre/scikit-learn,h2educ/scikit-learn,jlegendary/scikit-learn,siutanwong/scikit-learn,zihua/scikit-learn,amueller/scikit-learn,mjgrav2001/scikit-learn,zorojean/scikit-learn,jakobworldpeace/scikit-learn,tdhopper/scikit-learn,frank-tancf/scikit-learn,ankurankan/scikit-learn,arahuja/scikit-learn,qifeigit/scikit-learn,rrohan/scikit-learn,f3r/scikit-learn,espg/scikit-learn,pypot/scikit-learn,sarahgrogan/scikit-learn,mojoboss/scikit-learn,ClimbsRocks/scikit-learn,heli522/scikit-learn,ndingwall/scikit-learn,RPGOne/scikit-learn,kylerbrown/scikit-learn,MatthieuBizien/scikit-learn,appapantula/scikit-learn,jblackburne/scikit-learn,pianomania/scikit-learn,michigraber/scikit-learn,amueller/scikit-learn,huobaowangxi/scikit-learn,nrhine1/scikit-learn,q1ang/scikit-learn,rahuldhote/scikit-learn,sgenoud/scikit-learn,olologin/scikit-learn,eickenberg/scikit-learn,xuewei4d/scikit-learn,xzh86/scikit-learn,MechCoder/scikit-learn,huobaowangxi/scikit-learn,JPFrancoia/scikit-learn,vinayak-mehta/scikit-learn,ClimbsRocks/scikit-learn,ishanic/scikit-learn,vibhorag/scikit-learn,hainm/scikit-learn,glouppe/scikit-learn,yonglehou/scikit-learn,costypetrisor/scikit-learn,jkarnows/scikit-learn,pompiduskus/scikit-learn,luo66/scikit-learn,jaidevd/scikit-learn,procoder317/scikit-learn,trankmichael/scikit-learn,phdowling/scikit-learn,hugobowne/scikit-learn,mjudsp/Tsallis,cainiaocome/scikit-learn,cybernet14/scikit-learn,Titan-C/scikit-learn,xubenben/scikit-learn,mlyundin/scikit-learn,PrashntS/scikit-learn,
AlexandreAbraham/scikit-learn,btabibian/scikit-learn,djgagne/scikit-learn,plissonf/scikit-learn,altairpearl/scikit-learn,fzalkow/scikit-learn,CVML/scikit-learn,toastedcornflakes/scikit-learn,DonBeo/scikit-learn,untom/scikit-learn,f3r/scikit-learn,Barmaley-exe/scikit-learn,shusenl/scikit-learn,vortex-ape/scikit-learn,jpautom/scikit-learn,MartinSavc/scikit-learn,AlexRobson/scikit-learn,jayflo/scikit-learn,herilalaina/scikit-learn,trankmichael/scikit-learn,ngoix/OCRF,rishikksh20/scikit-learn,nrhine1/scikit-learn,Akshay0724/scikit-learn,vivekmishra1991/scikit-learn,meduz/scikit-learn,hdmetor/scikit-learn,meduz/scikit-learn,0asa/scikit-learn,pv/scikit-learn,giorgiop/scikit-learn,YinongLong/scikit-learn,xyguo/scikit-learn,yonglehou/scikit-learn,ephes/scikit-learn,ankurankan/scikit-learn,kashif/scikit-learn,wzbozon/scikit-learn,0asa/scikit-learn,walterreade/scikit-learn,nikitasingh981/scikit-learn,trankmichael/scikit-learn,MartinDelzant/scikit-learn,maheshakya/scikit-learn,AlexandreAbraham/scikit-learn,evgchz/scikit-learn,OshynSong/scikit-learn,ishanic/scikit-learn,DSLituiev/scikit-learn,chrsrds/scikit-learn,jmschrei/scikit-learn,kagayakidan/scikit-learn,kevin-intel/scikit-learn,jorge2703/scikit-learn,tomlof/scikit-learn,MohammedWasim/scikit-learn,ilyes14/scikit-learn,murali-munna/scikit-learn,cybernet14/scikit-learn,MatthieuBizien/scikit-learn,thilbern/scikit-learn,vibhorag/scikit-learn,jm-begon/scikit-learn,plissonf/scikit-learn,ElDeveloper/scikit-learn,sumspr/scikit-learn,abhishekkrthakur/scikit-learn,DSLituiev/scikit-learn,kagayakidan/scikit-learn,shikhardb/scikit-learn,dsquareindia/scikit-learn,raghavrv/scikit-learn,theoryno3/scikit-learn,kaichogami/scikit-learn,ElDeveloper/scikit-learn,liyu1990/sklearn,mattilyra/scikit-learn,jmschrei/scikit-learn,xyguo/scikit-learn,466152112/scikit-learn,cdegroc/scikit-learn,loli/semisupervisedforests,Garrett-R/scikit-learn
Add an example of svm using weighted classes
""" ================================================ SVM: Separating hyperplane with weighted classes ================================================ """ import numpy as np import pylab as pl from scikits.learn import svm # we create 40 separable points np.random.seed(0) nsamples_1 = 1000 nsamples_2 = 100 X = np.r_[1.5*np.random.randn(nsamples_1, 2), 0.5*np.random.randn(nsamples_2, 2) + [2, 2]] Y = [0]*(nsamples_1) + [1]*(nsamples_2) # fit the model and get the separating hyperplane clf = svm.SVC(kernel='linear') clf.fit(X, Y) w = clf.coef_[0] a = -w[0]/w[1] xx = np.linspace(-5, 5) yy = a*xx - (clf.intercept_[0])/w[1] # get the separating hyperplane using weighted classes wclf = svm.SVC(kernel='linear') wclf.fit(X, Y, {1: 10}) ww = wclf.coef_[0] wa = -ww[0]/ww[1] wyy = wa*xx - (wclf.intercept_[0])/ww[1] # plot separating hyperplanes and samples pl.set_cmap(pl.cm.Paired) pl.plot(xx, yy, 'k-') pl.plot(xx, wyy, 'k--') pl.scatter(X[:,0], X[:,1], c=Y) pl.axis('tight') pl.show()
b7b2d59cfb05f06b8ce4e64db90528753a003c8c
astroquery/tests/test_fermi.py
astroquery/tests/test_fermi.py
from astroquery import fermi


def test_query():
    query = fermi.FermiLAT_Query()
    result = query('M31')
    print result


if __name__ == '__main__':
    test_query()
Add a test for the Fermi query
Add a test for the Fermi query
Python
bsd-3-clause
imbasimba/astroquery,imbasimba/astroquery,ceb8/astroquery,ceb8/astroquery
b210ca7692f9aad908b9ebe9558964bcfc0f6d4d
cfp/migrations/0049_use_correct_uk_country_code.py
cfp/migrations/0049_use_correct_uk_country_code.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


def fix_conference_country_code(apps, schema_editor):
    Conference = apps.get_model("cfp", "Conference")
    for conference in Conference.objects.all():
        if conference.country == "UK":
            conference.country = "GB"
            conference.save()


class Migration(migrations.Migration):

    dependencies = [
        ('cfp', '0048_auto_20150412_0740'),
    ]

    operations = [
        migrations.RunPython(fix_conference_country_code)
    ]
Add a migration to patch up old 'UK'-coded conferences
Add a migration to patch up old 'UK'-coded conferences
Python
mit
kyleconroy/speakers,kyleconroy/speakers,kyleconroy/speakers
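The RunPython operation above has no reverse function, so migrating backwards past it raises IrreversibleError. A hedged sketch of a reversible variant — RunPython.noop is Django's stock no-op reverse; whether skipping the reverse data fix is acceptable here is an assumption:

operations = [
    # same forward pass, plus a no-op reverse so the migration
    # can be unapplied without error
    migrations.RunPython(fix_conference_country_code, migrations.RunPython.noop),
]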
707897dce2221a41292353eb127cc4d6b05bec4f
solutions/uri/1028/1028.py
solutions/uri/1028/1028.py
import sys


def gcd(a, b):
    while b > 0:
        a, b = b, a % b
    return a


n = int(input())

for line in range(n):
    a, b = map(int, input().split())
    print(gcd(a, b))
Solve Collectable Cards in python
Solve Collectable Cards in python
Python
mit
deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground
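For readers skimming the Euclidean loop in gcd above: each pass replaces (a, b) with (b, a % b) until b reaches zero, at which point a holds the greatest common divisor. A quick self-contained check:

def gcd(a, b):
    # gcd(12, 8): (12, 8) -> (8, 4) -> (4, 0) -> returns 4
    while b > 0:
        a, b = b, a % b
    return a

assert gcd(12, 8) == 4
assert gcd(7, 5) == 1  # coprime inputs reduce to 1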
edd8c163c4fc45cd77b89bf86cf13f9f9518ebec
pytac/lattice_length.py
pytac/lattice_length.py
import pytac.load_csv
import pytac.epics


def main():
    lattice = pytac.load_csv.load('VMX', pytac.epics.EpicsControlSystem())
    print lattice.get_length()


if __name__=='__main__':
    main()
Print the length of the lattice
Print the length of the lattice
Python
apache-2.0
razvanvasile/Work-Mini-Projects,razvanvasile/Work-Mini-Projects,razvanvasile/Work-Mini-Projects
15d248dfe94a69605deb8c3ab11af55213129f8e
capture_audio.py
capture_audio.py
import pyaudio

FORMAT = pyaudio.paInt16
CHANNELS = 1

def capture_buffers(num_buffers, chunk, rate, skip=None):
    if skip == None:
        skip = rate / 2

    p = pyaudio.PyAudio()

    stream = p.open(format=FORMAT,
                    channels=CHANNELS,
                    rate=rate,
                    input=True,
                    frames_per_buffer=chunk)

    # ignore some data at the beginning as it is usually weird
    if skip > 0:
        data = stream.read(skip)

    buffers = [stream.read(chunk) for i in range(0, num_buffers)]

    # close the audio stream
    stream.stop_stream()
    stream.close()
    p.terminate()

    return buffers

def capture_seconds(num_seconds, chunksize, rate, width):
    num_buffers = int(float(num_seconds * rate) / chunksize)
    return capture_buffers(num_buffers, chunksize, rate, width)
Revert "Removes unused capture audio helper."
Revert "Removes unused capture audio helper." This reverts commit 2996177950985a96e4fb4c4179cd43bd6ea53f6c.
Python
mit
Katee/quietnet,richo/quietnet
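A hedged usage sketch for the helper above; the chunk and rate values are illustrative, not taken from the commit, and a working input device is assumed:

from capture_audio import capture_buffers

# roughly 2.3 seconds of mono audio at 44.1 kHz in 1024-frame chunks;
# each list element is a bytes buffer of raw paInt16 samples
buffers = capture_buffers(100, 1024, 44100)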
Revert "Removes unused capture audio helper." This reverts commit 2996177950985a96e4fb4c4179cd43bd6ea53f6c.
import pyaudio FORMAT = pyaudio.paInt16 CHANNELS = 1 def capture_buffers(num_buffers, chunk, rate, skip=None): if skip == None: skip = rate / 2 p = pyaudio.PyAudio() stream = p.open(format=FORMAT, channels=CHANNELS, rate=rate, input=True, frames_per_buffer=chunk) # ignore some data at the beginning as it is usually weird if skip > 0: data = stream.read(skip) buffers = [stream.read(chunk) for i in range(0, num_buffers)] # close the audio stream stream.stop_stream() stream.close() p.terminate() return buffers def capture_seconds(num_seconds, chunksize, rate, width): num_buffers = int(float(num_seconds * rate) / chunksize) return capture_buffers(num_buffers, chunksize, rate, width)
<commit_before><commit_msg>Revert "Removes unused capture audio helper." This reverts commit 2996177950985a96e4fb4c4179cd43bd6ea53f6c.<commit_after>
import pyaudio FORMAT = pyaudio.paInt16 CHANNELS = 1 def capture_buffers(num_buffers, chunk, rate, skip=None): if skip == None: skip = rate / 2 p = pyaudio.PyAudio() stream = p.open(format=FORMAT, channels=CHANNELS, rate=rate, input=True, frames_per_buffer=chunk) # ignore some data at the beginning as it is usually weird if skip > 0: data = stream.read(skip) buffers = [stream.read(chunk) for i in range(0, num_buffers)] # close the audio stream stream.stop_stream() stream.close() p.terminate() return buffers def capture_seconds(num_seconds, chunksize, rate, width): num_buffers = int(float(num_seconds * rate) / chunksize) return capture_buffers(num_buffers, chunksize, rate, width)
Revert "Removes unused capture audio helper." This reverts commit 2996177950985a96e4fb4c4179cd43bd6ea53f6c.import pyaudio FORMAT = pyaudio.paInt16 CHANNELS = 1 def capture_buffers(num_buffers, chunk, rate, skip=None): if skip == None: skip = rate / 2 p = pyaudio.PyAudio() stream = p.open(format=FORMAT, channels=CHANNELS, rate=rate, input=True, frames_per_buffer=chunk) # ignore some data at the beginning as it is usually weird if skip > 0: data = stream.read(skip) buffers = [stream.read(chunk) for i in range(0, num_buffers)] # close the audio stream stream.stop_stream() stream.close() p.terminate() return buffers def capture_seconds(num_seconds, chunksize, rate, width): num_buffers = int(float(num_seconds * rate) / chunksize) return capture_buffers(num_buffers, chunksize, rate, width)
<commit_before><commit_msg>Revert "Removes unused capture audio helper." This reverts commit 2996177950985a96e4fb4c4179cd43bd6ea53f6c.<commit_after>import pyaudio FORMAT = pyaudio.paInt16 CHANNELS = 1 def capture_buffers(num_buffers, chunk, rate, skip=None): if skip == None: skip = rate / 2 p = pyaudio.PyAudio() stream = p.open(format=FORMAT, channels=CHANNELS, rate=rate, input=True, frames_per_buffer=chunk) # ignore some data at the beginning as it is usually weird if skip > 0: data = stream.read(skip) buffers = [stream.read(chunk) for i in range(0, num_buffers)] # close the audio stream stream.stop_stream() stream.close() p.terminate() return buffers def capture_seconds(num_seconds, chunksize, rate, width): num_buffers = int(float(num_seconds * rate) / chunksize) return capture_buffers(num_buffers, chunksize, rate, width)
3cbd8125028cca7ad18388f8c07202865847f242
qual/tests/test_date.py
qual/tests/test_date.py
import unittest

from datetime import date

from qual.calendars import DateWithCalendar

class TestDateWtihCalendar(unittest.TestCase):
    def setUp(self):
        date_dt = date(2010, 8, 1)
        self.date_wc = DateWithCalendar(None, date_dt)

    def test_comparisons(self):
        self.assertTrue(self.date_wc < date(2010, 8, 2))
        self.assertFalse(self.date_wc < date(2010, 7, 31))
        self.assertTrue(self.date_wc > date(2010, 7, 2))
        self.assertFalse(self.date_wc > date(2010, 8, 31))
Add test for date comparison.
Add test for date comparison.
Python
apache-2.0
jwg4/qual,jwg4/calexicon
4f0415f5cb7f8322a0738cb1d55c7102464d3aef
openedx/core/djangoapps/discussions/tests/test_views.py
openedx/core/djangoapps/discussions/tests/test_views.py
""" Test app view logic """ # pylint: disable=test-inherits-tests import unittest from django.conf import settings from django.urls import reverse from opaque_keys.edx.keys import CourseKey from rest_framework import status from rest_framework.test import APITestCase from common.djangoapps.student.tests.factories import UserFactory from lms.djangoapps.courseware.tests.factories import GlobalStaffFactory from lms.djangoapps.courseware.tests.factories import StaffFactory @unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'URLs are only configured in LMS') class ApiTest(APITestCase): """ Test basic API operations """ def setUp(self): super().setUp() self.course_key = CourseKey.from_string('course-v1:Test+Course+Configured') self.url = reverse( 'discussions', kwargs={ 'course_key_string': str(self.course_key), } ) self.password = 'password' self.user_student = UserFactory(username='dummy', password=self.password) self.user_staff_course = StaffFactory(course_key=self.course_key, password=self.password) self.user_staff_global = GlobalStaffFactory(password=self.password) class UnauthorizedApiTest(ApiTest): """ Logged-out users should _not_ have any access """ expected_response_code = status.HTTP_401_UNAUTHORIZED def test_access_get(self): response = self.client.get(self.url) assert response.status_code == self.expected_response_code def test_access_patch(self): response = self.client.patch(self.url) assert response.status_code == self.expected_response_code def test_access_post(self): response = self.client.post(self.url) assert response.status_code == self.expected_response_code def test_access_put(self): response = self.client.put(self.url) assert response.status_code == self.expected_response_code class AuthenticatedApiTest(UnauthorizedApiTest): """ Logged-in users should _not_ have any access """ expected_response_code = status.HTTP_403_FORBIDDEN def setUp(self): super().setUp() self._login() def _login(self): self.client.login(username=self.user_student.username, password=self.password) class AuthorizedApiTest(AuthenticatedApiTest): """ Global Staff should have access to all supported methods """ expected_response_code = status.HTTP_200_OK def _login(self): self.client.login(username=self.user_staff_global.username, password=self.password) def test_access_patch(self): response = self.client.patch(self.url) assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED def test_access_put(self): response = self.client.put(self.url) assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
Add tests for discussions API access
test: Add tests for discussions API access

This checks for expected API access [1]; data integrity will be
checked later [2].

This work exposes that the code currently does _not_ grant access to
_course_ staff, only _global_ staff. This is being addressed next [3].

Fix: TNL-8229 [1]

- [1] https://openedx.atlassian.net/browse/TNL-8229
- [2] https://openedx.atlassian.net/browse/TNL-8230
- [3] https://openedx.atlassian.net/browse/TNL-8231
Python
agpl-3.0
edx/edx-platform,angelapper/edx-platform,eduNEXT/edx-platform,arbrandes/edx-platform,eduNEXT/edx-platform,angelapper/edx-platform,arbrandes/edx-platform,angelapper/edx-platform,arbrandes/edx-platform,edx/edx-platform,arbrandes/edx-platform,eduNEXT/edx-platform,eduNEXT/edx-platform,edx/edx-platform,edx/edx-platform,angelapper/edx-platform
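The message above notes that course staff are not yet granted access ([3]). A hypothetical sketch of that follow-up, reusing the record's inherit-and-override pattern — this is an illustration, not the merged TNL-8231 change:

class CourseStaffApiTest(AuthorizedApiTest):
    """
    Course staff should eventually see the same access as global staff
    """

    def _login(self):
        self.client.login(username=self.user_staff_course.username, password=self.password)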
5d9d89577e34612bcdcee6b48fad3a6d615d2316
scripts/nplm-training/reduce_ngrams.py
scripts/nplm-training/reduce_ngrams.py
#!/usr/bin/env python3
"""Reduces an ngrams file for training nplm to a smaller version of it with less ngrams"""
from sys import argv

if len(argv) != 5:
    print("Wrong number of args, got: " + str(len(argv) - 1) + " expected 4.")
    print("Usage: reduce_ngrams.py INFILE OUTFILE START_IDX NGRAMS")
    exit()

INFILE = open(argv[1], 'r')
OUTFILE = open(argv[2], 'w')
START_IDX = int(argv[3])
NGRAMS = int(argv[4])

for line in INFILE:
    line = line.split()
    line = line[START_IDX:START_IDX+NGRAMS]
    linetowrite = ""
    for token in line:
        linetowrite = linetowrite + token + " "
    #Strip final empty space and add newline
    linetowrite = linetowrite[:-1]
    linetowrite = linetowrite + '\n'
    OUTFILE.write(linetowrite)

INFILE.close()
OUTFILE.close()
Add option to reduce the ngrams from an already prepared .ngrams file to train a model with a smaller number of ngrams
Add option to reduce the ngrams from an already prepared .ngrams file to train a model with a smaller number of ngrams
Python
lgpl-2.1
hychyc07/mosesdecoder,KonceptGeek/mosesdecoder,alvations/mosesdecoder,KonceptGeek/mosesdecoder,alvations/mosesdecoder,moses-smt/mosesdecoder,alvations/mosesdecoder,alvations/mosesdecoder,alvations/mosesdecoder,hychyc07/mosesdecoder,moses-smt/mosesdecoder,alvations/mosesdecoder,alvations/mosesdecoder,tofula/mosesdecoder,tofula/mosesdecoder,alvations/mosesdecoder,alvations/mosesdecoder,pjwilliams/mosesdecoder,hychyc07/mosesdecoder,alvations/mosesdecoder,KonceptGeek/mosesdecoder,moses-smt/mosesdecoder,hychyc07/mosesdecoder,KonceptGeek/mosesdecoder,pjwilliams/mosesdecoder,moses-smt/mosesdecoder,moses-smt/mosesdecoder,hychyc07/mosesdecoder,moses-smt/mosesdecoder,hychyc07/mosesdecoder,emjotde/mosesdecoder_nmt,emjotde/mosesdecoder_nmt,pjwilliams/mosesdecoder,pjwilliams/mosesdecoder,hychyc07/mosesdecoder,alvations/mosesdecoder,tofula/mosesdecoder,emjotde/mosesdecoder_nmt,tofula/mosesdecoder,KonceptGeek/mosesdecoder,tofula/mosesdecoder,pjwilliams/mosesdecoder,tofula/mosesdecoder,hychyc07/mosesdecoder,KonceptGeek/mosesdecoder,KonceptGeek/mosesdecoder,moses-smt/mosesdecoder,emjotde/mosesdecoder_nmt,emjotde/mosesdecoder_nmt,tofula/mosesdecoder,moses-smt/mosesdecoder,emjotde/mosesdecoder_nmt,emjotde/mosesdecoder_nmt,pjwilliams/mosesdecoder,hychyc07/mosesdecoder,hychyc07/mosesdecoder,moses-smt/mosesdecoder,pjwilliams/mosesdecoder,emjotde/mosesdecoder_nmt,moses-smt/mosesdecoder,pjwilliams/mosesdecoder,pjwilliams/mosesdecoder,tofula/mosesdecoder,emjotde/mosesdecoder_nmt,KonceptGeek/mosesdecoder,KonceptGeek/mosesdecoder,emjotde/mosesdecoder_nmt,tofula/mosesdecoder,pjwilliams/mosesdecoder,tofula/mosesdecoder,tofula/mosesdecoder,moses-smt/mosesdecoder,KonceptGeek/mosesdecoder
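Per the script's own usage string, a run keeps NGRAMS tokens starting at START_IDX on each line. For example (file names are placeholders), python3 reduce_ngrams.py train.ngrams train.4gram.ngrams 1 4 drops the first token of every line and writes out the next four.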
a0b1bd7c43ab6d8683fd7cb18c5a9ca692ee5e19
busstops/management/commands/update_search_indexes.py
busstops/management/commands/update_search_indexes.py
from django.core.management.base import BaseCommand
from ...search_indexes import ServiceIndex
from ...models import Service


class Command(BaseCommand):
    def handle(self, *args, **options):
        service_index = ServiceIndex()
        for service in Service.objects.filter(current=False):
            service_index.remove_object(service)
Add command to remove archived services from search index
Add command to remove archived services from search index
Python
mpl-2.0
stev-0/bustimes.org.uk,stev-0/bustimes.org.uk,jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk,stev-0/bustimes.org.uk,stev-0/bustimes.org.uk,jclgoodwin/bustimes.org.uk,stev-0/bustimes.org.uk,jclgoodwin/bustimes.org.uk
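Given the management/commands path, Django should expose this as ./manage.py update_search_indexes run from the project root — the invocation is inferred from Django's command-discovery convention rather than stated in the commit.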
41c1d4a829aa47e5403757d8670e1ed9e5b3d1f6
cityhallmonitor/migrations/0020_auto_20151214_1329.py
cityhallmonitor/migrations/0020_auto_20151214_1329.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import connection, models, migrations
import cityhallmonitor.models


def add_document_gin_index_wt(apps, schema_editor):
    Document = apps.get_model('cityhallmonitor', 'Document')
    db_table = Document._meta.db_table
    with connection.cursor() as c:
        sql = "CREATE INDEX cityhallmonitor_document_text_vector_wt_gin ON %s USING gin(text_vector_weighted)" \
            % db_table
        c.execute(sql)


class Migration(migrations.Migration):

    dependencies = [
        ('cityhallmonitor', '0019_auto_20151211_1424'),
    ]

    operations = [
        migrations.RunPython(add_document_gin_index_wt),
    ]
Add migration command to create index on weighted vector field
Add migration command to create index on weighted vector field
Python
mit
NUKnightLab/cityhallmonitor,NUKnightLab/cityhallmonitor,NUKnightLab/cityhallmonitor,NUKnightLab/cityhallmonitor
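Assuming Django's default app_model table naming (so db_table resolves to cityhallmonitor_document), the migration executes roughly CREATE INDEX cityhallmonitor_document_text_vector_wt_gin ON cityhallmonitor_document USING gin(text_vector_weighted); the table name is an inference, the rest is verbatim from the code.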
dd9411db392df3c7f5026fe3695e2ec9fc9b6dbe
FlaskMedia/forms.py
FlaskMedia/forms.py
from flask_wtf import FlaskForm as Form
from wtforms import StringField, BooleanField, TextAreaField
from wtforms.validators import DataRequired, Length


class EditMovieForm(Form):
    title = StringField('title', validators=[DataRequired()])
    plot = TextAreaField('plot', validators=[Length(min=0, max=2047)])
Edit movie form, title and plot only
Edit movie form, title and plot only
Python
mit
samcheck/PyMedia,samcheck/PyMedia,samcheck/PyMedia
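A minimal sketch of wiring the form above into a view; the route, template name, and persistence step are assumptions rather than code from the repository, while validate_on_submit is standard Flask-WTF:

from flask import Flask, render_template
from FlaskMedia.forms import EditMovieForm

app = Flask(__name__)
app.config['SECRET_KEY'] = 'change-me'  # Flask-WTF requires a key for CSRF

@app.route('/movie/edit', methods=['GET', 'POST'])
def edit_movie():
    form = EditMovieForm()
    if form.validate_on_submit():
        pass  # persist form.title.data and form.plot.data as appropriate
    return render_template('edit_movie.html', form=form)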
9dfd925f049df7ecbecdf848d1f0758f885909a3
deleteSnapshotSize8GWeekago.py
deleteSnapshotSize8GWeekago.py
import boto
import datetime
import dateutil
from dateutil import parser
from boto import ec2

connection=ec2.connect_to_region("ap-southeast-1")
snapshotsID=connection.get_all_snapshots(filters={'owner-id':611762388050,'volume-size':8})
timeLimit=datetime.datetime.now() - datetime.timedelta(days=7)
count=0
for sID in snapshotsID:
    if parser.parse(sID.start_time).date() < timeLimit.date():
        if "Created by CreateImage" in sID.description:
            print "Do thing"
        else:
            print "Deleting Snapshot %s " %(sID.id)
            connection.delete_snapshot(sID.id)
Delete snapshots of size 8 GiB that are more than a week old
Delete snapshots of size 8 GiB that are more than a week old
Python
apache-2.0
hiteshBhatia/aws-boto-scripts
9e63413e040f0e10327651bb2d54edc2df438de5
salt/states/ssh_auth.py
salt/states/ssh_auth.py
'''
Allows for state management of ssh authorized keys
'''

def present(
        name,
        user,
        enc='ssh-rsa',
        comment='',
        options=[],
        config='.ssh/authorized_keys'):
    '''
    Verifies that the specified ssh key is present for the specified user
    '''
    ret = {'name': name,
           'changes': {},
           'result': True,
           'comment': ''}
    data = __salt__['ssh.set_auth_key'](
            user,
            name,
            enc,
            comment,
            options,
            config)

    if data == 'replace':
        ret['changes'][name] = 'Updated'
        ret['comment'] = 'The authorized host key {0} for user {1} was updated'.format(name, user)
        return ret
    elif data == 'no change':
        ret['comment'] = 'The authorized host key {0} is already present for user {1}'.format(name, user)
    elif data == 'new':
        ret['changes'][name] = 'New'
        ret['comment'] = 'The authorized host key {0} for user {1} was added'.format(name, user)
    return ret
Add the ssh authorized key state, still needs absent state
Add the ssh authorized key state, still needs absent state
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
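In practice this state is keyed by the public key material in an SLS file — an ssh_auth.present entry carrying a user argument plus optional enc/comment arguments mirroring the signature above; that layout comes from general Salt convention, not from this commit.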
ba814b1f519d1a1ceb19e5fe88c6fe11737a07be
dci/alembic/versions/980e18983453_sync_database.py
dci/alembic/versions/980e18983453_sync_database.py
#
# Copyright (C) 2022 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""sync database

Revision ID: 980e18983453
Revises: 24963e101d13
Create Date: 2022-02-27 02:35:02.516736

"""

# revision identifiers, used by Alembic.
revision = "980e18983453"
down_revision = "24963e101d13"
branch_labels = None
depends_on = None

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql


def upgrade():
    op.alter_column(
        "components", "etag", existing_type=sa.VARCHAR(length=40), nullable=False
    )
    op.alter_column(
        "components", "updated_at", existing_type=postgresql.TIMESTAMP(), nullable=False
    )


def downgrade():
    op.alter_column(
        "components", "updated_at", existing_type=postgresql.TIMESTAMP(), nullable=True
    )
    op.alter_column(
        "components", "etag", existing_type=sa.VARCHAR(length=40), nullable=True
    )
Set etag and updated_at components fields not nullable
Set etag and updated_at components fields not nullable

Today, in the production database, updated_at and etag are nullable:

              Table "public.components"
         Column         |            Type             | Modifiers
------------------------+-----------------------------+-----------
 updated_at             | timestamp without time zone |
 etag                   | character varying(40)       |

But in the code they are not. In this patch we alter the columns to set
nullable to False on those two fields.

Change-Id: I2dc4d184d02c8adde1684df1b62c8269a30fb0b1
Python
apache-2.0
redhat-cip/dci-control-server,redhat-cip/dci-control-server
Set etag and updated_at components fields not nullable

Today, in the production database, updated_at and etag are nullable:

              Table "public.components"
         Column         |            Type             | Modifiers
------------------------+-----------------------------+-----------
 updated_at             | timestamp without time zone |
 etag                   | character varying(40)       |

But in the code they are not. In this patch we alter the columns to set
nullable to False on those two fields.

Change-Id: I2dc4d184d02c8adde1684df1b62c8269a30fb0b1
# # Copyright (C) 2022 Red Hat, Inc # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """sync database Revision ID: 980e18983453 Revises: 24963e101d13 Create Date: 2022-02-27 02:35:02.516736 """ # revision identifiers, used by Alembic. revision = "980e18983453" down_revision = "24963e101d13" branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql def upgrade(): op.alter_column( "components", "etag", existing_type=sa.VARCHAR(length=40), nullable=False ) op.alter_column( "components", "updated_at", existing_type=postgresql.TIMESTAMP(), nullable=False ) def downgrade(): op.alter_column( "components", "updated_at", existing_type=postgresql.TIMESTAMP(), nullable=True ) op.alter_column( "components", "etag", existing_type=sa.VARCHAR(length=40), nullable=True )
<commit_before><commit_msg>Set etag and updated_at components fields not nullable

Today, in the production database, updated_at and etag are nullable:

              Table "public.components"
         Column         |            Type             | Modifiers
------------------------+-----------------------------+-----------
 updated_at             | timestamp without time zone |
 etag                   | character varying(40)       |

But in the code they are not. In this patch we alter the columns to set
nullable to False on those two fields.

Change-Id: I2dc4d184d02c8adde1684df1b62c8269a30fb0b1<commit_after>
# # Copyright (C) 2022 Red Hat, Inc # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """sync database Revision ID: 980e18983453 Revises: 24963e101d13 Create Date: 2022-02-27 02:35:02.516736 """ # revision identifiers, used by Alembic. revision = "980e18983453" down_revision = "24963e101d13" branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql def upgrade(): op.alter_column( "components", "etag", existing_type=sa.VARCHAR(length=40), nullable=False ) op.alter_column( "components", "updated_at", existing_type=postgresql.TIMESTAMP(), nullable=False ) def downgrade(): op.alter_column( "components", "updated_at", existing_type=postgresql.TIMESTAMP(), nullable=True ) op.alter_column( "components", "etag", existing_type=sa.VARCHAR(length=40), nullable=True )
Set etag and updated_at components fields not nullable Today in production database updated_at and etag are nullable. Table "public.components" Column | Type | Modifiers ------------------------+-----------------------------+----------- updated_at | timestamp without time zone | etag | character varying(40) | But in the code they are not. In this patch we alter the column to set those 2 fields nullable to False. Change-Id: I2dc4d184d02c8adde1684df1b62c8269a30fb0b1# # Copyright (C) 2022 Red Hat, Inc # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """sync database Revision ID: 980e18983453 Revises: 24963e101d13 Create Date: 2022-02-27 02:35:02.516736 """ # revision identifiers, used by Alembic. revision = "980e18983453" down_revision = "24963e101d13" branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql def upgrade(): op.alter_column( "components", "etag", existing_type=sa.VARCHAR(length=40), nullable=False ) op.alter_column( "components", "updated_at", existing_type=postgresql.TIMESTAMP(), nullable=False ) def downgrade(): op.alter_column( "components", "updated_at", existing_type=postgresql.TIMESTAMP(), nullable=True ) op.alter_column( "components", "etag", existing_type=sa.VARCHAR(length=40), nullable=True )
<commit_before><commit_msg>Set etag and updated_at components fields not nullable Today in production database updated_at and etag are nullable. Table "public.components" Column | Type | Modifiers ------------------------+-----------------------------+----------- updated_at | timestamp without time zone | etag | character varying(40) | But in the code they are not. In this patch we alter the column to set those 2 fields nullable to False. Change-Id: I2dc4d184d02c8adde1684df1b62c8269a30fb0b1<commit_after># # Copyright (C) 2022 Red Hat, Inc # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """sync database Revision ID: 980e18983453 Revises: 24963e101d13 Create Date: 2022-02-27 02:35:02.516736 """ # revision identifiers, used by Alembic. revision = "980e18983453" down_revision = "24963e101d13" branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql def upgrade(): op.alter_column( "components", "etag", existing_type=sa.VARCHAR(length=40), nullable=False ) op.alter_column( "components", "updated_at", existing_type=postgresql.TIMESTAMP(), nullable=False ) def downgrade(): op.alter_column( "components", "updated_at", existing_type=postgresql.TIMESTAMP(), nullable=True ) op.alter_column( "components", "etag", existing_type=sa.VARCHAR(length=40), nullable=True )
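For readers new to Alembic: op.alter_column(..., nullable=False) makes the upgrade emit an ALTER TABLE ... SET NOT NULL per column. A minimal sketch, assuming a reachable PostgreSQL instance (the connection URL below is a placeholder, not the project's real one), of verifying the effect with SQLAlchemy's inspector:

from sqlalchemy import create_engine, inspect

engine = create_engine("postgresql://user:password@localhost/dci")  # placeholder URL
for column in inspect(engine).get_columns("components"):
    if column["name"] in ("etag", "updated_at"):
        # after running the upgrade, both should report nullable=False
        print(column["name"], "nullable={}".format(column["nullable"]))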
76edf4ea679ca3c117c7cff21e05f18044b5f51a
education/management/commands/schedule_all_scripts.py
education/management/commands/schedule_all_scripts.py
from django.core.management.base import BaseCommand

from education.scheduling import schedule_script
from script.models import Script


class Command(BaseCommand):
    def handle(self, **options):
        for script in Script.objects.all():
            schedule_script(script)
        self.stdout.write('Done!\n')
Add a command for scheduling all scripts.
Add a command for scheduling all scripts.
Python
bsd-3-clause
unicefuganda/edtrac,unicefuganda/edtrac,unicefuganda/edtrac
Add a command for scheduling all scripts.
from django.core.management.base import BaseCommand from education.scheduling import schedule_script from script.models import Script class Command(BaseCommand): def handle(self, **options): for script in Script.objects.all(): schedule_script(script) self.stdout.write('Done!\n')
<commit_before><commit_msg>Add a command for scheduling all scripts.<commit_after>
from django.core.management.base import BaseCommand from education.scheduling import schedule_script from script.models import Script class Command(BaseCommand): def handle(self, **options): for script in Script.objects.all(): schedule_script(script) self.stdout.write('Done!\n')
Add a command for scheduling all scripts.from django.core.management.base import BaseCommand from education.scheduling import schedule_script from script.models import Script class Command(BaseCommand): def handle(self, **options): for script in Script.objects.all(): schedule_script(script) self.stdout.write('Done!\n')
<commit_before><commit_msg>Add a command for scheduling all scripts.<commit_after>from django.core.management.base import BaseCommand from education.scheduling import schedule_script from script.models import Script class Command(BaseCommand): def handle(self, **options): for script in Script.objects.all(): schedule_script(script) self.stdout.write('Done!\n')
806f64880b7eb9ff72026f4ee2ebd12cf1ef723d
infcommon/generic_factory_test.py
infcommon/generic_factory_test.py
# -*- coding: utf-8 -*-

import sys
import types
import importlib
import traceback
import os
import fnmatch
import re
from datetime import datetime

TOTALS_TESTS_PASSED = 0
LAST_CALL = None

GREEN_COLOR = "\033[0;32m"
WHITE_COLOR = "\033[0;39m"
RED_COLOR = "\033[91m"


def find_and_call_functions_from():
    global TOTALS_TESTS_PASSED
    global LAST_CALL

    factories = []
    current_working_directory = os.getcwd()
    for root, _, filenames in os.walk(current_working_directory):
        for filename in fnmatch.filter(filenames, '*factory.py'):
            factory_relative_path = ".{}".format(os.path.join(root, filename).replace(current_working_directory, ''))
            if 'src' not in factory_relative_path and 'build' not in factory_relative_path:
                factories.append(factory_relative_path)

    initial_time = datetime.utcnow()
    for factory_file in factories:
        file_to_import = re.sub('\./.+?/', '', factory_file).replace('/', '.').replace('.py', '')
        # https://stackoverflow.com/questions/4821104/python-dynamic-instantiation-from-string-name-of-a-class-in-dynamically-imported
        a_factory = importlib.import_module(file_to_import)
        for element_name in dir(a_factory):
            element = getattr(a_factory, element_name)
            if callable(element):
                if isinstance(element, types.FunctionType) and not element_name.startswith('__'):
                    LAST_CALL = "===> Exception in Factory file: {} Testing to call: {}".format(factory_file, element_name)
                    element()
                    TOTALS_TESTS_PASSED += 1
                    sys.stdout.write(GREEN_COLOR)
                    sys.stdout.write(".")
                    sys.stdout.write(WHITE_COLOR)

    elapsed_time = datetime.utcnow() - initial_time
    print
    print GREEN_COLOR
    print "{} examples ran in {:.4f} seconds{}".format(TOTALS_TESTS_PASSED, elapsed_time.total_seconds(), WHITE_COLOR)


if __name__ == "__main__":
    try:
        find_and_call_functions_from()
        sys.exit(0)
    except Exception as exc:
        print
        print RED_COLOR
        print "{} -> {}".format(LAST_CALL, exc)
        print
        traceback.print_exc()
        print WHITE_COLOR
        print
        sys.exit(1)
Add generic factory test runner
Add generic factory test runner
Python
mit
aleasoluciones/infcommon,aleasoluciones/infcommon
Add generic factory test runner
# -*- coding: utf-8 -*- import sys import types import importlib import traceback import os import fnmatch import re from datetime import datetime TOTALS_TESTS_PASSED = 0 LAST_CALL = None GREEN_COLOR = "\033[0;32m" WHITE_COLOR = "\033[0;39m" RED_COLOR = "\033[91m" def find_and_call_functions_from(): global TOTALS_TESTS_PASSED global LAST_CALL factories = [] current_working_directory = os.getcwd() for root, _, filenames in os.walk(current_working_directory): for filename in fnmatch.filter(filenames, '*factory.py'): factory_relative_path = ".{}".format(os.path.join(root, filename).replace(current_working_directory, '')) if 'src' not in factory_relative_path and 'build' not in factory_relative_path: factories.append(factory_relative_path) initial_time = datetime.utcnow() for factory_file in factories: file_to_import = re.sub('\./.+?/', '', factory_file).replace('/', '.').replace('.py', '') #https://stackoverflow.com/questions/4821104/python-dynamic-instantiation-from-string-name-of-a-class-in-dynamically-imported a_factory = importlib.import_module(file_to_import) for element_name in dir(a_factory): element = getattr(a_factory, element_name) if callable(element): if isinstance(element, types.FunctionType) and not element_name.startswith('__'): LAST_CALL = "===> Exception in Factory file: {} Testing to call: {}".format(factory_file, element_name) element() TOTALS_TESTS_PASSED += 1 sys.stdout.write(GREEN_COLOR) sys.stdout.write(".") sys.stdout.write(WHITE_COLOR) elapsed_time = datetime.utcnow() - initial_time print print GREEN_COLOR print "{} examples ran in {:.4f} seconds{}".format(TOTALS_TESTS_PASSED, elapsed_time.total_seconds(), WHITE_COLOR) if __name__ == "__main__": try: find_and_call_functions_from() sys.exit(0) except Exception as exc: print print RED_COLOR print "{} -> {}".format(LAST_CALL, exc) print traceback.print_exc() print WHITE_COLOR print sys.exit(1)
<commit_before><commit_msg>Add generic factory test runner<commit_after>
# -*- coding: utf-8 -*- import sys import types import importlib import traceback import os import fnmatch import re from datetime import datetime TOTALS_TESTS_PASSED = 0 LAST_CALL = None GREEN_COLOR = "\033[0;32m" WHITE_COLOR = "\033[0;39m" RED_COLOR = "\033[91m" def find_and_call_functions_from(): global TOTALS_TESTS_PASSED global LAST_CALL factories = [] current_working_directory = os.getcwd() for root, _, filenames in os.walk(current_working_directory): for filename in fnmatch.filter(filenames, '*factory.py'): factory_relative_path = ".{}".format(os.path.join(root, filename).replace(current_working_directory, '')) if 'src' not in factory_relative_path and 'build' not in factory_relative_path: factories.append(factory_relative_path) initial_time = datetime.utcnow() for factory_file in factories: file_to_import = re.sub('\./.+?/', '', factory_file).replace('/', '.').replace('.py', '') #https://stackoverflow.com/questions/4821104/python-dynamic-instantiation-from-string-name-of-a-class-in-dynamically-imported a_factory = importlib.import_module(file_to_import) for element_name in dir(a_factory): element = getattr(a_factory, element_name) if callable(element): if isinstance(element, types.FunctionType) and not element_name.startswith('__'): LAST_CALL = "===> Exception in Factory file: {} Testing to call: {}".format(factory_file, element_name) element() TOTALS_TESTS_PASSED += 1 sys.stdout.write(GREEN_COLOR) sys.stdout.write(".") sys.stdout.write(WHITE_COLOR) elapsed_time = datetime.utcnow() - initial_time print print GREEN_COLOR print "{} examples ran in {:.4f} seconds{}".format(TOTALS_TESTS_PASSED, elapsed_time.total_seconds(), WHITE_COLOR) if __name__ == "__main__": try: find_and_call_functions_from() sys.exit(0) except Exception as exc: print print RED_COLOR print "{} -> {}".format(LAST_CALL, exc) print traceback.print_exc() print WHITE_COLOR print sys.exit(1)
Add generic factory test runner# -*- coding: utf-8 -*- import sys import types import importlib import traceback import os import fnmatch import re from datetime import datetime TOTALS_TESTS_PASSED = 0 LAST_CALL = None GREEN_COLOR = "\033[0;32m" WHITE_COLOR = "\033[0;39m" RED_COLOR = "\033[91m" def find_and_call_functions_from(): global TOTALS_TESTS_PASSED global LAST_CALL factories = [] current_working_directory = os.getcwd() for root, _, filenames in os.walk(current_working_directory): for filename in fnmatch.filter(filenames, '*factory.py'): factory_relative_path = ".{}".format(os.path.join(root, filename).replace(current_working_directory, '')) if 'src' not in factory_relative_path and 'build' not in factory_relative_path: factories.append(factory_relative_path) initial_time = datetime.utcnow() for factory_file in factories: file_to_import = re.sub('\./.+?/', '', factory_file).replace('/', '.').replace('.py', '') #https://stackoverflow.com/questions/4821104/python-dynamic-instantiation-from-string-name-of-a-class-in-dynamically-imported a_factory = importlib.import_module(file_to_import) for element_name in dir(a_factory): element = getattr(a_factory, element_name) if callable(element): if isinstance(element, types.FunctionType) and not element_name.startswith('__'): LAST_CALL = "===> Exception in Factory file: {} Testing to call: {}".format(factory_file, element_name) element() TOTALS_TESTS_PASSED += 1 sys.stdout.write(GREEN_COLOR) sys.stdout.write(".") sys.stdout.write(WHITE_COLOR) elapsed_time = datetime.utcnow() - initial_time print print GREEN_COLOR print "{} examples ran in {:.4f} seconds{}".format(TOTALS_TESTS_PASSED, elapsed_time.total_seconds(), WHITE_COLOR) if __name__ == "__main__": try: find_and_call_functions_from() sys.exit(0) except Exception as exc: print print RED_COLOR print "{} -> {}".format(LAST_CALL, exc) print traceback.print_exc() print WHITE_COLOR print sys.exit(1)
<commit_before><commit_msg>Add generic factory test runner<commit_after># -*- coding: utf-8 -*- import sys import types import importlib import traceback import os import fnmatch import re from datetime import datetime TOTALS_TESTS_PASSED = 0 LAST_CALL = None GREEN_COLOR = "\033[0;32m" WHITE_COLOR = "\033[0;39m" RED_COLOR = "\033[91m" def find_and_call_functions_from(): global TOTALS_TESTS_PASSED global LAST_CALL factories = [] current_working_directory = os.getcwd() for root, _, filenames in os.walk(current_working_directory): for filename in fnmatch.filter(filenames, '*factory.py'): factory_relative_path = ".{}".format(os.path.join(root, filename).replace(current_working_directory, '')) if 'src' not in factory_relative_path and 'build' not in factory_relative_path: factories.append(factory_relative_path) initial_time = datetime.utcnow() for factory_file in factories: file_to_import = re.sub('\./.+?/', '', factory_file).replace('/', '.').replace('.py', '') #https://stackoverflow.com/questions/4821104/python-dynamic-instantiation-from-string-name-of-a-class-in-dynamically-imported a_factory = importlib.import_module(file_to_import) for element_name in dir(a_factory): element = getattr(a_factory, element_name) if callable(element): if isinstance(element, types.FunctionType) and not element_name.startswith('__'): LAST_CALL = "===> Exception in Factory file: {} Testing to call: {}".format(factory_file, element_name) element() TOTALS_TESTS_PASSED += 1 sys.stdout.write(GREEN_COLOR) sys.stdout.write(".") sys.stdout.write(WHITE_COLOR) elapsed_time = datetime.utcnow() - initial_time print print GREEN_COLOR print "{} examples ran in {:.4f} seconds{}".format(TOTALS_TESTS_PASSED, elapsed_time.total_seconds(), WHITE_COLOR) if __name__ == "__main__": try: find_and_call_functions_from() sys.exit(0) except Exception as exc: print print RED_COLOR print "{} -> {}".format(LAST_CALL, exc) print traceback.print_exc() print WHITE_COLOR print sys.exit(1)
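To make the runner's discovery contract concrete: any top-level function defined in a file whose name matches *factory.py is imported and called once with no arguments, and merely has to return without raising. A hypothetical module it would pick up (the names are illustrative only, not part of the repository):

# db_client_factory.py -- hypothetical module the runner above would discover
def db_client():
    # the runner only checks that calling this raises no exception
    return {'host': 'localhost', 'port': 5432}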
517e8f16dbc24af3371a287e69c4d1361c1744f6
python_scripts/azure_sense.py
python_scripts/azure_sense.py
#!/usr/bin/env python
"""
sends temperature, humidity and pressure gathered from Sense Hat
on Raspberry Pi2 to Azure Table Storage

only Python 2 works with Azure here, not Python 3

sudo pip install azure-storage

invoke (no sudo required):
python azure_sense.py
"""

import time
from sense_hat import SenseHat
from datetime import datetime
from azure.storage.table import TableService

__author__ = "Anatoly Mironov @mirontoli"

sense = SenseHat()

table_service = TableService(account_name='tolle', account_key='ho2zakf/8rmDckS3pGOTPWwIwCzNwVJxd5hDb3R15wms2fZJG/aX53PDsTWBYsuTPwF7802IKk2QcrJ5FO7i6w==')

table_name = 'climateData'

table_service.create_table(table_name, False)

while True:
    date = datetime.now()
    iso_date = date.isoformat()
    temp = "{0:.2f}".format(sense.temp)
    humidity = "{0:.2f}".format(sense.humidity)
    pressure = "{0:.2f}".format(sense.pressure)
    entry = {'PartitionKey': 'climate', 'RowKey': iso_date, 'Temperature': temp, 'Humidity': humidity, 'Pressure': pressure}
    table_service.insert_entity(table_name, entry)
    time.sleep(60)  # wait one minute
Add script for sending sense info to azure table storage
Add script for sending sense info to azure table storage
Python
mit
mirontoli/tolle-rasp,mirontoli/tolle-rasp,mirontoli/tolle-rasp,mirontoli/tolle-rasp,mirontoli/tolle-rasp
Add script for sending sense info to azure table storage
#!/usr/bin/env python """ sends temperature, humidity and pressure gathered from Sense Hat on Raspberry Pi2 to Azure Table Storage only python works with Azure , not python3, sudo pip install azure-storage invoke (no sudo required): python azure_sense.py """ import time from sense_hat import SenseHat from datetime import datetime from azure.storage.table import TableService __author__ = "Anatoly Mironov @mirontoli" sense = SenseHat() table_service = TableService(account_name='tolle', account_key='ho2zakf/8rmDckS3pGOTPWwIwCzNwVJxd5hDb3R15wms2fZJG/aX53PDsTWBYsuTPwF7802IKk2QcrJ5FO7i6w==') table_name = 'climateData' table_service.create_table(table_name, False) while True: date = datetime.now() iso_date = date.isoformat() temp = "{0:.2f}".format(sense.temp) humidity = "{0:.2f}".format(sense.humidity) pressure = "{0:.2f}".format(sense.pressure) entry = {'PartitionKey': 'climate', 'RowKey': iso_date, 'Temperature': temp, 'Humidity':humidity, 'Pressure':pressure} table_service.insert_entity(table_name, entry) time.sleep(60) # wait one minute
<commit_before><commit_msg>Add script for sending sense info to azure table storage<commit_after>
#!/usr/bin/env python """ sends temperature, humidity and pressure gathered from Sense Hat on Raspberry Pi2 to Azure Table Storage only python works with Azure , not python3, sudo pip install azure-storage invoke (no sudo required): python azure_sense.py """ import time from sense_hat import SenseHat from datetime import datetime from azure.storage.table import TableService __author__ = "Anatoly Mironov @mirontoli" sense = SenseHat() table_service = TableService(account_name='tolle', account_key='ho2zakf/8rmDckS3pGOTPWwIwCzNwVJxd5hDb3R15wms2fZJG/aX53PDsTWBYsuTPwF7802IKk2QcrJ5FO7i6w==') table_name = 'climateData' table_service.create_table(table_name, False) while True: date = datetime.now() iso_date = date.isoformat() temp = "{0:.2f}".format(sense.temp) humidity = "{0:.2f}".format(sense.humidity) pressure = "{0:.2f}".format(sense.pressure) entry = {'PartitionKey': 'climate', 'RowKey': iso_date, 'Temperature': temp, 'Humidity':humidity, 'Pressure':pressure} table_service.insert_entity(table_name, entry) time.sleep(60) # wait one minute
Add script for sending sense info to azure table storage#!/usr/bin/env python """ sends temperature, humidity and pressure gathered from Sense Hat on Raspberry Pi2 to Azure Table Storage only python works with Azure , not python3, sudo pip install azure-storage invoke (no sudo required): python azure_sense.py """ import time from sense_hat import SenseHat from datetime import datetime from azure.storage.table import TableService __author__ = "Anatoly Mironov @mirontoli" sense = SenseHat() table_service = TableService(account_name='tolle', account_key='ho2zakf/8rmDckS3pGOTPWwIwCzNwVJxd5hDb3R15wms2fZJG/aX53PDsTWBYsuTPwF7802IKk2QcrJ5FO7i6w==') table_name = 'climateData' table_service.create_table(table_name, False) while True: date = datetime.now() iso_date = date.isoformat() temp = "{0:.2f}".format(sense.temp) humidity = "{0:.2f}".format(sense.humidity) pressure = "{0:.2f}".format(sense.pressure) entry = {'PartitionKey': 'climate', 'RowKey': iso_date, 'Temperature': temp, 'Humidity':humidity, 'Pressure':pressure} table_service.insert_entity(table_name, entry) time.sleep(60) # wait one minute
<commit_before><commit_msg>Add script for sending sense info to azure table storage<commit_after>#!/usr/bin/env python """ sends temperature, humidity and pressure gathered from Sense Hat on Raspberry Pi2 to Azure Table Storage only python works with Azure , not python3, sudo pip install azure-storage invoke (no sudo required): python azure_sense.py """ import time from sense_hat import SenseHat from datetime import datetime from azure.storage.table import TableService __author__ = "Anatoly Mironov @mirontoli" sense = SenseHat() table_service = TableService(account_name='tolle', account_key='ho2zakf/8rmDckS3pGOTPWwIwCzNwVJxd5hDb3R15wms2fZJG/aX53PDsTWBYsuTPwF7802IKk2QcrJ5FO7i6w==') table_name = 'climateData' table_service.create_table(table_name, False) while True: date = datetime.now() iso_date = date.isoformat() temp = "{0:.2f}".format(sense.temp) humidity = "{0:.2f}".format(sense.humidity) pressure = "{0:.2f}".format(sense.pressure) entry = {'PartitionKey': 'climate', 'RowKey': iso_date, 'Temperature': temp, 'Humidity':humidity, 'Pressure':pressure} table_service.insert_entity(table_name, entry) time.sleep(60) # wait one minute
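Note that the script above embeds the storage account key directly in source. A safer variant, as a sketch only, reads the key from the environment; AZURE_STORAGE_KEY is an arbitrary variable name chosen for the example, not part of the original script:

import os
from azure.storage.table import TableService

# AZURE_STORAGE_KEY is an assumed name for this sketch; export it before running
table_service = TableService(account_name='tolle',
                             account_key=os.environ['AZURE_STORAGE_KEY'])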
40b8b6baaab21aa294eea0ffe07a6bfbd85d1a5c
create_black_square_picture.py
create_black_square_picture.py
# Create plain black square picture
from PIL import Image

img = Image.new('RGB', (225, 225), color='black')
img.save('temp/black_square.png')
Create plain black square picture
Create plain black square picture
Python
mit
foobar167/junkyard,foobar167/junkyard,foobar167/junkyard,foobar167/junkyard,foobar167/junkyard,foobar167/junkyard
Create plain black square picture
# Create plain black square picture from PIL import Image img = Image.new('RGB', (225, 225), color='black') img.save('temp/black_square.png')
<commit_before><commit_msg>Create plain black square picture<commit_after>
# Create plain black square picture from PIL import Image img = Image.new('RGB', (225, 225), color='black') img.save('temp/black_square.png')
Create plain black square picture# Create plain black square picture from PIL import Image img = Image.new('RGB', (225, 225), color='black') img.save('temp/black_square.png')
<commit_before><commit_msg>Create plain black square picture<commit_after># Create plain black square picture from PIL import Image img = Image.new('RGB', (225, 225), color='black') img.save('temp/black_square.png')
29f8849f74b10490561177668ef30dd267445ffb
test/test_speed.py
test/test_speed.py
from nose.tools import assert_greater, assert_less

import collections
import time
import cProfile
import pstats

import pandas as pd
import numpy as np
from numpy.random import normal

from phip import hit_calling


def test_speed(profile=False):
    starts = collections.OrderedDict()
    timings = collections.OrderedDict()
    profilers = collections.OrderedDict()

    def start(name):
        starts[name] = time.time()
        if profile:
            profilers[name] = cProfile.Profile()
            profilers[name].enable()

    def end(name):
        timings[name] = time.time() - starts[name]
        if profile:
            profilers[name].disable()

    num_clones = 100000
    num_beads_only = 8
    num_pull_down = 200
    num_hits = 1000

    data_df = pd.DataFrame(index=[
        "clone_%d" % (i + 1) for i in range(num_clones)
    ])
    means = np.random.normal(0, 10, num_clones)**2
    for i in range(num_beads_only):
        data_df["beads_only_%d" % (i + 1)] = np.random.poisson(means)
    for i in range(num_pull_down):
        data_df["pull_down_%d" % (i + 1)] = np.random.poisson(means)

    beads_only_samples = [c for c in data_df if c.startswith("beads")]
    pull_down_samples = [c for c in data_df if c.startswith("pull_down")]

    # Add some hits
    hit_pairs = set()  # set of (sample, clone)
    while len(hit_pairs) < num_hits:
        sample = np.random.choice(pull_down_samples)
        clone = np.random.choice(data_df.index)
        data_df.loc[clone, sample] = (
            data_df.loc[clone, sample]**2 + 100)
        hit_pairs.add((sample, clone))

    # Also to test that normalization works, make one beads-only sample way
    # bigger than the other columns.
    data_df["beads_only_1"] *= 1e6

    start("hit_calling")
    hit_calling.do_hit_calling(data_df, beads_only_samples)
    end("hit_calling")

    print("SPEED BENCHMARK")
    print("Results:\n%s" % str(pd.Series(timings)))

    return dict(
        (key, pstats.Stats(value)) for (key, value) in profilers.items())


if __name__ == '__main__':
    # If run directly from python, do profiling and leave the user in a shell
    # to explore results.
    result = test_speed(profile=True)
    for (name, stats) in result.items():
        print("**** %s ****" % name)
        stats.sort_stats("cumtime").reverse_order().print_stats()
        print("")

    # Leave in ipython
    # locals().update(result)
    # import ipdb  # pylint: disable=import-error
    # ipdb.set_trace()
Add speed test for hit calling
Add speed test for hit calling
Python
apache-2.0
laserson/phip-stat,lasersonlab/phip-stat
Add speed test for hit calling
from nose.tools import assert_greater, assert_less import collections import time import cProfile import pstats import pandas as pd import numpy as np from numpy.random import normal from phip import hit_calling def test_speed(profile=False): starts = collections.OrderedDict() timings = collections.OrderedDict() profilers = collections.OrderedDict() def start(name): starts[name] = time.time() if profile: profilers[name] = cProfile.Profile() profilers[name].enable() def end(name): timings[name] = time.time() - starts[name] if profile: profilers[name].disable() num_clones = 100000 num_beads_only = 8 num_pull_down = 200 num_hits = 1000 data_df = pd.DataFrame(index=[ "clone_%d" % (i + 1) for i in range(num_clones) ]) means = np.random.normal(0, 10, num_clones)**2 for i in range(num_beads_only): data_df["beads_only_%d" % (i + 1)] = np.random.poisson(means) for i in range(num_pull_down): data_df["pull_down_%d" % (i + 1)] = np.random.poisson(means) beads_only_samples = [c for c in data_df if c.startswith("beads")] pull_down_samples = [c for c in data_df if c.startswith("pull_down")] # Add some hits hit_pairs = set() # set of (sample, clone) while len(hit_pairs) < num_hits: sample = np.random.choice(pull_down_samples) clone = np.random.choice(data_df.index) data_df.loc[clone, sample] = ( data_df.loc[clone, sample]**2 + 100) hit_pairs.add((sample, clone)) # Also to test that normalization works, make one beads-only sample way # bigger than th eother columns. data_df["beads_only_1"] *= 1e6 start("hit_calling") hit_calling.do_hit_calling(data_df, beads_only_samples) end("hit_calling") print("SPEED BENCHMARK") print("Results:\n%s" % str(pd.Series(timings))) return dict( (key, pstats.Stats(value)) for (key, value) in profilers.items()) if __name__ == '__main__': # If run directly from python, do profiling and leave the user in a shell # to explore results. result = test_speed(profile=True) for (name, stats) in result.items(): print("**** %s ****" % name) stats.sort_stats("cumtime").reverse_order().print_stats() print("") # Leave in ipython # locals().update(result) # import ipdb # pylint: disable=import-error # ipdb.set_trace()
<commit_before><commit_msg>Add speed test for hit calling<commit_after>
from nose.tools import assert_greater, assert_less import collections import time import cProfile import pstats import pandas as pd import numpy as np from numpy.random import normal from phip import hit_calling def test_speed(profile=False): starts = collections.OrderedDict() timings = collections.OrderedDict() profilers = collections.OrderedDict() def start(name): starts[name] = time.time() if profile: profilers[name] = cProfile.Profile() profilers[name].enable() def end(name): timings[name] = time.time() - starts[name] if profile: profilers[name].disable() num_clones = 100000 num_beads_only = 8 num_pull_down = 200 num_hits = 1000 data_df = pd.DataFrame(index=[ "clone_%d" % (i + 1) for i in range(num_clones) ]) means = np.random.normal(0, 10, num_clones)**2 for i in range(num_beads_only): data_df["beads_only_%d" % (i + 1)] = np.random.poisson(means) for i in range(num_pull_down): data_df["pull_down_%d" % (i + 1)] = np.random.poisson(means) beads_only_samples = [c for c in data_df if c.startswith("beads")] pull_down_samples = [c for c in data_df if c.startswith("pull_down")] # Add some hits hit_pairs = set() # set of (sample, clone) while len(hit_pairs) < num_hits: sample = np.random.choice(pull_down_samples) clone = np.random.choice(data_df.index) data_df.loc[clone, sample] = ( data_df.loc[clone, sample]**2 + 100) hit_pairs.add((sample, clone)) # Also to test that normalization works, make one beads-only sample way # bigger than th eother columns. data_df["beads_only_1"] *= 1e6 start("hit_calling") hit_calling.do_hit_calling(data_df, beads_only_samples) end("hit_calling") print("SPEED BENCHMARK") print("Results:\n%s" % str(pd.Series(timings))) return dict( (key, pstats.Stats(value)) for (key, value) in profilers.items()) if __name__ == '__main__': # If run directly from python, do profiling and leave the user in a shell # to explore results. result = test_speed(profile=True) for (name, stats) in result.items(): print("**** %s ****" % name) stats.sort_stats("cumtime").reverse_order().print_stats() print("") # Leave in ipython # locals().update(result) # import ipdb # pylint: disable=import-error # ipdb.set_trace()
Add speed test for hit callingfrom nose.tools import assert_greater, assert_less import collections import time import cProfile import pstats import pandas as pd import numpy as np from numpy.random import normal from phip import hit_calling def test_speed(profile=False): starts = collections.OrderedDict() timings = collections.OrderedDict() profilers = collections.OrderedDict() def start(name): starts[name] = time.time() if profile: profilers[name] = cProfile.Profile() profilers[name].enable() def end(name): timings[name] = time.time() - starts[name] if profile: profilers[name].disable() num_clones = 100000 num_beads_only = 8 num_pull_down = 200 num_hits = 1000 data_df = pd.DataFrame(index=[ "clone_%d" % (i + 1) for i in range(num_clones) ]) means = np.random.normal(0, 10, num_clones)**2 for i in range(num_beads_only): data_df["beads_only_%d" % (i + 1)] = np.random.poisson(means) for i in range(num_pull_down): data_df["pull_down_%d" % (i + 1)] = np.random.poisson(means) beads_only_samples = [c for c in data_df if c.startswith("beads")] pull_down_samples = [c for c in data_df if c.startswith("pull_down")] # Add some hits hit_pairs = set() # set of (sample, clone) while len(hit_pairs) < num_hits: sample = np.random.choice(pull_down_samples) clone = np.random.choice(data_df.index) data_df.loc[clone, sample] = ( data_df.loc[clone, sample]**2 + 100) hit_pairs.add((sample, clone)) # Also to test that normalization works, make one beads-only sample way # bigger than th eother columns. data_df["beads_only_1"] *= 1e6 start("hit_calling") hit_calling.do_hit_calling(data_df, beads_only_samples) end("hit_calling") print("SPEED BENCHMARK") print("Results:\n%s" % str(pd.Series(timings))) return dict( (key, pstats.Stats(value)) for (key, value) in profilers.items()) if __name__ == '__main__': # If run directly from python, do profiling and leave the user in a shell # to explore results. result = test_speed(profile=True) for (name, stats) in result.items(): print("**** %s ****" % name) stats.sort_stats("cumtime").reverse_order().print_stats() print("") # Leave in ipython # locals().update(result) # import ipdb # pylint: disable=import-error # ipdb.set_trace()
<commit_before><commit_msg>Add speed test for hit calling<commit_after>from nose.tools import assert_greater, assert_less import collections import time import cProfile import pstats import pandas as pd import numpy as np from numpy.random import normal from phip import hit_calling def test_speed(profile=False): starts = collections.OrderedDict() timings = collections.OrderedDict() profilers = collections.OrderedDict() def start(name): starts[name] = time.time() if profile: profilers[name] = cProfile.Profile() profilers[name].enable() def end(name): timings[name] = time.time() - starts[name] if profile: profilers[name].disable() num_clones = 100000 num_beads_only = 8 num_pull_down = 200 num_hits = 1000 data_df = pd.DataFrame(index=[ "clone_%d" % (i + 1) for i in range(num_clones) ]) means = np.random.normal(0, 10, num_clones)**2 for i in range(num_beads_only): data_df["beads_only_%d" % (i + 1)] = np.random.poisson(means) for i in range(num_pull_down): data_df["pull_down_%d" % (i + 1)] = np.random.poisson(means) beads_only_samples = [c for c in data_df if c.startswith("beads")] pull_down_samples = [c for c in data_df if c.startswith("pull_down")] # Add some hits hit_pairs = set() # set of (sample, clone) while len(hit_pairs) < num_hits: sample = np.random.choice(pull_down_samples) clone = np.random.choice(data_df.index) data_df.loc[clone, sample] = ( data_df.loc[clone, sample]**2 + 100) hit_pairs.add((sample, clone)) # Also to test that normalization works, make one beads-only sample way # bigger than th eother columns. data_df["beads_only_1"] *= 1e6 start("hit_calling") hit_calling.do_hit_calling(data_df, beads_only_samples) end("hit_calling") print("SPEED BENCHMARK") print("Results:\n%s" % str(pd.Series(timings))) return dict( (key, pstats.Stats(value)) for (key, value) in profilers.items()) if __name__ == '__main__': # If run directly from python, do profiling and leave the user in a shell # to explore results. result = test_speed(profile=True) for (name, stats) in result.items(): print("**** %s ****" % name) stats.sort_stats("cumtime").reverse_order().print_stats() print("") # Leave in ipython # locals().update(result) # import ipdb # pylint: disable=import-error # ipdb.set_trace()
2202662b311ae8e1e21b9f5816debc40fefa85a1
bulb/migrations/0027_readathon_team.py
bulb/migrations/0027_readathon_team.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models


def add_teams(apps, schema_editor):
    Team = apps.get_model('clubs', 'Team')
    Club = apps.get_model('clubs', 'Club')
    StudentClubYear = apps.get_model('core', 'StudentClubYear')
    year_2016_2017 = StudentClubYear.objects.get(start_date__year=2016,
                                                 end_date__year=2017)
    bulb_riyadh_female = Club.objects.get(english_name="Bulb",
                                          year=year_2016_2017,
                                          city="R", gender='F')
    Team.objects.create(name="فريق الريديثون",
                        code_name="readathon",
                        year=year_2016_2017,
                        club=bulb_riyadh_female,
                        city="", gender="")


def remove_teams(apps, schema_editor):
    Team = apps.get_model('clubs', 'Team')
    StudentClubYear = apps.get_model('core', 'StudentClubYear')
    year_2016_2017 = StudentClubYear.objects.get(start_date__year=2016,
                                                 end_date__year=2017)
    Team.objects.filter(code_name="readathon", year=year_2016_2017).delete()


class Migration(migrations.Migration):

    dependencies = [
        ('bulb', '0026_culturalproduct_debate_debatecomment'),
        ('clubs', '0047_team'),
    ]

    operations = [
        migrations.RunPython(
            add_teams,
            reverse_code=remove_teams),
    ]
Add migrations to add readathon teams
Add migrations to add readathon teams
Python
agpl-3.0
enjaz/enjaz,enjaz/enjaz,osamak/student-portal,osamak/student-portal,osamak/student-portal,enjaz/enjaz,osamak/student-portal,enjaz/enjaz,enjaz/enjaz,osamak/student-portal
Add migrations to add readathon teams
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models def add_teams(apps, schema_editor): Team = apps.get_model('clubs', 'Team') Club = apps.get_model('clubs', 'Club') StudentClubYear = apps.get_model('core', 'StudentClubYear') year_2016_2017 = StudentClubYear.objects.get(start_date__year=2016, end_date__year=2017) bulb_riyadh_female = Club.objects.get(english_name="Bulb", year=year_2016_2017, city="R", gender='F') Team.objects.create(name="فريق الريديثون", code_name="readathon", year=year_2016_2017, club=bulb_riyadh_female, city="", gender="") def remove_teams(apps, schema_editor): Team = apps.get_model('clubs', 'Team') StudentClubYear = apps.get_model('core', 'StudentClubYear') year_2016_2017 = StudentClubYear.objects.get(start_date__year=2016, end_date__year=2017) Team.objects.filter(code_name="readathon", year=year_2016_2017).delete() class Migration(migrations.Migration): dependencies = [ ('bulb', '0026_culturalproduct_debate_debatecomment'), ('clubs', '0047_team'), ] operations = [ migrations.RunPython( add_teams, reverse_code=remove_teams), ]
<commit_before><commit_msg>Add migrations to add readathon teams<commit_after>
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models def add_teams(apps, schema_editor): Team = apps.get_model('clubs', 'Team') Club = apps.get_model('clubs', 'Club') StudentClubYear = apps.get_model('core', 'StudentClubYear') year_2016_2017 = StudentClubYear.objects.get(start_date__year=2016, end_date__year=2017) bulb_riyadh_female = Club.objects.get(english_name="Bulb", year=year_2016_2017, city="R", gender='F') Team.objects.create(name="فريق الريديثون", code_name="readathon", year=year_2016_2017, club=bulb_riyadh_female, city="", gender="") def remove_teams(apps, schema_editor): Team = apps.get_model('clubs', 'Team') StudentClubYear = apps.get_model('core', 'StudentClubYear') year_2016_2017 = StudentClubYear.objects.get(start_date__year=2016, end_date__year=2017) Team.objects.filter(code_name="readathon", year=year_2016_2017).delete() class Migration(migrations.Migration): dependencies = [ ('bulb', '0026_culturalproduct_debate_debatecomment'), ('clubs', '0047_team'), ] operations = [ migrations.RunPython( add_teams, reverse_code=remove_teams), ]
Add migrations to add readathon teams# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models def add_teams(apps, schema_editor): Team = apps.get_model('clubs', 'Team') Club = apps.get_model('clubs', 'Club') StudentClubYear = apps.get_model('core', 'StudentClubYear') year_2016_2017 = StudentClubYear.objects.get(start_date__year=2016, end_date__year=2017) bulb_riyadh_female = Club.objects.get(english_name="Bulb", year=year_2016_2017, city="R", gender='F') Team.objects.create(name="فريق الريديثون", code_name="readathon", year=year_2016_2017, club=bulb_riyadh_female, city="", gender="") def remove_teams(apps, schema_editor): Team = apps.get_model('clubs', 'Team') StudentClubYear = apps.get_model('core', 'StudentClubYear') year_2016_2017 = StudentClubYear.objects.get(start_date__year=2016, end_date__year=2017) Team.objects.filter(code_name="readathon", year=year_2016_2017).delete() class Migration(migrations.Migration): dependencies = [ ('bulb', '0026_culturalproduct_debate_debatecomment'), ('clubs', '0047_team'), ] operations = [ migrations.RunPython( add_teams, reverse_code=remove_teams), ]
<commit_before><commit_msg>Add migrations to add readathon teams<commit_after># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models def add_teams(apps, schema_editor): Team = apps.get_model('clubs', 'Team') Club = apps.get_model('clubs', 'Club') StudentClubYear = apps.get_model('core', 'StudentClubYear') year_2016_2017 = StudentClubYear.objects.get(start_date__year=2016, end_date__year=2017) bulb_riyadh_female = Club.objects.get(english_name="Bulb", year=year_2016_2017, city="R", gender='F') Team.objects.create(name="فريق الريديثون", code_name="readathon", year=year_2016_2017, club=bulb_riyadh_female, city="", gender="") def remove_teams(apps, schema_editor): Team = apps.get_model('clubs', 'Team') StudentClubYear = apps.get_model('core', 'StudentClubYear') year_2016_2017 = StudentClubYear.objects.get(start_date__year=2016, end_date__year=2017) Team.objects.filter(code_name="readathon", year=year_2016_2017).delete() class Migration(migrations.Migration): dependencies = [ ('bulb', '0026_culturalproduct_debate_debatecomment'), ('clubs', '0047_team'), ] operations = [ migrations.RunPython( add_teams, reverse_code=remove_teams), ]
1378c45ba1d3f2f3291c71390798e70abb52b87a
migrations/versions/0144_add_notification_reply_to.py
migrations/versions/0144_add_notification_reply_to.py
""" Revision ID: 0144_add_notification_reply_to Revises: 0143_remove_reply_to Create Date: 2017-11-22 14:23:48.806781 """ from alembic import op import sqlalchemy as sa revision = '0144_add_notification_reply_to' down_revision = '0143_remove_reply_to' def upgrade(): op.add_column('notifications', sa.Column('reply_to_text', sa.String(), nullable=True)) def downgrade(): op.drop_column('notifications', 'reply_to_text')
Add a column to Notifications to store the reply_to_text. This is the text value of the sender_id; depending on the channel, this will be an SMS sender, an email reply-to address or a letter contact block. This is the first PR in a series to refactor how we send the "reply_to" to the provider; eventually we can eliminate the notification_to_sms_sender and notification_to_email_to tables.
Add a column to Notifications to store the reply_to_text. This is the text value of the sender_id; depending on the channel, this will be an SMS sender, an email reply-to address or a letter contact block. This is the first PR in a series to refactor how we send the "reply_to" to the provider; eventually we can eliminate the notification_to_sms_sender and notification_to_email_to tables.
Python
mit
alphagov/notifications-api,alphagov/notifications-api
Add a column to Notifications to store the reply_to_text. This is the text value of the sender_id; depending on the channel, this will be an SMS sender, an email reply-to address or a letter contact block. This is the first PR in a series to refactor how we send the "reply_to" to the provider; eventually we can eliminate the notification_to_sms_sender and notification_to_email_to tables.
""" Revision ID: 0144_add_notification_reply_to Revises: 0143_remove_reply_to Create Date: 2017-11-22 14:23:48.806781 """ from alembic import op import sqlalchemy as sa revision = '0144_add_notification_reply_to' down_revision = '0143_remove_reply_to' def upgrade(): op.add_column('notifications', sa.Column('reply_to_text', sa.String(), nullable=True)) def downgrade(): op.drop_column('notifications', 'reply_to_text')
<commit_before><commit_msg>Add a column to Notifications to store the reply_to_text. This is the text value of the sender_id; depending on the channel, this will be an SMS sender, an email reply-to address or a letter contact block. This is the first PR in a series to refactor how we send the "reply_to" to the provider; eventually we can eliminate the notification_to_sms_sender and notification_to_email_to tables.<commit_after>
""" Revision ID: 0144_add_notification_reply_to Revises: 0143_remove_reply_to Create Date: 2017-11-22 14:23:48.806781 """ from alembic import op import sqlalchemy as sa revision = '0144_add_notification_reply_to' down_revision = '0143_remove_reply_to' def upgrade(): op.add_column('notifications', sa.Column('reply_to_text', sa.String(), nullable=True)) def downgrade(): op.drop_column('notifications', 'reply_to_text')
Add a column to Notifications to store the reply_to_text. This is text value of the sender_id, depending on the channel this will be a SMS sender, email reply to address or a letter contact block. This is the first PR in a series to refactor how we send the "reply_to" the provider, eventually we can eliminate the notification_to_sms_sender and notification_to_email_to tables.""" Revision ID: 0144_add_notification_reply_to Revises: 0143_remove_reply_to Create Date: 2017-11-22 14:23:48.806781 """ from alembic import op import sqlalchemy as sa revision = '0144_add_notification_reply_to' down_revision = '0143_remove_reply_to' def upgrade(): op.add_column('notifications', sa.Column('reply_to_text', sa.String(), nullable=True)) def downgrade(): op.drop_column('notifications', 'reply_to_text')
<commit_before><commit_msg>Add a column to Notifications to store the reply_to_text. This is text value of the sender_id, depending on the channel this will be a SMS sender, email reply to address or a letter contact block. This is the first PR in a series to refactor how we send the "reply_to" the provider, eventually we can eliminate the notification_to_sms_sender and notification_to_email_to tables.<commit_after>""" Revision ID: 0144_add_notification_reply_to Revises: 0143_remove_reply_to Create Date: 2017-11-22 14:23:48.806781 """ from alembic import op import sqlalchemy as sa revision = '0144_add_notification_reply_to' down_revision = '0143_remove_reply_to' def upgrade(): op.add_column('notifications', sa.Column('reply_to_text', sa.String(), nullable=True)) def downgrade(): op.drop_column('notifications', 'reply_to_text')
1475a740f122f915127ed283ec25f0d48e2cc211
tests/integration/templatetags/test_currency_filters.py
tests/integration/templatetags/test_currency_filters.py
# -*- coding: utf-8 -*-
from decimal import Decimal as D

from django.utils import translation
from django.test import TestCase
from django import template


def render(template_string, ctx):
    tpl = template.Template(template_string)
    return tpl.render(template.Context(ctx))


class TestCurrencyFilter(TestCase):

    def setUp(self):
        self.template = template.Template(
            "{% load currency_filters %}"
            "{{ price|currency }}"
        )

    def test_renders_price_correctly(self):
        out = self.template.render(template.Context({
            'price': D('10.23'),
        }))
        self.assertTrue(u'£10.23' in out)

    def test_handles_none_price_gracefully(self):
        self.template.render(template.Context({
            'price': None
        }))

    def test_handles_string_price_gracefully(self):
        self.template.render(template.Context({
            'price': ''
        }))

    @translation.override(None, deactivate=True)
    def test_handles_no_translation(self):
        self.template.render(template.Context({
            'price': D('10.23'),
        }))
# -*- coding: utf-8 -*-
from decimal import Decimal as D

from django.utils import translation
from django.test import TestCase
from django import template


def render(template_string, ctx):
    tpl = template.Template(template_string)
    return tpl.render(template.Context(ctx))


class TestCurrencyFilter(TestCase):

    def setUp(self):
        self.template = template.Template(
            "{% load currency_filters %}"
            "{{ price|currency }}"
        )

    def test_renders_price_correctly(self):
        out = self.template.render(template.Context({
            'price': D('10.23'),
        }))
        self.assertTrue(u'£10.23' in out)

    def test_handles_none_price_gracefully(self):
        self.template.render(template.Context({
            'price': None
        }))

    def test_handles_string_price_gracefully(self):
        self.template.render(template.Context({
            'price': ''
        }))

    def test_handles_no_translation(self):
        with translation.override(None, deactivate=True):
            self.template.render(template.Context({
                'price': D('10.23'),
            }))
Use translation.override as a context manager instead of a decorator.
Use translation.override as a context manager instead of a decorator.
Python
bsd-3-clause
kapari/django-oscar,MatthewWilkes/django-oscar,WillisXChen/django-oscar,taedori81/django-oscar,solarissmoke/django-oscar,WillisXChen/django-oscar,WadeYuChen/django-oscar,MatthewWilkes/django-oscar,taedori81/django-oscar,django-oscar/django-oscar,anentropic/django-oscar,okfish/django-oscar,jlmadurga/django-oscar,saadatqadri/django-oscar,john-parton/django-oscar,solarissmoke/django-oscar,jlmadurga/django-oscar,Jannes123/django-oscar,itbabu/django-oscar,Bogh/django-oscar,anentropic/django-oscar,Jannes123/django-oscar,sonofatailor/django-oscar,sonofatailor/django-oscar,WillisXChen/django-oscar,sasha0/django-oscar,django-oscar/django-oscar,WadeYuChen/django-oscar,WillisXChen/django-oscar,sonofatailor/django-oscar,MatthewWilkes/django-oscar,dongguangming/django-oscar,okfish/django-oscar,kapari/django-oscar,jlmadurga/django-oscar,rocopartners/django-oscar,dongguangming/django-oscar,bschuon/django-oscar,ka7eh/django-oscar,Jannes123/django-oscar,Bogh/django-oscar,WadeYuChen/django-oscar,michaelkuty/django-oscar,spartonia/django-oscar,michaelkuty/django-oscar,michaelkuty/django-oscar,taedori81/django-oscar,sonofatailor/django-oscar,dongguangming/django-oscar,spartonia/django-oscar,django-oscar/django-oscar,ka7eh/django-oscar,dongguangming/django-oscar,Bogh/django-oscar,saadatqadri/django-oscar,okfish/django-oscar,saadatqadri/django-oscar,eddiep1101/django-oscar,anentropic/django-oscar,sasha0/django-oscar,john-parton/django-oscar,itbabu/django-oscar,eddiep1101/django-oscar,bschuon/django-oscar,faratro/django-oscar,faratro/django-oscar,ka7eh/django-oscar,bschuon/django-oscar,okfish/django-oscar,michaelkuty/django-oscar,itbabu/django-oscar,solarissmoke/django-oscar,kapari/django-oscar,WadeYuChen/django-oscar,WillisXChen/django-oscar,rocopartners/django-oscar,Bogh/django-oscar,faratro/django-oscar,itbabu/django-oscar,ka7eh/django-oscar,Jannes123/django-oscar,rocopartners/django-oscar,sasha0/django-oscar,bschuon/django-oscar,MatthewWilkes/django-oscar,faratro/django-oscar,anentropic/django-oscar,spartonia/django-oscar,kapari/django-oscar,jlmadurga/django-oscar,sasha0/django-oscar,solarissmoke/django-oscar,john-parton/django-oscar,eddiep1101/django-oscar,eddiep1101/django-oscar,saadatqadri/django-oscar,rocopartners/django-oscar,spartonia/django-oscar,WillisXChen/django-oscar,django-oscar/django-oscar,taedori81/django-oscar,john-parton/django-oscar
# -*- coding: utf-8 -*- from decimal import Decimal as D from django.utils import translation from django.test import TestCase from django import template def render(template_string, ctx): tpl = template.Template(template_string) return tpl.render(template.Context(ctx)) class TestCurrencyFilter(TestCase): def setUp(self): self.template = template.Template( "{% load currency_filters %}" "{{ price|currency }}" ) def test_renders_price_correctly(self): out = self.template.render(template.Context({ 'price': D('10.23'), })) self.assertTrue(u'£10.23' in out) def test_handles_none_price_gracefully(self): self.template.render(template.Context({ 'price': None })) def test_handles_string_price_gracefully(self): self.template.render(template.Context({ 'price': '' })) @translation.override(None, deactivate=True) def test_handles_no_translation(self): self.template.render(template.Context({ 'price': D('10.23'), }))Use translation.override as a context manager instead of a decorator.
# -*- coding: utf-8 -*- from decimal import Decimal as D from django.utils import translation from django.test import TestCase from django import template def render(template_string, ctx): tpl = template.Template(template_string) return tpl.render(template.Context(ctx)) class TestCurrencyFilter(TestCase): def setUp(self): self.template = template.Template( "{% load currency_filters %}" "{{ price|currency }}" ) def test_renders_price_correctly(self): out = self.template.render(template.Context({ 'price': D('10.23'), })) self.assertTrue(u'£10.23' in out) def test_handles_none_price_gracefully(self): self.template.render(template.Context({ 'price': None })) def test_handles_string_price_gracefully(self): self.template.render(template.Context({ 'price': '' })) def test_handles_no_translation(self): with translation.override(None, deactivate=True): self.template.render(template.Context({ 'price': D('10.23'), }))
<commit_before># -*- coding: utf-8 -*- from decimal import Decimal as D from django.utils import translation from django.test import TestCase from django import template def render(template_string, ctx): tpl = template.Template(template_string) return tpl.render(template.Context(ctx)) class TestCurrencyFilter(TestCase): def setUp(self): self.template = template.Template( "{% load currency_filters %}" "{{ price|currency }}" ) def test_renders_price_correctly(self): out = self.template.render(template.Context({ 'price': D('10.23'), })) self.assertTrue(u'£10.23' in out) def test_handles_none_price_gracefully(self): self.template.render(template.Context({ 'price': None })) def test_handles_string_price_gracefully(self): self.template.render(template.Context({ 'price': '' })) @translation.override(None, deactivate=True) def test_handles_no_translation(self): self.template.render(template.Context({ 'price': D('10.23'), }))<commit_msg>Use translation.override as a context manager instead of a decorator.<commit_after>
# -*- coding: utf-8 -*- from decimal import Decimal as D from django.utils import translation from django.test import TestCase from django import template def render(template_string, ctx): tpl = template.Template(template_string) return tpl.render(template.Context(ctx)) class TestCurrencyFilter(TestCase): def setUp(self): self.template = template.Template( "{% load currency_filters %}" "{{ price|currency }}" ) def test_renders_price_correctly(self): out = self.template.render(template.Context({ 'price': D('10.23'), })) self.assertTrue(u'£10.23' in out) def test_handles_none_price_gracefully(self): self.template.render(template.Context({ 'price': None })) def test_handles_string_price_gracefully(self): self.template.render(template.Context({ 'price': '' })) def test_handles_no_translation(self): with translation.override(None, deactivate=True): self.template.render(template.Context({ 'price': D('10.23'), }))
# -*- coding: utf-8 -*- from decimal import Decimal as D from django.utils import translation from django.test import TestCase from django import template def render(template_string, ctx): tpl = template.Template(template_string) return tpl.render(template.Context(ctx)) class TestCurrencyFilter(TestCase): def setUp(self): self.template = template.Template( "{% load currency_filters %}" "{{ price|currency }}" ) def test_renders_price_correctly(self): out = self.template.render(template.Context({ 'price': D('10.23'), })) self.assertTrue(u'£10.23' in out) def test_handles_none_price_gracefully(self): self.template.render(template.Context({ 'price': None })) def test_handles_string_price_gracefully(self): self.template.render(template.Context({ 'price': '' })) @translation.override(None, deactivate=True) def test_handles_no_translation(self): self.template.render(template.Context({ 'price': D('10.23'), }))Use translation.override as a context manager instead of a decorator.# -*- coding: utf-8 -*- from decimal import Decimal as D from django.utils import translation from django.test import TestCase from django import template def render(template_string, ctx): tpl = template.Template(template_string) return tpl.render(template.Context(ctx)) class TestCurrencyFilter(TestCase): def setUp(self): self.template = template.Template( "{% load currency_filters %}" "{{ price|currency }}" ) def test_renders_price_correctly(self): out = self.template.render(template.Context({ 'price': D('10.23'), })) self.assertTrue(u'£10.23' in out) def test_handles_none_price_gracefully(self): self.template.render(template.Context({ 'price': None })) def test_handles_string_price_gracefully(self): self.template.render(template.Context({ 'price': '' })) def test_handles_no_translation(self): with translation.override(None, deactivate=True): self.template.render(template.Context({ 'price': D('10.23'), }))
<commit_before># -*- coding: utf-8 -*- from decimal import Decimal as D from django.utils import translation from django.test import TestCase from django import template def render(template_string, ctx): tpl = template.Template(template_string) return tpl.render(template.Context(ctx)) class TestCurrencyFilter(TestCase): def setUp(self): self.template = template.Template( "{% load currency_filters %}" "{{ price|currency }}" ) def test_renders_price_correctly(self): out = self.template.render(template.Context({ 'price': D('10.23'), })) self.assertTrue(u'£10.23' in out) def test_handles_none_price_gracefully(self): self.template.render(template.Context({ 'price': None })) def test_handles_string_price_gracefully(self): self.template.render(template.Context({ 'price': '' })) @translation.override(None, deactivate=True) def test_handles_no_translation(self): self.template.render(template.Context({ 'price': D('10.23'), }))<commit_msg>Use translation.override as a context manager instead of a decorator.<commit_after># -*- coding: utf-8 -*- from decimal import Decimal as D from django.utils import translation from django.test import TestCase from django import template def render(template_string, ctx): tpl = template.Template(template_string) return tpl.render(template.Context(ctx)) class TestCurrencyFilter(TestCase): def setUp(self): self.template = template.Template( "{% load currency_filters %}" "{{ price|currency }}" ) def test_renders_price_correctly(self): out = self.template.render(template.Context({ 'price': D('10.23'), })) self.assertTrue(u'£10.23' in out) def test_handles_none_price_gracefully(self): self.template.render(template.Context({ 'price': None })) def test_handles_string_price_gracefully(self): self.template.render(template.Context({ 'price': '' })) def test_handles_no_translation(self): with translation.override(None, deactivate=True): self.template.render(template.Context({ 'price': D('10.23'), }))
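The commit above replaces a class-level decorator with a context manager around the render call. django.utils.translation.override temporarily activates a language (or, with None plus deactivate=True, switches translations off) and restores the previous state on exit. A minimal sketch of that behaviour, runnable only under a configured Django settings module; the language code "fr" is illustrative:

from django.utils import translation

def demo():
    with translation.override("fr"):
        # Inside the block the active language is the one passed in.
        assert translation.get_language() == "fr"
    with translation.override(None, deactivate=True):
        # override(None) deactivates translations entirely, so
        # get_language() reports None while the block is active.
        assert translation.get_language() is None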
6ff3506b94789cde6772efa097253ceda4729db4
tests/spec/test_spec_schema.py
tests/spec/test_spec_schema.py
import pytest from aiohttp import hdrs from aiohttp_json_api.common import JSONAPI_CONTENT_TYPE @pytest.mark.parametrize( 'resource_type', ('authors', 'books', 'chapters', 'photos', 'stores') ) async def test_spec_schema(test_client, fantasy_app, jsonapi_validator, resource_type): client = await test_client(fantasy_app) response = await client.get(f'/api/{resource_type}') json = await response.json(content_type=JSONAPI_CONTENT_TYPE) assert jsonapi_validator.is_valid(json) async def test_content_negotiation(test_client, fantasy_app): client = await test_client(fantasy_app) response = await client.get( '/api/books/1', headers={hdrs.ACCEPT: 'application/vnd.api+json'} ) assert response.status == 200 assert response.headers['Content-Type'] == 'application/vnd.api+json' response = await client.get( '/api/books/1', headers={hdrs.CONTENT_TYPE: 'application/vnd.api+json; foo=bar'} ) assert response.status == 415 response = await client.get( '/api/books/1', headers={hdrs.ACCEPT: 'application/vnd.api+json; foo=bar'} ) assert response.status == 406
Add tests for JSON API schema and content type negotiation
Add tests for JSON API schema and content type negotiation
Python
mit
vovanbo/aiohttp_json_api
Add tests for JSON API schema and content type negotiation
import pytest from aiohttp import hdrs from aiohttp_json_api.common import JSONAPI_CONTENT_TYPE @pytest.mark.parametrize( 'resource_type', ('authors', 'books', 'chapters', 'photos', 'stores') ) async def test_spec_schema(test_client, fantasy_app, jsonapi_validator, resource_type): client = await test_client(fantasy_app) response = await client.get(f'/api/{resource_type}') json = await response.json(content_type=JSONAPI_CONTENT_TYPE) assert jsonapi_validator.is_valid(json) async def test_content_negotiation(test_client, fantasy_app): client = await test_client(fantasy_app) response = await client.get( '/api/books/1', headers={hdrs.ACCEPT: 'application/vnd.api+json'} ) assert response.status == 200 assert response.headers['Content-Type'] == 'application/vnd.api+json' response = await client.get( '/api/books/1', headers={hdrs.CONTENT_TYPE: 'application/vnd.api+json; foo=bar'} ) assert response.status == 415 response = await client.get( '/api/books/1', headers={hdrs.ACCEPT: 'application/vnd.api+json; foo=bar'} ) assert response.status == 406
<commit_before><commit_msg>Add tests for JSON API schema and content type negotiation<commit_after>
import pytest from aiohttp import hdrs from aiohttp_json_api.common import JSONAPI_CONTENT_TYPE @pytest.mark.parametrize( 'resource_type', ('authors', 'books', 'chapters', 'photos', 'stores') ) async def test_spec_schema(test_client, fantasy_app, jsonapi_validator, resource_type): client = await test_client(fantasy_app) response = await client.get(f'/api/{resource_type}') json = await response.json(content_type=JSONAPI_CONTENT_TYPE) assert jsonapi_validator.is_valid(json) async def test_content_negotiation(test_client, fantasy_app): client = await test_client(fantasy_app) response = await client.get( '/api/books/1', headers={hdrs.ACCEPT: 'application/vnd.api+json'} ) assert response.status == 200 assert response.headers['Content-Type'] == 'application/vnd.api+json' response = await client.get( '/api/books/1', headers={hdrs.CONTENT_TYPE: 'application/vnd.api+json; foo=bar'} ) assert response.status == 415 response = await client.get( '/api/books/1', headers={hdrs.ACCEPT: 'application/vnd.api+json; foo=bar'} ) assert response.status == 406
Add tests for JSON API schema and content type negotiationimport pytest from aiohttp import hdrs from aiohttp_json_api.common import JSONAPI_CONTENT_TYPE @pytest.mark.parametrize( 'resource_type', ('authors', 'books', 'chapters', 'photos', 'stores') ) async def test_spec_schema(test_client, fantasy_app, jsonapi_validator, resource_type): client = await test_client(fantasy_app) response = await client.get(f'/api/{resource_type}') json = await response.json(content_type=JSONAPI_CONTENT_TYPE) assert jsonapi_validator.is_valid(json) async def test_content_negotiation(test_client, fantasy_app): client = await test_client(fantasy_app) response = await client.get( '/api/books/1', headers={hdrs.ACCEPT: 'application/vnd.api+json'} ) assert response.status == 200 assert response.headers['Content-Type'] == 'application/vnd.api+json' response = await client.get( '/api/books/1', headers={hdrs.CONTENT_TYPE: 'application/vnd.api+json; foo=bar'} ) assert response.status == 415 response = await client.get( '/api/books/1', headers={hdrs.ACCEPT: 'application/vnd.api+json; foo=bar'} ) assert response.status == 406
<commit_before><commit_msg>Add tests for JSON API schema and content type negotiation<commit_after>import pytest from aiohttp import hdrs from aiohttp_json_api.common import JSONAPI_CONTENT_TYPE @pytest.mark.parametrize( 'resource_type', ('authors', 'books', 'chapters', 'photos', 'stores') ) async def test_spec_schema(test_client, fantasy_app, jsonapi_validator, resource_type): client = await test_client(fantasy_app) response = await client.get(f'/api/{resource_type}') json = await response.json(content_type=JSONAPI_CONTENT_TYPE) assert jsonapi_validator.is_valid(json) async def test_content_negotiation(test_client, fantasy_app): client = await test_client(fantasy_app) response = await client.get( '/api/books/1', headers={hdrs.ACCEPT: 'application/vnd.api+json'} ) assert response.status == 200 assert response.headers['Content-Type'] == 'application/vnd.api+json' response = await client.get( '/api/books/1', headers={hdrs.CONTENT_TYPE: 'application/vnd.api+json; foo=bar'} ) assert response.status == 415 response = await client.get( '/api/books/1', headers={hdrs.ACCEPT: 'application/vnd.api+json; foo=bar'} ) assert response.status == 406
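The 415 and 406 expectations in this test follow the JSON:API specification: a server must answer 415 Unsupported Media Type when the request's Content-Type is application/vnd.api+json with any media type parameters, and 406 Not Acceptable when every JSON:API entry in Accept carries parameters. A rough, framework-independent sketch of that check (not the library's actual implementation):

JSONAPI = "application/vnd.api+json"

def negotiate(content_type, accept):
    """Return an error status per JSON:API content negotiation,
    or None when the request is acceptable."""
    if content_type and content_type.startswith(JSONAPI):
        if content_type.strip() != JSONAPI:
            return 415  # media type parameters are forbidden here
    if accept:
        jsonapi_entries = [p.strip() for p in accept.split(",")
                           if p.strip().startswith(JSONAPI)]
        # 406 only when JSON:API was requested and *every* instance
        # of it is modified with parameters.
        if jsonapi_entries and all(e != JSONAPI for e in jsonapi_entries):
            return 406
    return None

assert negotiate("application/vnd.api+json; foo=bar", None) == 415
assert negotiate(None, "application/vnd.api+json; foo=bar") == 406
assert negotiate(None, "application/vnd.api+json") is None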
dbdda8bbb60807f7b3e55728ce0d61c1b8fed8a9
tests/test_global_arguments.py
tests/test_global_arguments.py
from glob import glob from importlib import import_module from inspect import getargspec, getmembers from os import path from types import FunctionType from unittest import TestCase from pyinfra import operations from pyinfra.api.operation_kwargs import OPERATION_KWARGS def _is_pyinfra_operation(module, key, value): return ( isinstance(value, FunctionType) and value.__module__ == module.__name__ and getattr(value, '_pyinfra_op', False) and not value.__name__.startswith('_') and not key.startswith('_') ) def iter_operations(): module_filenames = glob(path.join(path.dirname(operations.__file__), '*.py')) for module_name in sorted(module_filenames): module = import_module('pyinfra.operations.{0}'.format( path.basename(module_name)[:-3], )) for key, value in sorted(getmembers(module)): if _is_pyinfra_operation(module, key, value): yield module, value class TestOperationGlobalArguments(TestCase): def test_operations_do_not_use_global_arguments(self): global_arg_keys = set() for group, kwargs in OPERATION_KWARGS.items(): global_arg_keys.update(kwargs.keys()) for op_module, op_func in iter_operations(): argspec = getargspec(op_func._pyinfra_op) for arg in argspec.args: assert arg not in global_arg_keys, \ '`{0}` argument found in {1}.{2} operation function'.format( arg, op_module.__name__, op_func.__name__, )
Add tests for operation arguments clashing with global arguments.
Add tests for operation arguments clashing with global arguments. This makes it possible to expand the global argument list with confidence. It's worth noting this shouldn't be done lightly, as it prevents any argument keys being used elsewhere.
Python
mit
Fizzadar/pyinfra,Fizzadar/pyinfra
Add tests for operation arguments clashing with global arguments. This makes it possible to expand the global argument list with confidence. It's worth noting this shouldn't be done lightly, as it prevents any argument keys being used elsewhere.
from glob import glob from importlib import import_module from inspect import getargspec, getmembers from os import path from types import FunctionType from unittest import TestCase from pyinfra import operations from pyinfra.api.operation_kwargs import OPERATION_KWARGS def _is_pyinfra_operation(module, key, value): return ( isinstance(value, FunctionType) and value.__module__ == module.__name__ and getattr(value, '_pyinfra_op', False) and not value.__name__.startswith('_') and not key.startswith('_') ) def iter_operations(): module_filenames = glob(path.join(path.dirname(operations.__file__), '*.py')) for module_name in sorted(module_filenames): module = import_module('pyinfra.operations.{0}'.format( path.basename(module_name)[:-3], )) for key, value in sorted(getmembers(module)): if _is_pyinfra_operation(module, key, value): yield module, value class TestOperationGlobalArguments(TestCase): def test_operations_do_not_use_global_arguments(self): global_arg_keys = set() for group, kwargs in OPERATION_KWARGS.items(): global_arg_keys.update(kwargs.keys()) for op_module, op_func in iter_operations(): argspec = getargspec(op_func._pyinfra_op) for arg in argspec.args: assert arg not in global_arg_keys, \ '`{0}` argument found in {1}.{2} operation function'.format( arg, op_module.__name__, op_func.__name__, )
<commit_before><commit_msg>Add tests for operation arguments clashing with global arguments. This makes it possible to expand the global argument list with confidence. It's worth noting this shouldn't be done lightly, as it prevents any argument keys being used elsewhere.<commit_after>
from glob import glob from importlib import import_module from inspect import getargspec, getmembers from os import path from types import FunctionType from unittest import TestCase from pyinfra import operations from pyinfra.api.operation_kwargs import OPERATION_KWARGS def _is_pyinfra_operation(module, key, value): return ( isinstance(value, FunctionType) and value.__module__ == module.__name__ and getattr(value, '_pyinfra_op', False) and not value.__name__.startswith('_') and not key.startswith('_') ) def iter_operations(): module_filenames = glob(path.join(path.dirname(operations.__file__), '*.py')) for module_name in sorted(module_filenames): module = import_module('pyinfra.operations.{0}'.format( path.basename(module_name)[:-3], )) for key, value in sorted(getmembers(module)): if _is_pyinfra_operation(module, key, value): yield module, value class TestOperationGlobalArguments(TestCase): def test_operations_do_not_use_global_arguments(self): global_arg_keys = set() for group, kwargs in OPERATION_KWARGS.items(): global_arg_keys.update(kwargs.keys()) for op_module, op_func in iter_operations(): argspec = getargspec(op_func._pyinfra_op) for arg in argspec.args: assert arg not in global_arg_keys, \ '`{0}` argument found in {1}.{2} operation function'.format( arg, op_module.__name__, op_func.__name__, )
Add tests for operation arguments clashing with global arguments. This makes it possible to expand the global argument list with confidence. It's worth noting this shouldn't be done lightly, as it prevents any argument keys being used elsewhere.from glob import glob from importlib import import_module from inspect import getargspec, getmembers from os import path from types import FunctionType from unittest import TestCase from pyinfra import operations from pyinfra.api.operation_kwargs import OPERATION_KWARGS def _is_pyinfra_operation(module, key, value): return ( isinstance(value, FunctionType) and value.__module__ == module.__name__ and getattr(value, '_pyinfra_op', False) and not value.__name__.startswith('_') and not key.startswith('_') ) def iter_operations(): module_filenames = glob(path.join(path.dirname(operations.__file__), '*.py')) for module_name in sorted(module_filenames): module = import_module('pyinfra.operations.{0}'.format( path.basename(module_name)[:-3], )) for key, value in sorted(getmembers(module)): if _is_pyinfra_operation(module, key, value): yield module, value class TestOperationGlobalArguments(TestCase): def test_operations_do_not_use_global_arguments(self): global_arg_keys = set() for group, kwargs in OPERATION_KWARGS.items(): global_arg_keys.update(kwargs.keys()) for op_module, op_func in iter_operations(): argspec = getargspec(op_func._pyinfra_op) for arg in argspec.args: assert arg not in global_arg_keys, \ '`{0}` argument found in {1}.{2} operation function'.format( arg, op_module.__name__, op_func.__name__, )
<commit_before><commit_msg>Add tests for operation arguments clashing with global arguments. This makes it possible to expand the global argument list with confidence. It's worth noting this shouldn't be done lightly, as it prevents any argument keys being used elsewhere.<commit_after>from glob import glob from importlib import import_module from inspect import getargspec, getmembers from os import path from types import FunctionType from unittest import TestCase from pyinfra import operations from pyinfra.api.operation_kwargs import OPERATION_KWARGS def _is_pyinfra_operation(module, key, value): return ( isinstance(value, FunctionType) and value.__module__ == module.__name__ and getattr(value, '_pyinfra_op', False) and not value.__name__.startswith('_') and not key.startswith('_') ) def iter_operations(): module_filenames = glob(path.join(path.dirname(operations.__file__), '*.py')) for module_name in sorted(module_filenames): module = import_module('pyinfra.operations.{0}'.format( path.basename(module_name)[:-3], )) for key, value in sorted(getmembers(module)): if _is_pyinfra_operation(module, key, value): yield module, value class TestOperationGlobalArguments(TestCase): def test_operations_do_not_use_global_arguments(self): global_arg_keys = set() for group, kwargs in OPERATION_KWARGS.items(): global_arg_keys.update(kwargs.keys()) for op_module, op_func in iter_operations(): argspec = getargspec(op_func._pyinfra_op) for arg in argspec.args: assert arg not in global_arg_keys, \ '`{0}` argument found in {1}.{2} operation function'.format( arg, op_module.__name__, op_func.__name__, )
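inspect.getargspec, used in this test, was long deprecated and removed in Python 3.11. An equivalent collision check on newer interpreters can use inspect.signature, which also sees keyword-only parameters. A sketch with a stand-in reserved set and example operation, not pyinfra's real data:

import inspect

RESERVED = {"sudo", "name"}  # stand-in for the global argument keys

def clashing_args(func):
    """Return parameter names of *func* that collide with RESERVED."""
    return sorted(set(inspect.signature(func).parameters) & RESERVED)

def example_op(state, host, name=None, present=True):
    pass

assert clashing_args(example_op) == ["name"]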
4defc023eddc8ccd2f324e5971412c14b931c346
apps/network/tests/test_routes/test_infrastructure.py
apps/network/tests/test_routes/test_infrastructure.py
def test_create_network(client): result = client.post("/networks/", data={"name": "test_network"}) assert result.status_code == 200 assert result.get_json() == {"msg": "Network created succesfully!"} def test_get_all_network(client): result = client.get("/networks/") assert result.status_code == 200 assert result.get_json() == {"networks": ["Net-Gama", "Net-Beta", "Net-Pi"]} def test_get_specific_network(client): result = client.get("/networks/464615") assert result.status_code == 200 assert result.get_json() == {"network": {"name": "Net-Gama", "id": "464615"}} def test_update_network(client): result = client.put("/networks/546313", data={"node": "{new_node}"}) assert result.status_code == 200 assert result.get_json() == {"msg": "Network was updated succesfully!"} def test_delete_network(client): result = client.delete("/networks/546313") assert result.status_code == 200 assert result.get_json() == {"msg": "Network was deleted succesfully!"} def test_create_autoscaling(client): result = client.post("/networks/autoscaling", data={"configs": "{auto-scaling_configs}"}) assert result.status_code == 200 assert result.get_json() == {"msg": "Network auto-scaling created succesfully!"} def test_get_all_autoscaling_conditions(client): result = client.get("/networks/autoscaling/") assert result.status_code == 200 assert result.get_json() == {"auto-scalings": ["Condition 1", "Condition 2", "Condition 3"]} def test_get_specific_autoscaling_condition(client): result = client.get("/networks/autoscaling/6413568") assert result.status_code == 200 assert result.get_json() == {"network": {"name": "Net-Gama", "id": "6413568"}} def test_update_autoscaling_condition(client): result = client.put("/networks/autoscaling/6413568", data={"autoscaling": "{new_autoscaling_condition}"}) assert result.status_code == 200 assert result.get_json() == {"msg": "Network auto-scaling was updated succesfully!"} def test_delete_autoscaling_condition(client): result = client.delete("/networks/autoscaling/6413568") assert result.status_code == 200 assert result.get_json() == {"msg": "Network auto-scaling was deleted succesfully!"}
ADD Network infrastructure unit tests
ADD Network infrastructure unit tests
Python
apache-2.0
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
ADD Network infrastructure unit tests
def test_create_network(client): result = client.post("/networks/", data={"name": "test_network"}) assert result.status_code == 200 assert result.get_json() == {"msg": "Network created succesfully!"} def test_get_all_network(client): result = client.get("/networks/") assert result.status_code == 200 assert result.get_json() == {"networks": ["Net-Gama", "Net-Beta", "Net-Pi"]} def test_get_specific_network(client): result = client.get("/networks/464615") assert result.status_code == 200 assert result.get_json() == {"network": {"name": "Net-Gama", "id": "464615"}} def test_update_network(client): result = client.put("/networks/546313", data={"node": "{new_node}"}) assert result.status_code == 200 assert result.get_json() == {"msg": "Network was updated succesfully!"} def test_delete_network(client): result = client.delete("/networks/546313") assert result.status_code == 200 assert result.get_json() == {"msg": "Network was deleted succesfully!"} def test_create_autoscaling(client): result = client.post("/networks/autoscaling", data={"configs": "{auto-scaling_configs}"}) assert result.status_code == 200 assert result.get_json() == {"msg": "Network auto-scaling created succesfully!"} def test_get_all_autoscaling_conditions(client): result = client.get("/networks/autoscaling/") assert result.status_code == 200 assert result.get_json() == {"auto-scalings": ["Condition 1", "Condition 2", "Condition 3"]} def test_get_specific_autoscaling_condition(client): result = client.get("/networks/autoscaling/6413568") assert result.status_code == 200 assert result.get_json() == {"network": {"name": "Net-Gama", "id": "6413568"}} def test_update_autoscaling_condition(client): result = client.put("/networks/autoscaling/6413568", data={"autoscaling": "{new_autoscaling_condition}"}) assert result.status_code == 200 assert result.get_json() == {"msg": "Network auto-scaling was updated succesfully!"} def test_delete_autoscaling_condition(client): result = client.delete("/networks/autoscaling/6413568") assert result.status_code == 200 assert result.get_json() == {"msg": "Network auto-scaling was deleted succesfully!"}
<commit_before><commit_msg>ADD Network infrastructure unit tests<commit_after>
def test_create_network(client): result = client.post("/networks/", data={"name": "test_network"}) assert result.status_code == 200 assert result.get_json() == {"msg": "Network created succesfully!"} def test_get_all_network(client): result = client.get("/networks/") assert result.status_code == 200 assert result.get_json() == {"networks": ["Net-Gama", "Net-Beta", "Net-Pi"]} def test_get_specific_network(client): result = client.get("/networks/464615") assert result.status_code == 200 assert result.get_json() == {"network": {"name": "Net-Gama", "id": "464615"}} def test_update_network(client): result = client.put("/networks/546313", data={"node": "{new_node}"}) assert result.status_code == 200 assert result.get_json() == {"msg": "Network was updated succesfully!"} def test_delete_network(client): result = client.delete("/networks/546313") assert result.status_code == 200 assert result.get_json() == {"msg": "Network was deleted succesfully!"} def test_create_autoscaling(client): result = client.post("/networks/autoscaling", data={"configs": "{auto-scaling_configs}"}) assert result.status_code == 200 assert result.get_json() == {"msg": "Network auto-scaling created succesfully!"} def test_get_all_autoscaling_conditions(client): result = client.get("/networks/autoscaling/") assert result.status_code == 200 assert result.get_json() == {"auto-scalings": ["Condition 1", "Condition 2", "Condition 3"]} def test_get_specific_autoscaling_condition(client): result = client.get("/networks/autoscaling/6413568") assert result.status_code == 200 assert result.get_json() == {"network": {"name": "Net-Gama", "id": "6413568"}} def test_update_autoscaling_condition(client): result = client.put("/networks/autoscaling/6413568", data={"autoscaling": "{new_autoscaling_condition}"}) assert result.status_code == 200 assert result.get_json() == {"msg": "Network auto-scaling was updated succesfully!"} def test_delete_autoscaling_condition(client): result = client.delete("/networks/autoscaling/6413568") assert result.status_code == 200 assert result.get_json() == {"msg": "Network auto-scaling was deleted succesfully!"}
ADD Network infrastructure unit tests def test_create_network(client): result = client.post("/networks/", data={"name": "test_network"}) assert result.status_code == 200 assert result.get_json() == {"msg": "Network created succesfully!"} def test_get_all_network(client): result = client.get("/networks/") assert result.status_code == 200 assert result.get_json() == {"networks": ["Net-Gama", "Net-Beta", "Net-Pi"]} def test_get_specific_network(client): result = client.get("/networks/464615") assert result.status_code == 200 assert result.get_json() == {"network": {"name": "Net-Gama", "id": "464615"}} def test_update_network(client): result = client.put("/networks/546313", data={"node": "{new_node}"}) assert result.status_code == 200 assert result.get_json() == {"msg": "Network was updated succesfully!"} def test_delete_network(client): result = client.delete("/networks/546313") assert result.status_code == 200 assert result.get_json() == {"msg": "Network was deleted succesfully!"} def test_create_autoscaling(client): result = client.post("/networks/autoscaling", data={"configs": "{auto-scaling_configs}"}) assert result.status_code == 200 assert result.get_json() == {"msg": "Network auto-scaling created succesfully!"} def test_get_all_autoscaling_conditions(client): result = client.get("/networks/autoscaling/") assert result.status_code == 200 assert result.get_json() == {"auto-scalings": ["Condition 1", "Condition 2", "Condition 3"]} def test_get_specific_autoscaling_condition(client): result = client.get("/networks/autoscaling/6413568") assert result.status_code == 200 assert result.get_json() == {"network": {"name": "Net-Gama", "id": "6413568"}} def test_update_autoscaling_condition(client): result = client.put("/networks/autoscaling/6413568", data={"autoscaling": "{new_autoscaling_condition}"}) assert result.status_code == 200 assert result.get_json() == {"msg": "Network auto-scaling was updated succesfully!"} def test_delete_autoscaling_condition(client): result = client.delete("/networks/autoscaling/6413568") assert result.status_code == 200 assert result.get_json() == {"msg": "Network auto-scaling was deleted succesfully!"}
<commit_before><commit_msg>ADD Network infrastructure unit tests<commit_after> def test_create_network(client): result = client.post("/networks/", data={"name": "test_network"}) assert result.status_code == 200 assert result.get_json() == {"msg": "Network created succesfully!"} def test_get_all_network(client): result = client.get("/networks/") assert result.status_code == 200 assert result.get_json() == {"networks": ["Net-Gama", "Net-Beta", "Net-Pi"]} def test_get_specific_network(client): result = client.get("/networks/464615") assert result.status_code == 200 assert result.get_json() == {"network": {"name": "Net-Gama", "id": "464615"}} def test_update_network(client): result = client.put("/networks/546313", data={"node": "{new_node}"}) assert result.status_code == 200 assert result.get_json() == {"msg": "Network was updated succesfully!"} def test_delete_network(client): result = client.delete("/networks/546313") assert result.status_code == 200 assert result.get_json() == {"msg": "Network was deleted succesfully!"} def test_create_autoscaling(client): result = client.post("/networks/autoscaling", data={"configs": "{auto-scaling_configs}"}) assert result.status_code == 200 assert result.get_json() == {"msg": "Network auto-scaling created succesfully!"} def test_get_all_autoscaling_conditions(client): result = client.get("/networks/autoscaling/") assert result.status_code == 200 assert result.get_json() == {"auto-scalings": ["Condition 1", "Condition 2", "Condition 3"]} def test_get_specific_autoscaling_condition(client): result = client.get("/networks/autoscaling/6413568") assert result.status_code == 200 assert result.get_json() == {"network": {"name": "Net-Gama", "id": "6413568"}} def test_update_autoscaling_condition(client): result = client.put("/networks/autoscaling/6413568", data={"autoscaling": "{new_autoscaling_condition}"}) assert result.status_code == 200 assert result.get_json() == {"msg": "Network auto-scaling was updated succesfully!"} def test_delete_autoscaling_condition(client): result = client.delete("/networks/autoscaling/6413568") assert result.status_code == 200 assert result.get_json() == {"msg": "Network auto-scaling was deleted succesfully!"}
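These tests depend on a client fixture that the record does not show. A typical pytest fixture for a Flask test client looks like the sketch below; create_app is a hypothetical application factory standing in for however the application under test is built:

import pytest
from app import create_app  # hypothetical application factory

@pytest.fixture
def client():
    app = create_app()
    app.config["TESTING"] = True
    # test_client() issues requests against the WSGI app directly,
    # without starting a real server.
    with app.test_client() as client:
        yield client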
0a4b52a0aaa4c244139d31d0ac96b877995d800c
examples/manage_node_labels.py
examples/manage_node_labels.py
# Copyright 2016 The Kubernetes Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from pprint import pprint from kubernetes import client, config def main(): """ Change labels of the "minikube" node: - Add label "foo" with value "bar". This will overwrite the "foo" label if it already exists. - Remove the label "baz" from the node. """ config.load_kube_config() api_instance = client.CoreV1Api() body = { "metadata": { "labels": { "foo": "bar", "baz": None} } } api_response = api_instance.patch_node("minikube", body) pprint(api_response) if __name__ == '__main__': main()
Add example about node label management
Add example about node label management Show how to add/remove/change node labels. Signed-off-by: Flavio Castelli <d1035a58d87d0b7918844ad2fd699273faf2f5d5@suse.com>
Python
apache-2.0
mbohlool/client-python,sebgoa/client-python,mbohlool/client-python,kubernetes-client/python,sebgoa/client-python,kubernetes-client/python,djkonro/client-python,djkonro/client-python
Add example about node label management Show how to add/remove/change node labels. Signed-off-by: Flavio Castelli <d1035a58d87d0b7918844ad2fd699273faf2f5d5@suse.com>
# Copyright 2016 The Kubernetes Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from pprint import pprint from kubernetes import client, config def main(): """ Change labels of the "minikube" node: - Add label "foo" with value "bar". This will overwrite the "foo" label if it already exists. - Remove the label "baz" from the node. """ config.load_kube_config() api_instance = client.CoreV1Api() body = { "metadata": { "labels": { "foo": "bar", "baz": None} } } api_response = api_instance.patch_node("minikube", body) pprint(api_response) if __name__ == '__main__': main()
<commit_before><commit_msg>Add example about node label management Show how to add/remove/change node labels. Signed-off-by: Flavio Castelli <d1035a58d87d0b7918844ad2fd699273faf2f5d5@suse.com><commit_after>
# Copyright 2016 The Kubernetes Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from pprint import pprint from kubernetes import client, config def main(): """ Change labels of the "minikube" node: - Add label "foo" with value "bar". This will overwrite the "foo" label if it already exists. - Remove the label "baz" from the node. """ config.load_kube_config() api_instance = client.CoreV1Api() body = { "metadata": { "labels": { "foo": "bar", "baz": None} } } api_response = api_instance.patch_node("minikube", body) pprint(api_response) if __name__ == '__main__': main()
Add example about node label management Show how to add/remove/change node labels. Signed-off-by: Flavio Castelli <d1035a58d87d0b7918844ad2fd699273faf2f5d5@suse.com># Copyright 2016 The Kubernetes Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from pprint import pprint from kubernetes import client, config def main(): """ Change labels of the "minikube" node: - Add label "foo" with value "bar". This will overwrite the "foo" label if it already exists. - Remove the label "baz" from the node. """ config.load_kube_config() api_instance = client.CoreV1Api() body = { "metadata": { "labels": { "foo": "bar", "baz": None} } } api_response = api_instance.patch_node("minikube", body) pprint(api_response) if __name__ == '__main__': main()
<commit_before><commit_msg>Add example about node label management Show how to add/remove/change node labels. Signed-off-by: Flavio Castelli <d1035a58d87d0b7918844ad2fd699273faf2f5d5@suse.com><commit_after># Copyright 2016 The Kubernetes Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from pprint import pprint from kubernetes import client, config def main(): """ Change labels of the "minikube" node: - Add label "foo" with value "bar". This will overwrite the "foo" label if it already exists. - Remove the label "baz" from the node. """ config.load_kube_config() api_instance = client.CoreV1Api() body = { "metadata": { "labels": { "foo": "bar", "baz": None} } } api_response = api_instance.patch_node("minikube", body) pprint(api_response) if __name__ == '__main__': main()
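Removing the "baz" label works because patch_node sends a merge-style patch in which a null value deletes the key (labels are a plain map, so strategic merge patch behaves like RFC 7386 JSON Merge Patch here). The semantics can be modelled with ordinary dictionaries; a simplified illustration, not the client's patch code:

def merge_patch(original, patch):
    """Apply JSON-merge-patch-style semantics (RFC 7386) to dicts."""
    result = dict(original)
    for key, value in patch.items():
        if value is None:
            result.pop(key, None)  # null removes the key
        elif isinstance(value, dict) and isinstance(result.get(key), dict):
            result[key] = merge_patch(result[key], value)
        else:
            result[key] = value
    return result

labels = {"baz": "old", "keep": "me"}
assert merge_patch(labels, {"foo": "bar", "baz": None}) == \
    {"keep": "me", "foo": "bar"}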
3052ea9df56c9a501685b6ccb7d820e96c119d15
easycrud/urls.py
easycrud/urls.py
from django.conf.urls import patterns, url from django.db.models.loading import get_models from .models import EasyCrudModel from .views import ListView, CreateView, DetailView, UpdateView, DeleteView def easycrud_urlpatterns(): model_list = [m for m in get_models() if issubclass(m, EasyCrudModel)] pattern_list = [] for model in model_list: name = model.model_name.replace(' ', '') url_list = [] url_list.append(url('^%s/$' % name, ListView.as_view(model=model), name='%s_list' % name)) url_list.append(url('^%s/(?P<pk>\d+)/$' % name, DetailView.as_view(model=model), name='%s_detail' % name)) if model.has_create: url_list.append(url('^%s/create/$' % name, CreateView.as_view(model=model), name='%s_create' % name)) if model.has_update: url_list.append(url('^%s/(?P<pk>\d+)/update/$' % name, UpdateView.as_view(model=model), name='%s_update' % name)) if model.has_delete: url_list.append(url('^%s/(?P<pk>\d+)/delete/$' % name, DeleteView.as_view(model=model), name='%s_delete' % name)) pattern_list += patterns('', *url_list) return pattern_list
Add function to compute url patterns
Add function to compute url patterns
Python
bsd-2-clause
dekkers/django-easycrud,dekkers/django-easycrud
Add function to compute url patterns
from django.conf.urls import patterns, url from django.db.models.loading import get_models from .models import EasyCrudModel from .views import ListView, CreateView, DetailView, UpdateView, DeleteView def easycrud_urlpatterns(): model_list = [m for m in get_models() if issubclass(m, EasyCrudModel)] pattern_list = [] for model in model_list: name = model.model_name.replace(' ', '') url_list = [] url_list.append(url('^%s/$' % name, ListView.as_view(model=model), name='%s_list' % name)) url_list.append(url('^%s/(?P<pk>\d+)/$' % name, DetailView.as_view(model=model), name='%s_detail' % name)) if model.has_create: url_list.append(url('^%s/create/$' % name, CreateView.as_view(model=model), name='%s_create' % name)) if model.has_update: url_list.append(url('^%s/(?P<pk>\d+)/update/$' % name, UpdateView.as_view(model=model), name='%s_update' % name)) if model.has_delete: url_list.append(url('^%s/(?P<pk>\d+)/delete/$' % name, DeleteView.as_view(model=model), name='%s_delete' % name)) pattern_list += patterns('', *url_list) return pattern_list
<commit_before><commit_msg>Add function to compute url patterns<commit_after>
from django.conf.urls import patterns, url from django.db.models.loading import get_models from .models import EasyCrudModel from .views import ListView, CreateView, DetailView, UpdateView, DeleteView def easycrud_urlpatterns(): model_list = [m for m in get_models() if issubclass(m, EasyCrudModel)] pattern_list = [] for model in model_list: name = model.model_name.replace(' ', '') url_list = [] url_list.append(url('^%s/$' % name, ListView.as_view(model=model), name='%s_list' % name)) url_list.append(url('^%s/(?P<pk>\d+)/$' % name, DetailView.as_view(model=model), name='%s_detail' % name)) if model.has_create: url_list.append(url('^%s/create/$' % name, CreateView.as_view(model=model), name='%s_create' % name)) if model.has_update: url_list.append(url('^%s/(?P<pk>\d+)/update/$' % name, UpdateView.as_view(model=model), name='%s_update' % name)) if model.has_delete: url_list.append(url('^%s/(?P<pk>\d+)/delete/$' % name, DeleteView.as_view(model=model), name='%s_delete' % name)) pattern_list += patterns('', *url_list) return pattern_list
Add function to compute url patternsfrom django.conf.urls import patterns, url from django.db.models.loading import get_models from .models import EasyCrudModel from .views import ListView, CreateView, DetailView, UpdateView, DeleteView def easycrud_urlpatterns(): model_list = [m for m in get_models() if issubclass(m, EasyCrudModel)] pattern_list = [] for model in model_list: name = model.model_name.replace(' ', '') url_list = [] url_list.append(url('^%s/$' % name, ListView.as_view(model=model), name='%s_list' % name)) url_list.append(url('^%s/(?P<pk>\d+)/$' % name, DetailView.as_view(model=model), name='%s_detail' % name)) if model.has_create: url_list.append(url('^%s/create/$' % name, CreateView.as_view(model=model), name='%s_create' % name)) if model.has_update: url_list.append(url('^%s/(?P<pk>\d+)/update/$' % name, UpdateView.as_view(model=model), name='%s_update' % name)) if model.has_delete: url_list.append(url('^%s/(?P<pk>\d+)/delete/$' % name, DeleteView.as_view(model=model), name='%s_delete' % name)) pattern_list += patterns('', *url_list) return pattern_list
<commit_before><commit_msg>Add function to compute url patterns<commit_after>from django.conf.urls import patterns, url from django.db.models.loading import get_models from .models import EasyCrudModel from .views import ListView, CreateView, DetailView, UpdateView, DeleteView def easycrud_urlpatterns(): model_list = [m for m in get_models() if issubclass(m, EasyCrudModel)] pattern_list = [] for model in model_list: name = model.model_name.replace(' ', '') url_list = [] url_list.append(url('^%s/$' % name, ListView.as_view(model=model), name='%s_list' % name)) url_list.append(url('^%s/(?P<pk>\d+)/$' % name, DetailView.as_view(model=model), name='%s_detail' % name)) if model.has_create: url_list.append(url('^%s/create/$' % name, CreateView.as_view(model=model), name='%s_create' % name)) if model.has_update: url_list.append(url('^%s/(?P<pk>\d+)/update/$' % name, UpdateView.as_view(model=model), name='%s_update' % name)) if model.has_delete: url_list.append(url('^%s/(?P<pk>\d+)/delete/$' % name, DeleteView.as_view(model=model), name='%s_delete' % name)) pattern_list += patterns('', *url_list) return pattern_list
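Both django.db.models.loading (removed in Django 1.9) and patterns() (removed in 1.10) date this module to old Django releases. On a current Django the same wiring would look roughly like the sketch below, reusing the module's own model and view imports; only the list and detail routes are shown:

from django.apps import apps
from django.urls import path

def easycrud_urlpatterns():
    # EasyCrudModel, ListView and DetailView as imported in the
    # original module.
    urlpatterns = []
    for model in apps.get_models():
        if not issubclass(model, EasyCrudModel):
            continue
        name = model.model_name.replace(" ", "")
        urlpatterns += [
            path(f"{name}/", ListView.as_view(model=model),
                 name=f"{name}_list"),
            path(f"{name}/<int:pk>/", DetailView.as_view(model=model),
                 name=f"{name}_detail"),
        ]
    return urlpatterns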
0568cefa17975c3f70f78cc1815262aa62c586e6
json_readable.py
json_readable.py
#!/usr/bin/env python import json, os for filename in os.listdir('.'): if os.path.isfile(filename) and os.path.splitext(filename)[1].lower() == '.json': with open(filename) as in_file: data = json.load(in_file) with open(filename, 'w') as out_file: json.dump(data, out_file, indent=4) # indent=4 makes the files human readable
Reformat all local .json files to be human readable
Reformat all local .json files to be human readable
Python
apache-2.0
cclauss/Ten-lines-or-less
Reformat all local .json files to be human readable
#!/usr/bin/env python import json, os for filename in os.listdir('.'): if os.path.isfile(filename) and os.path.splitext(filename)[1].lower() == '.json': with open(filename) as in_file: data = json.load(in_file) with open(filename, 'w') as out_file: json.dump(data, out_file, indent=4) # indent=4 makes the files human readable
<commit_before><commit_msg>Reformat all local .json files to be human readable<commit_after>
#!/usr/bin/env python import json, os for filename in os.listdir('.'): if os.path.isfile(filename) and os.path.splitext(filename)[1].lower() == '.json': with open(filename) as in_file: data = json.load(in_file) with open(filename, 'w') as out_file: json.dump(data, out_file, indent=4) # indent=4 makes the files human readable
Reformat all local .json files to be human readable#!/usr/bin/env python import json, os for filename in os.listdir('.'): if os.path.isfile(filename) and os.path.splitext(filename)[1].lower() == '.json': with open(filename) as in_file: data = json.load(in_file) with open(filename, 'w') as out_file: json.dump(data, out_file, indent=4) # indent=4 makes the files human readable
<commit_before><commit_msg>Reformat all local .json files to be human readable<commit_after>#!/usr/bin/env python import json, os for filename in os.listdir('.'): if os.path.isfile(filename) and os.path.splitext(filename)[1].lower() == '.json': with open(filename) as in_file: data = json.load(in_file) with open(filename, 'w') as out_file: json.dump(data, out_file, indent=4) # indent=4 makes the files human readable
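One caveat with rewriting files in place like this: on interpreters where dict ordering is not guaranteed (before Python 3.7), re-dumping can shuffle keys between runs. Passing sort_keys=True makes the output deterministic either way, which keeps diffs between runs minimal:

import json

def rewrite_pretty(filename):
    with open(filename) as in_file:
        data = json.load(in_file)
    with open(filename, "w") as out_file:
        # indent for readability, sort_keys for stable,
        # diff-friendly output across repeated runs.
        json.dump(data, out_file, indent=4, sort_keys=True)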
9928136681629b6d3e49bdd15839d1caad3feaf2
wooey/migrations/0009_wooeyjob_uuid.py
wooey/migrations/0009_wooeyjob_uuid.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import uuid, os def gen_uuid(apps, schema_editor): WooeyJob = apps.get_model('wooey', 'WooeyJob') for obj in WooeyJob.objects.all(): obj.uuid = uuid.uuid4() obj.save() class Migration(migrations.Migration): dependencies = [ ('wooey', '0008_short_param_admin'), ] operations = [ # Add the uuid field with unique=False for existing entries # due to a bug in migrations this will set all to the same uuid migrations.AddField( model_name='wooeyjob', name='uuid', field=models.CharField(default=uuid.uuid4, unique=False, max_length=255), ), # Set the uuids for existing records migrations.RunPython(gen_uuid, reverse_code=migrations.RunPython.noop), # Set to unique=True migrations.AlterField( model_name='wooeyjob', name='uuid', field=models.CharField(default=uuid.uuid4, unique=True, max_length=255), ), ]
Add migration for UUID (pre-fill)
Add migration for UUID (pre-fill)
Python
bsd-3-clause
wooey/Wooey,hottwaj/Wooey,alexkolar/Wooey,waytai/Wooey,waytai/Wooey,waytai/Wooey,wooey/Wooey,wooey/Wooey,hottwaj/Wooey,alexkolar/Wooey,hottwaj/Wooey,alexkolar/Wooey,wooey/Wooey
Add migration for UUID (pre-fill)
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import uuid, os def gen_uuid(apps, schema_editor): WooeyJob = apps.get_model('wooey', 'WooeyJob') for obj in WooeyJob.objects.all(): obj.uuid = uuid.uuid4() obj.save() class Migration(migrations.Migration): dependencies = [ ('wooey', '0008_short_param_admin'), ] operations = [ # Add the uuid field with unique=False for existing entries # due to a bug in migrations this will set all to the same uuid migrations.AddField( model_name='wooeyjob', name='uuid', field=models.CharField(default=uuid.uuid4, unique=False, max_length=255), ), # Set the uuids for existing records migrations.RunPython(gen_uuid, reverse_code=migrations.RunPython.noop), # Set to unique=True migrations.AlterField( model_name='wooeyjob', name='uuid', field=models.CharField(default=uuid.uuid4, unique=True, max_length=255), ), ]
<commit_before><commit_msg>Add migration for UUID (pre-fill)<commit_after>
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import uuid, os def gen_uuid(apps, schema_editor): WooeyJob = apps.get_model('wooey', 'WooeyJob') for obj in WooeyJob.objects.all(): obj.uuid = uuid.uuid4() obj.save() class Migration(migrations.Migration): dependencies = [ ('wooey', '0008_short_param_admin'), ] operations = [ # Add the uuid field with unique=False for existing entries # due to a bug in migrations this will set all to the same uuid migrations.AddField( model_name='wooeyjob', name='uuid', field=models.CharField(default=uuid.uuid4, unique=False, max_length=255), ), # Set the uuids for existing records migrations.RunPython(gen_uuid, reverse_code=migrations.RunPython.noop), # Set to unique=True migrations.AlterField( model_name='wooeyjob', name='uuid', field=models.CharField(default=uuid.uuid4, unique=True, max_length=255), ), ]
Add migration for UUID (pre-fill)# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import uuid, os def gen_uuid(apps, schema_editor): WooeyJob = apps.get_model('wooey', 'WooeyJob') for obj in WooeyJob.objects.all(): obj.uuid = uuid.uuid4() obj.save() class Migration(migrations.Migration): dependencies = [ ('wooey', '0008_short_param_admin'), ] operations = [ # Add the uuid field with unique=False for existing entries # due to a bug in migrations this will set all to the same uuid migrations.AddField( model_name='wooeyjob', name='uuid', field=models.CharField(default=uuid.uuid4, unique=False, max_length=255), ), # Set the uuids for existing records migrations.RunPython(gen_uuid, reverse_code=migrations.RunPython.noop), # Set to unique=True migrations.AlterField( model_name='wooeyjob', name='uuid', field=models.CharField(default=uuid.uuid4, unique=True, max_length=255), ), ]
<commit_before><commit_msg>Add migration for UUID (pre-fill)<commit_after># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import uuid, os def gen_uuid(apps, schema_editor): WooeyJob = apps.get_model('wooey', 'WooeyJob') for obj in WooeyJob.objects.all(): obj.uuid = uuid.uuid4() obj.save() class Migration(migrations.Migration): dependencies = [ ('wooey', '0008_short_param_admin'), ] operations = [ # Add the uuid field with unique=False for existing entries # due to a bug in migrations this will set all to the same uuid migrations.AddField( model_name='wooeyjob', name='uuid', field=models.CharField(default=uuid.uuid4, unique=False, max_length=255), ), # Set the uuids for existing records migrations.RunPython(gen_uuid, reverse_code=migrations.RunPython.noop), # Set to unique=True migrations.AlterField( model_name='wooeyjob', name='uuid', field=models.CharField(default=uuid.uuid4, unique=True, max_length=255), ), ]
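As the migration's own comments note, AddField gives every existing row the same value, so the unique constraint can only be applied after RunPython back-fills distinct UUIDs; hence the three-step AddField, RunPython, AlterField sequence. On Django 1.8+ a field like this would normally be declared as a native UUIDField. An illustrative declaration, not the project's actual model:

import uuid
from django.db import models

class WooeyJob(models.Model):
    # editable=False keeps the field out of forms; the callable
    # default is evaluated per object at creation time.
    uuid = models.UUIDField(default=uuid.uuid4, unique=True,
                            editable=False)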
0ceb2f6dfb7fd4e34ca2f8f286f2eb9d3d22cd57
bagpipe/bgp/vpn/rd_allocator.py
bagpipe/bgp/vpn/rd_allocator.py
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # encoding: utf-8 # Copyright 2014 Orange # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging import random from threading import Lock from bagpipe.bgp.common import utils from bagpipe.bgp.common.looking_glass import LookingGlass from exabgp.bgp.message.update.nlri.qualifier.rd import RouteDistinguisher log = logging.getLogger(__name__) class RDAllocator(LookingGlass): def __init__(self, prefix): self.prefix = prefix self.currentSuffix = random.randint(100, 200) self.rds = dict() self.lock = Lock() @utils.synchronized def getNewRD(self, description): if (self.currentSuffix == 2 ** 20): # Looking forward to the day will hit this one: log.error("All the 2^20 possible suffixes have been used at least " "once, and this piece of code doesn't know how to reuse " "them") raise Exception("Out of suffixes") rd = RouteDistinguisher.fromElements(self.prefix, self.currentSuffix) self.currentSuffix += 1 self.rds[rd] = description log.debug("Allocated route distinguisher %s for '%s'", rd, description) return rd @utils.synchronized def release(self, rd): if rd in self.rds: log.debug("Released route distinguisher %s ('%s')", rd, self.rds[rd]) del self.rds[rd] else: log.warn("Asked to release a non registered route distinguisher: " "%s", rd) def getLookingGlassLocalInfo(self, prefix): return self.rds
Add an allocator for Route Distinguishers
Add an allocator for Route Distinguishers
Python
apache-2.0
openstack/networking-bagpipe-l2,openstack/networking-bagpipe,openstack/networking-bagpipe-l2,openstack/networking-bagpipe,stackforge/networking-bagpipe-l2,stackforge/networking-bagpipe-l2
Add an allocator for Route Distinguishers
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # encoding: utf-8 # Copyright 2014 Orange # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging import random from threading import Lock from bagpipe.bgp.common import utils from bagpipe.bgp.common.looking_glass import LookingGlass from exabgp.bgp.message.update.nlri.qualifier.rd import RouteDistinguisher log = logging.getLogger(__name__) class RDAllocator(LookingGlass): def __init__(self, prefix): self.prefix = prefix self.currentSuffix = random.randint(100, 200) self.rds = dict() self.lock = Lock() @utils.synchronized def getNewRD(self, description): if (self.currentSuffix == 2 ** 20): # Looking forward to the day will hit this one: log.error("All the 2^20 possible suffixes have been used at least " "once, and this piece of code doesn't know how to reuse " "them") raise Exception("Out of suffixes") rd = RouteDistinguisher.fromElements(self.prefix, self.currentSuffix) self.currentSuffix += 1 self.rds[rd] = description log.debug("Allocated route distinguisher %s for '%s'", rd, description) return rd @utils.synchronized def release(self, rd): if rd in self.rds: log.debug("Released route distinguisher %s ('%s')", rd, self.rds[rd]) del self.rds[rd] else: log.warn("Asked to release a non registered route distinguisher: " "%s", rd) def getLookingGlassLocalInfo(self, prefix): return self.rds
<commit_before><commit_msg>Add an allocator for Route Distinguishers<commit_after>
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # encoding: utf-8 # Copyright 2014 Orange # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging import random from threading import Lock from bagpipe.bgp.common import utils from bagpipe.bgp.common.looking_glass import LookingGlass from exabgp.bgp.message.update.nlri.qualifier.rd import RouteDistinguisher log = logging.getLogger(__name__) class RDAllocator(LookingGlass): def __init__(self, prefix): self.prefix = prefix self.currentSuffix = random.randint(100, 200) self.rds = dict() self.lock = Lock() @utils.synchronized def getNewRD(self, description): if (self.currentSuffix == 2 ** 20): # Looking forward to the day will hit this one: log.error("All the 2^20 possible suffixes have been used at least " "once, and this piece of code doesn't know how to reuse " "them") raise Exception("Out of suffixes") rd = RouteDistinguisher.fromElements(self.prefix, self.currentSuffix) self.currentSuffix += 1 self.rds[rd] = description log.debug("Allocated route distinguisher %s for '%s'", rd, description) return rd @utils.synchronized def release(self, rd): if rd in self.rds: log.debug("Released route distinguisher %s ('%s')", rd, self.rds[rd]) del self.rds[rd] else: log.warn("Asked to release a non registered route distinguisher: " "%s", rd) def getLookingGlassLocalInfo(self, prefix): return self.rds
Add an allocator for Route Distinguishers# vim: tabstop=4 shiftwidth=4 softtabstop=4 # encoding: utf-8 # Copyright 2014 Orange # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging import random from threading import Lock from bagpipe.bgp.common import utils from bagpipe.bgp.common.looking_glass import LookingGlass from exabgp.bgp.message.update.nlri.qualifier.rd import RouteDistinguisher log = logging.getLogger(__name__) class RDAllocator(LookingGlass): def __init__(self, prefix): self.prefix = prefix self.currentSuffix = random.randint(100, 200) self.rds = dict() self.lock = Lock() @utils.synchronized def getNewRD(self, description): if (self.currentSuffix == 2 ** 20): # Looking forward to the day will hit this one: log.error("All the 2^20 possible suffixes have been used at least " "once, and this piece of code doesn't know how to reuse " "them") raise Exception("Out of suffixes") rd = RouteDistinguisher.fromElements(self.prefix, self.currentSuffix) self.currentSuffix += 1 self.rds[rd] = description log.debug("Allocated route distinguisher %s for '%s'", rd, description) return rd @utils.synchronized def release(self, rd): if rd in self.rds: log.debug("Released route distinguisher %s ('%s')", rd, self.rds[rd]) del self.rds[rd] else: log.warn("Asked to release a non registered route distinguisher: " "%s", rd) def getLookingGlassLocalInfo(self, prefix): return self.rds
<commit_before><commit_msg>Add an allocator for Route Distinguishers<commit_after># vim: tabstop=4 shiftwidth=4 softtabstop=4 # encoding: utf-8 # Copyright 2014 Orange # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging import random from threading import Lock from bagpipe.bgp.common import utils from bagpipe.bgp.common.looking_glass import LookingGlass from exabgp.bgp.message.update.nlri.qualifier.rd import RouteDistinguisher log = logging.getLogger(__name__) class RDAllocator(LookingGlass): def __init__(self, prefix): self.prefix = prefix self.currentSuffix = random.randint(100, 200) self.rds = dict() self.lock = Lock() @utils.synchronized def getNewRD(self, description): if (self.currentSuffix == 2 ** 20): # Looking forward to the day will hit this one: log.error("All the 2^20 possible suffixes have been used at least " "once, and this piece of code doesn't know how to reuse " "them") raise Exception("Out of suffixes") rd = RouteDistinguisher.fromElements(self.prefix, self.currentSuffix) self.currentSuffix += 1 self.rds[rd] = description log.debug("Allocated route distinguisher %s for '%s'", rd, description) return rd @utils.synchronized def release(self, rd): if rd in self.rds: log.debug("Released route distinguisher %s ('%s')", rd, self.rds[rd]) del self.rds[rd] else: log.warn("Asked to release a non registered route distinguisher: " "%s", rd) def getLookingGlassLocalInfo(self, prefix): return self.rds
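utils.synchronized is not shown in the record, but the pattern implied by the code (a Lock created in __init__ and a decorator on the mutating methods) usually serialises calls on that instance lock. A plausible shape for such a decorator; this is an assumption, and the real bagpipe helper may differ:

import functools

def synchronized(method):
    """Run *method* while holding the instance's self.lock."""
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        with self.lock:
            return method(self, *args, **kwargs)
    return wrapper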
9a475c84e66abc7acadcb9fca1be755597093190
corehq/apps/reports/tests/test_message_event_display.py
corehq/apps/reports/tests/test_message_event_display.py
from testil import eq from corehq.apps.sms.models import MessagingEvent from ..standard.message_event_display import get_status_display def test_get_status_display_escapes_error_message(): class fake_event: status = MessagingEvent.STATUS_ERROR error_code = None additional_error_text = "<&>" result = get_status_display(fake_event) eq(result, "Error - View details for more information. &lt;&amp;&gt;")
Add test for message status escaping
Add test for message status escaping
Python
bsd-3-clause
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
Add test for message status escaping
from testil import eq from corehq.apps.sms.models import MessagingEvent from ..standard.message_event_display import get_status_display def test_get_status_display_escapes_error_message(): class fake_event: status = MessagingEvent.STATUS_ERROR error_code = None additional_error_text = "<&>" result = get_status_display(fake_event) eq(result, "Error - View details for more information. &lt;&amp;&gt;")
<commit_before><commit_msg>Add test for message status escaping<commit_after>
from testil import eq from corehq.apps.sms.models import MessagingEvent from ..standard.message_event_display import get_status_display def test_get_status_display_escapes_error_message(): class fake_event: status = MessagingEvent.STATUS_ERROR error_code = None additional_error_text = "<&>" result = get_status_display(fake_event) eq(result, "Error - View details for more information. &lt;&amp;&gt;")
Add test for message status escapingfrom testil import eq from corehq.apps.sms.models import MessagingEvent from ..standard.message_event_display import get_status_display def test_get_status_display_escapes_error_message(): class fake_event: status = MessagingEvent.STATUS_ERROR error_code = None additional_error_text = "<&>" result = get_status_display(fake_event) eq(result, "Error - View details for more information. &lt;&amp;&gt;")
<commit_before><commit_msg>Add test for message status escaping<commit_after>from testil import eq from corehq.apps.sms.models import MessagingEvent from ..standard.message_event_display import get_status_display def test_get_status_display_escapes_error_message(): class fake_event: status = MessagingEvent.STATUS_ERROR error_code = None additional_error_text = "<&>" result = get_status_display(fake_event) eq(result, "Error - View details for more information. &lt;&amp;&gt;")
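The expected string in the test above is simply the HTML-entity form of `<&>`. The project's actual escaping helper is not part of this record (CommCare HQ most likely routes through Django's escaping); a standalone sketch of the same behaviour with `markupsafe` as an assumed stand-in, and `status_with_detail` a hypothetical name:

from markupsafe import escape


def status_with_detail(detail):
    # escape() entity-encodes <, > and & so user-controlled error text
    # cannot inject markup into the rendered report page.
    return "Error - View details for more information. %s" % escape(detail)


assert status_with_detail("<&>") == (
    "Error - View details for more information. &lt;&amp;&gt;")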
aa572e1a1e40e091882a565a9200e3242453cf22
green/test/test_integration.py
green/test/test_integration.py
import multiprocessing import os from pathlib import PurePath import subprocess import sys import tempfile from textwrap import dedent import unittest try: from unittest.mock import MagicMock except: from mock import MagicMock from green import cmdline class TestFinalizer(unittest.TestCase): def setUp(self): self.tmpdir = tempfile.mkdtemp() def test_finalizer(self): """ Test that the finalizer works on Python 3.8+ """ sub_tmpdir = tempfile.mkdtemp(dir=self.tmpdir) for i in range(multiprocessing.cpu_count() * 2): fh = open(os.path.join(sub_tmpdir, f"test_finalizer{i}.py"), "w") fh.write( dedent( f""" import unittest class Pass{i}(unittest.TestCase): def test_pass{i}(self): pass def msg(): print("finalizer worked") """ ) ) fh.close() args = [ sys.executable, "-m", "green.cmdline", "--finalizer=test_finalizer0.msg", "--maxtasksperchild=1", ] pythonpath = str(PurePath(__file__).parent.parent.parent) print(pythonpath) print( subprocess.run( args, cwd=sub_tmpdir, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env={"PYTHONPATH": pythonpath}, timeout=10, ).stdout.decode("utf-8") )
Add test for finalizer, which broke in 3.8 due to underlying change in Pool._repopulate_pool, which also manifests as a hang in long-running tests that manage to kill all the worker processes.
Add test for finalizer, which broke in 3.8 due to underlying change in Pool._repopulate_pool, which also manifests as a hang in long-running tests that manage to kill all the worker processes.
Python
mit
CleanCut/green,CleanCut/green
Add test for finalizer, which broke in 3.8 due to underlying change in Pool._repopulate_pool, which also manifests as a hang in long-running tests that manage to kill all the worker processes.
import multiprocessing import os from pathlib import PurePath import subprocess import sys import tempfile from textwrap import dedent import unittest try: from unittest.mock import MagicMock except: from mock import MagicMock from green import cmdline class TestFinalizer(unittest.TestCase): def setUp(self): self.tmpdir = tempfile.mkdtemp() def test_finalizer(self): """ Test that the finalizer works on Python 3.8+ """ sub_tmpdir = tempfile.mkdtemp(dir=self.tmpdir) for i in range(multiprocessing.cpu_count() * 2): fh = open(os.path.join(sub_tmpdir, f"test_finalizer{i}.py"), "w") fh.write( dedent( f""" import unittest class Pass{i}(unittest.TestCase): def test_pass{i}(self): pass def msg(): print("finalizer worked") """ ) ) fh.close() args = [ sys.executable, "-m", "green.cmdline", "--finalizer=test_finalizer0.msg", "--maxtasksperchild=1", ] pythonpath = str(PurePath(__file__).parent.parent.parent) print(pythonpath) print( subprocess.run( args, cwd=sub_tmpdir, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env={"PYTHONPATH": pythonpath}, timeout=10, ).stdout.decode("utf-8") )
<commit_before><commit_msg>Add test for finalizer, which broke in 3.8 due to underlying change in Pool._repopulate_pool, which also manifests as a hang in long-running tests that manage to kill all the worker processes.<commit_after>
import multiprocessing import os from pathlib import PurePath import subprocess import sys import tempfile from textwrap import dedent import unittest try: from unittest.mock import MagicMock except: from mock import MagicMock from green import cmdline class TestFinalizer(unittest.TestCase): def setUp(self): self.tmpdir = tempfile.mkdtemp() def test_finalizer(self): """ Test that the finalizer works on Python 3.8+ """ sub_tmpdir = tempfile.mkdtemp(dir=self.tmpdir) for i in range(multiprocessing.cpu_count() * 2): fh = open(os.path.join(sub_tmpdir, f"test_finalizer{i}.py"), "w") fh.write( dedent( f""" import unittest class Pass{i}(unittest.TestCase): def test_pass{i}(self): pass def msg(): print("finalizer worked") """ ) ) fh.close() args = [ sys.executable, "-m", "green.cmdline", "--finalizer=test_finalizer0.msg", "--maxtasksperchild=1", ] pythonpath = str(PurePath(__file__).parent.parent.parent) print(pythonpath) print( subprocess.run( args, cwd=sub_tmpdir, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env={"PYTHONPATH": pythonpath}, timeout=10, ).stdout.decode("utf-8") )
Add test for finalizer, which broke in 3.8 due to underlying change in Pool._repopulate_pool, which also manifests as a hang in long-running tests that manage to kill all the worker processes.import multiprocessing import os from pathlib import PurePath import subprocess import sys import tempfile from textwrap import dedent import unittest try: from unittest.mock import MagicMock except: from mock import MagicMock from green import cmdline class TestFinalizer(unittest.TestCase): def setUp(self): self.tmpdir = tempfile.mkdtemp() def test_finalizer(self): """ Test that the finalizer works on Python 3.8+ """ sub_tmpdir = tempfile.mkdtemp(dir=self.tmpdir) for i in range(multiprocessing.cpu_count() * 2): fh = open(os.path.join(sub_tmpdir, f"test_finalizer{i}.py"), "w") fh.write( dedent( f""" import unittest class Pass{i}(unittest.TestCase): def test_pass{i}(self): pass def msg(): print("finalizer worked") """ ) ) fh.close() args = [ sys.executable, "-m", "green.cmdline", "--finalizer=test_finalizer0.msg", "--maxtasksperchild=1", ] pythonpath = str(PurePath(__file__).parent.parent.parent) print(pythonpath) print( subprocess.run( args, cwd=sub_tmpdir, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env={"PYTHONPATH": pythonpath}, timeout=10, ).stdout.decode("utf-8") )
<commit_before><commit_msg>Add test for finalizer, which broke in 3.8 due to underlying change in Pool._repopulate_pool, which also manifests as a hang in long-running tests that manage to kill all the worker processes.<commit_after>import multiprocessing import os from pathlib import PurePath import subprocess import sys import tempfile from textwrap import dedent import unittest try: from unittest.mock import MagicMock except: from mock import MagicMock from green import cmdline class TestFinalizer(unittest.TestCase): def setUp(self): self.tmpdir = tempfile.mkdtemp() def test_finalizer(self): """ Test that the finalizer works on Python 3.8+ """ sub_tmpdir = tempfile.mkdtemp(dir=self.tmpdir) for i in range(multiprocessing.cpu_count() * 2): fh = open(os.path.join(sub_tmpdir, f"test_finalizer{i}.py"), "w") fh.write( dedent( f""" import unittest class Pass{i}(unittest.TestCase): def test_pass{i}(self): pass def msg(): print("finalizer worked") """ ) ) fh.close() args = [ sys.executable, "-m", "green.cmdline", "--finalizer=test_finalizer0.msg", "--maxtasksperchild=1", ] pythonpath = str(PurePath(__file__).parent.parent.parent) print(pythonpath) print( subprocess.run( args, cwd=sub_tmpdir, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env={"PYTHONPATH": pythonpath}, timeout=10, ).stdout.decode("utf-8") )
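The commit message above pins the regression on worker repopulation: with `--maxtasksperchild=1` every worker retires after one task, repeatedly forcing the pool through the `Pool._repopulate_pool` path that changed in Python 3.8. A small standalone illustration of that recycling behaviour, using plain `multiprocessing` and nothing from green:

import multiprocessing
import os


def worker_pid(_):
    return os.getpid()


if __name__ == "__main__":
    # maxtasksperchild=1 retires each worker after a single task, so the
    # pool keeps spawning replacements -- the repopulation path whose
    # Python 3.8 change broke green's finalizer hook.
    with multiprocessing.Pool(processes=2, maxtasksperchild=1) as pool:
        pids = pool.map(worker_pid, range(6))
    print(sorted(set(pids)))  # expect several distinct worker pids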
6a9d0d1b75cf9fa53b54964ba5d55d4534a7f3b1
tests/test_lexer.py
tests/test_lexer.py
import unittest import sure import lexer class TestLexer(unittest.TestCase): def _lex_data(self, data): lex = lexer.Lexer() lex.input(data) token_list = list(lex) return token_list def _assert_individual_token(self, input, expected_token_type, expected_token_value): l = self._lex_data(input) len(l).should.be.equal(1) tok = l[0] tok.type.should.be.equal(expected_token_type) tok.value.should.be.equal(expected_token_value) def test_empty(self): self._lex_data("").should.be.empty def test_keywords(self): for input_program, token in lexer.reserved_tokens.items(): self._assert_individual_token(input_program, token, input_program) def test_genid(self): self._assert_individual_token("koko", "GENID", "koko") self._assert_individual_token("a", "GENID", "a") self._assert_individual_token(2048 * "koko", "GENID", 2048 * "koko") def test_conid(self): self._assert_individual_token("Koko", "CONID", "Koko") self._assert_individual_token("A", "CONID", "A") self._assert_individual_token(2048 * "Koko", "CONID", 2048 * "Koko") def test_iconst(self): self._assert_individual_token("42", "ICONST", 42) self._assert_individual_token("17", "ICONST", 17) self._assert_individual_token("0", "ICONST", 0) self._assert_individual_token("00042", "ICONST", 42)
Add very basic lexer test that passes
Add very basic lexer test that passes
Python
mit
Renelvon/llama,dionyziz/llama,dionyziz/llama,Renelvon/llama
Add very basic lexer test that passes
import unittest import sure import lexer class TestLexer(unittest.TestCase): def _lex_data(self, data): lex = lexer.Lexer() lex.input(data) token_list = list(lex) return token_list def _assert_individual_token(self, input, expected_token_type, expected_token_value): l = self._lex_data(input) len(l).should.be.equal(1) tok = l[0] tok.type.should.be.equal(expected_token_type) tok.value.should.be.equal(expected_token_value) def test_empty(self): self._lex_data("").should.be.empty def test_keywords(self): for input_program, token in lexer.reserved_tokens.items(): self._assert_individual_token(input_program, token, input_program) def test_genid(self): self._assert_individual_token("koko", "GENID", "koko") self._assert_individual_token("a", "GENID", "a") self._assert_individual_token(2048 * "koko", "GENID", 2048 * "koko") def test_conid(self): self._assert_individual_token("Koko", "CONID", "Koko") self._assert_individual_token("A", "CONID", "A") self._assert_individual_token(2048 * "Koko", "CONID", 2048 * "Koko") def test_iconst(self): self._assert_individual_token("42", "ICONST", 42) self._assert_individual_token("17", "ICONST", 17) self._assert_individual_token("0", "ICONST", 0) self._assert_individual_token("00042", "ICONST", 42)
<commit_before><commit_msg>Add very basic lexer test that passes<commit_after>
import unittest import sure import lexer class TestLexer(unittest.TestCase): def _lex_data(self, data): lex = lexer.Lexer() lex.input(data) token_list = list(lex) return token_list def _assert_individual_token(self, input, expected_token_type, expected_token_value): l = self._lex_data(input) len(l).should.be.equal(1) tok = l[0] tok.type.should.be.equal(expected_token_type) tok.value.should.be.equal(expected_token_value) def test_empty(self): self._lex_data("").should.be.empty def test_keywords(self): for input_program, token in lexer.reserved_tokens.items(): self._assert_individual_token(input_program, token, input_program) def test_genid(self): self._assert_individual_token("koko", "GENID", "koko") self._assert_individual_token("a", "GENID", "a") self._assert_individual_token(2048 * "koko", "GENID", 2048 * "koko") def test_conid(self): self._assert_individual_token("Koko", "CONID", "Koko") self._assert_individual_token("A", "CONID", "A") self._assert_individual_token(2048 * "Koko", "CONID", 2048 * "Koko") def test_iconst(self): self._assert_individual_token("42", "ICONST", 42) self._assert_individual_token("17", "ICONST", 17) self._assert_individual_token("0", "ICONST", 0) self._assert_individual_token("00042", "ICONST", 42)
Add very basic lexer test that passesimport unittest import sure import lexer class TestLexer(unittest.TestCase): def _lex_data(self, data): lex = lexer.Lexer() lex.input(data) token_list = list(lex) return token_list def _assert_individual_token(self, input, expected_token_type, expected_token_value): l = self._lex_data(input) len(l).should.be.equal(1) tok = l[0] tok.type.should.be.equal(expected_token_type) tok.value.should.be.equal(expected_token_value) def test_empty(self): self._lex_data("").should.be.empty def test_keywords(self): for input_program, token in lexer.reserved_tokens.items(): self._assert_individual_token(input_program, token, input_program) def test_genid(self): self._assert_individual_token("koko", "GENID", "koko") self._assert_individual_token("a", "GENID", "a") self._assert_individual_token(2048 * "koko", "GENID", 2048 * "koko") def test_conid(self): self._assert_individual_token("Koko", "CONID", "Koko") self._assert_individual_token("A", "CONID", "A") self._assert_individual_token(2048 * "Koko", "CONID", 2048 * "Koko") def test_iconst(self): self._assert_individual_token("42", "ICONST", 42) self._assert_individual_token("17", "ICONST", 17) self._assert_individual_token("0", "ICONST", 0) self._assert_individual_token("00042", "ICONST", 42)
<commit_before><commit_msg>Add very basic lexer test that passes<commit_after>import unittest import sure import lexer class TestLexer(unittest.TestCase): def _lex_data(self, data): lex = lexer.Lexer() lex.input(data) token_list = list(lex) return token_list def _assert_individual_token(self, input, expected_token_type, expected_token_value): l = self._lex_data(input) len(l).should.be.equal(1) tok = l[0] tok.type.should.be.equal(expected_token_type) tok.value.should.be.equal(expected_token_value) def test_empty(self): self._lex_data("").should.be.empty def test_keywords(self): for input_program, token in lexer.reserved_tokens.items(): self._assert_individual_token(input_program, token, input_program) def test_genid(self): self._assert_individual_token("koko", "GENID", "koko") self._assert_individual_token("a", "GENID", "a") self._assert_individual_token(2048 * "koko", "GENID", 2048 * "koko") def test_conid(self): self._assert_individual_token("Koko", "CONID", "Koko") self._assert_individual_token("A", "CONID", "A") self._assert_individual_token(2048 * "Koko", "CONID", 2048 * "Koko") def test_iconst(self): self._assert_individual_token("42", "ICONST", 42) self._assert_individual_token("17", "ICONST", 17) self._assert_individual_token("0", "ICONST", 0) self._assert_individual_token("00042", "ICONST", 42)
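The suite above encodes three token rules: `GENID` starts lowercase, `CONID` starts uppercase, and `ICONST` is an integer that tolerates leading zeros. The project's `Lexer` class itself is not included in the record; a toy regex tokenizer reproducing just those three rules (keywords and the rest of the language deliberately omitted):

import re

TOKEN_SPEC = [
    ("ICONST", r"\d+"),      # integer constants, leading zeros allowed
    ("CONID", r"[A-Z]\w*"),  # constructor identifiers start uppercase
    ("GENID", r"[a-z]\w*"),  # general identifiers start lowercase
    ("SKIP", r"\s+"),
]
MASTER = re.compile("|".join("(?P<%s>%s)" % pair for pair in TOKEN_SPEC))


def tokenize(data):
    for m in MASTER.finditer(data):
        if m.lastgroup == "SKIP":
            continue
        value = int(m.group()) if m.lastgroup == "ICONST" else m.group()
        yield (m.lastgroup, value)


assert list(tokenize("Koko 00042")) == [("CONID", "Koko"), ("ICONST", 42)]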
bbdb59b9f94e4c0fa887e4ddb4cf3df3413f27fe
test/test_controller.py
test/test_controller.py
import mock from libmproxy import controller class TestMaster: def test_default_handler(self): m = controller.Master(None) msg = mock.MagicMock() m.handle(msg) assert msg.reply.call_count == 1
Test controller message default reply.
Test controller message default reply.
Python
mit
mhils/mitmproxy,mhils/mitmproxy,bazzinotti/mitmproxy,xbzbing/mitmproxy,scriptmediala/mitmproxy,gzzhanghao/mitmproxy,sethp-jive/mitmproxy,jvillacorta/mitmproxy,xaxa89/mitmproxy,devasia1000/mitmproxy,mosajjal/mitmproxy,sethp-jive/mitmproxy,ikoz/mitmproxy,Kriechi/mitmproxy,xtso520ok/mitmproxy,jvillacorta/mitmproxy,meizhoubao/mitmproxy,fimad/mitmproxy,MatthewShao/mitmproxy,rauburtin/mitmproxy,0xwindows/InfoLeak,vhaupert/mitmproxy,tdickers/mitmproxy,liorvh/mitmproxy,bazzinotti/mitmproxy,macmantrl/mitmproxy,mosajjal/mitmproxy,dweinstein/mitmproxy,dxq-git/mitmproxy,dufferzafar/mitmproxy,claimsmall/mitmproxy,cortesi/mitmproxy,laurmurclar/mitmproxy,StevenVanAcker/mitmproxy,ccccccccccc/mitmproxy,tekii/mitmproxy,syjzwjj/mitmproxy,ujjwal96/mitmproxy,MatthewShao/mitmproxy,ryoqun/mitmproxy,bazzinotti/mitmproxy,tekii/mitmproxy,elitest/mitmproxy,vhaupert/mitmproxy,meizhoubao/mitmproxy,devasia1000/mitmproxy,mitmproxy/mitmproxy,azureplus/mitmproxy,ADemonisis/mitmproxy,syjzwjj/mitmproxy,dufferzafar/mitmproxy,mhils/mitmproxy,elitest/mitmproxy,azureplus/mitmproxy,bltb/mitmproxy,ZeYt/mitmproxy,owers19856/mitmproxy,MatthewShao/mitmproxy,bltb/mitmproxy,tekii/mitmproxy,rauburtin/mitmproxy,Endika/mitmproxy,jvillacorta/mitmproxy,guiquanz/mitmproxy,onlywade/mitmproxy,vhaupert/mitmproxy,pombredanne/mitmproxy,StevenVanAcker/mitmproxy,inscriptionweb/mitmproxy,jpic/mitmproxy,xtso520ok/mitmproxy,Endika/mitmproxy,owers19856/mitmproxy,liorvh/mitmproxy,sethp-jive/mitmproxy,onlywade/mitmproxy,zbuc/mitmproxy,ikoz/mitmproxy,claimsmall/mitmproxy,Kriechi/mitmproxy,jvillacorta/mitmproxy,tdickers/mitmproxy,ikoz/mitmproxy,zlorb/mitmproxy,fimad/mitmproxy,cortesi/mitmproxy,Endika/mitmproxy,noikiy/mitmproxy,noikiy/mitmproxy,ADemonisis/mitmproxy,MatthewShao/mitmproxy,bazzinotti/mitmproxy,StevenVanAcker/mitmproxy,guiquanz/mitmproxy,mitmproxy/mitmproxy,tfeagle/mitmproxy,owers19856/mitmproxy,noikiy/mitmproxy,mitmproxy/mitmproxy,azureplus/mitmproxy,0x0mar/mitmproxy,owers19856/mitmproxy,tekii/mitmproxy,dweinstein/mitmproxy,zbuc/mitmproxy,cortesi/mitmproxy,devasia1000/anti_adblock,ZeYt/mitmproxy,legendtang/mitmproxy,mitmproxy/mitmproxy,dwfreed/mitmproxy,macmantrl/mitmproxy,claimsmall/mitmproxy,legendtang/mitmproxy,zlorb/mitmproxy,cortesi/mitmproxy,jpic/mitmproxy,pombredanne/mitmproxy,liorvh/mitmproxy,xbzbing/mitmproxy,xaxa89/mitmproxy,ccccccccccc/mitmproxy,gzzhanghao/mitmproxy,ddworken/mitmproxy,StevenVanAcker/mitmproxy,ddworken/mitmproxy,xaxa89/mitmproxy,meizhoubao/mitmproxy,jpic/mitmproxy,xtso520ok/mitmproxy,Fuzion24/mitmproxy,ParthGanatra/mitmproxy,Fuzion24/mitmproxy,zlorb/mitmproxy,xbzbing/mitmproxy,elitest/mitmproxy,xbzbing/mitmproxy,tfeagle/mitmproxy,gzzhanghao/mitmproxy,ddworken/mitmproxy,inscriptionweb/mitmproxy,mhils/mitmproxy,0xwindows/InfoLeak,byt3bl33d3r/mitmproxy,dufferzafar/mitmproxy,inscriptionweb/mitmproxy,pombredanne/mitmproxy,ccccccccccc/mitmproxy,0xwindows/InfoLeak,bltb/mitmproxy,ParthGanatra/mitmproxy,ParthGanatra/mitmproxy,onlywade/mitmproxy,byt3bl33d3r/mitmproxy,byt3bl33d3r/mitmproxy,ZeYt/mitmproxy,dufferzafar/mitmproxy,Endika/mitmproxy,mosajjal/mitmproxy,dweinstein/mitmproxy,ddworken/mitmproxy,noikiy/mitmproxy,sethp-jive/mitmproxy,pombredanne/mitmproxy,byt3bl33d3r/mitmproxy,macmantrl/mitmproxy,ikoz/mitmproxy,syjzwjj/mitmproxy,0x0mar/mitmproxy,mitmproxy/mitmproxy,gzzhanghao/mitmproxy,tfeagle/mitmproxy,fimad/mitmproxy,ujjwal96/mitmproxy,xaxa89/mitmproxy,legendtang/mitmproxy,scriptmediala/mitmproxy,ParthGanatra/mitmproxy,onlywade/mitmproxy,inscriptionweb/mitmproxy,dxq-git/mitmproxy,devasia1000/anti_adblock,ADemonisis/mitmpr
oxy,elitest/mitmproxy,azureplus/mitmproxy,ZeYt/mitmproxy,rauburtin/mitmproxy,claimsmall/mitmproxy,dxq-git/mitmproxy,syjzwjj/mitmproxy,zbuc/mitmproxy,tfeagle/mitmproxy,guiquanz/mitmproxy,macmantrl/mitmproxy,dwfreed/mitmproxy,Fuzion24/mitmproxy,scriptmediala/mitmproxy,fimad/mitmproxy,0x0mar/mitmproxy,guiquanz/mitmproxy,scriptmediala/mitmproxy,Kriechi/mitmproxy,liorvh/mitmproxy,tdickers/mitmproxy,legendtang/mitmproxy,ujjwal96/mitmproxy,dweinstein/mitmproxy,dxq-git/mitmproxy,ADemonisis/mitmproxy,ccccccccccc/mitmproxy,Fuzion24/mitmproxy,jpic/mitmproxy,laurmurclar/mitmproxy,dwfreed/mitmproxy,laurmurclar/mitmproxy,tdickers/mitmproxy,laurmurclar/mitmproxy,devasia1000/anti_adblock,ujjwal96/mitmproxy,0xwindows/InfoLeak,rauburtin/mitmproxy,zlorb/mitmproxy,ryoqun/mitmproxy,dwfreed/mitmproxy,devasia1000/mitmproxy,ryoqun/mitmproxy,devasia1000/mitmproxy,mosajjal/mitmproxy,ryoqun/mitmproxy,mhils/mitmproxy,Kriechi/mitmproxy,zbuc/mitmproxy,bltb/mitmproxy,meizhoubao/mitmproxy,vhaupert/mitmproxy
Test controller message default reply.
import mock from libmproxy import controller class TestMaster: def test_default_handler(self): m = controller.Master(None) msg = mock.MagicMock() m.handle(msg) assert msg.reply.call_count == 1
<commit_before><commit_msg>Test controller message default reply.<commit_after>
import mock from libmproxy import controller class TestMaster: def test_default_handler(self): m = controller.Master(None) msg = mock.MagicMock() m.handle(msg) assert msg.reply.call_count == 1
Test controller message default reply.import mock from libmproxy import controller class TestMaster: def test_default_handler(self): m = controller.Master(None) msg = mock.MagicMock() m.handle(msg) assert msg.reply.call_count == 1
<commit_before><commit_msg>Test controller message default reply.<commit_after>import mock from libmproxy import controller class TestMaster: def test_default_handler(self): m = controller.Master(None) msg = mock.MagicMock() m.handle(msg) assert msg.reply.call_count == 1
b3470f055f666c928839489e93945fc932abe19e
tests/test_compounds.py
tests/test_compounds.py
import unittest import inflect class test(unittest.TestCase): def test_compound_1(self): self.assertEqual(p.singular_noun('hello-out-there'),'hello-out-there') def test_compound_2(self): self.assertEqual(p.singular_noun('hello out there'),'hello out there') def test_compound_3(self): self.assertEqual(p.singular_noun('continue-to-operate'),'continue-to-operate') def test_compound_4(self): self.assertEqual(p.singular_noun('case of diapers'),'case of diapers') if __name__ == '__main__': p=inflect.engine() unittest.main()
Test Case for testing COMPOUNDS for singular_noun
Test Case for testing COMPOUNDS for singular_noun
Python
mit
jazzband/inflect,hugovk/inflect.py,pwdyson/inflect.py
Test Case for testing COMPOUNDS for singular_noun
import unittest import inflect class test(unittest.TestCase): def test_compound_1(self): self.assertEqual(p.singular_noun('hello-out-there'),'hello-out-there') def test_compound_2(self): self.assertEqual(p.singular_noun('hello out there'),'hello out there') def test_compound_3(self): self.assertEqual(p.singular_noun('continue-to-operate'),'continue-to-operate') def test_compound_4(self): self.assertEqual(p.singular_noun('case of diapers'),'case of diapers') if __name__ == '__main__': p=inflect.engine() unittest.main()
<commit_before><commit_msg>Test Case for testing COMPOUNDS for singular_noun<commit_after>
import unittest import inflect class test(unittest.TestCase): def test_compound_1(self): self.assertEqual(p.singular_noun('hello-out-there'),'hello-out-there') def test_compound_2(self): self.assertEqual(p.singular_noun('hello out there'),'hello out there') def test_compound_3(self): self.assertEqual(p.singular_noun('continue-to-operate'),'continue-to-operate') def test_compound_4(self): self.assertEqual(p.singular_noun('case of diapers'),'case of diapers') if __name__ == '__main__': p=inflect.engine() unittest.main()
Test Case for testing COMPOUNDS for singular_nounimport unittest import inflect class test(unittest.TestCase): def test_compound_1(self): self.assertEqual(p.singular_noun('hello-out-there'),'hello-out-there') def test_compound_2(self): self.assertEqual(p.singular_noun('hello out there'),'hello out there') def test_compound_3(self): self.assertEqual(p.singular_noun('continue-to-operate'),'continue-to-operate') def test_compound_4(self): self.assertEqual(p.singular_noun('case of diapers'),'case of diapers') if __name__ == '__main__': p=inflect.engine() unittest.main()
<commit_before><commit_msg>Test Case for testing COMPOUNDS for singular_noun<commit_after>import unittest import inflect class test(unittest.TestCase): def test_compound_1(self): self.assertEqual(p.singular_noun('hello-out-there'),'hello-out-there') def test_compound_2(self): self.assertEqual(p.singular_noun('hello out there'),'hello out there') def test_compound_3(self): self.assertEqual(p.singular_noun('continue-to-operate'),'continue-to-operate') def test_compound_4(self): self.assertEqual(p.singular_noun('case of diapers'),'case of diapers') if __name__ == '__main__': p=inflect.engine() unittest.main()
0641c1fba12ac7e59d28654ed4e9bf16816101c5
opps/__init__.py
opps/__init__.py
#!/usr/bin/env python # -*- coding: utf-8 -*- VERSION = (0, 0, 1) __version__ = ".".join(map(str, VERSION)) __status__ = "Development" __description__ = u"Opps CMS websites magazines and high-traffic" __author__ = u"Thiago Avelino" __credits__ = [] __email__ = u"opps-developers@googlegroups.com" __license__ = u"BSD" __copyright__ = u"Copyright 2013, Opps Project"
Add init var opps package
Add init var opps package
Python
mit
williamroot/opps,jeanmask/opps,YACOWS/opps,YACOWS/opps,opps/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,opps/opps,williamroot/opps,opps/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,YACOWS/opps,opps/opps
Add init var opps package
#!/usr/bin/env python # -*- coding: utf-8 -*- VERSION = (0, 0, 1) __version__ = ".".join(map(str, VERSION)) __status__ = "Development" __description__ = u"Opps CMS websites magazines and high-traffic" __author__ = u"Thiago Avelino" __credits__ = [] __email__ = u"opps-developers@googlegroups.com" __license__ = u"BSD" __copyright__ = u"Copyright 2013, Opps Project"
<commit_before><commit_msg>Add init var opps package<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- VERSION = (0, 0, 1) __version__ = ".".join(map(str, VERSION)) __status__ = "Development" __description__ = u"Opps CMS websites magazines and high-traffic" __author__ = u"Thiago Avelino" __credits__ = [] __email__ = u"opps-developers@googlegroups.com" __license__ = u"BSD" __copyright__ = u"Copyright 2013, Opps Project"
Add init var opps package#!/usr/bin/env python # -*- coding: utf-8 -*- VERSION = (0, 0, 1) __version__ = ".".join(map(str, VERSION)) __status__ = "Development" __description__ = u"Opps CMS websites magazines and high-traffic" __author__ = u"Thiago Avelino" __credits__ = [] __email__ = u"opps-developers@googlegroups.com" __license__ = u"BSD" __copyright__ = u"Copyright 2013, Opps Project"
<commit_before><commit_msg>Add init var opps package<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- VERSION = (0, 0, 1) __version__ = ".".join(map(str, VERSION)) __status__ = "Development" __description__ = u"Opps CMS websites magazines and high-traffic" __author__ = u"Thiago Avelino" __credits__ = [] __email__ = u"opps-developers@googlegroups.com" __license__ = u"BSD" __copyright__ = u"Copyright 2013, Opps Project"
afb541d39ae13526372c7480dcf775ce0480086c
oslo/__init__.py
oslo/__init__.py
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. __import__('pkg_resources').declare_namespace(__name__)
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. __import__('pkg_resources').declare_namespace(__name__)
Remove extraneous vim editor configuration comments
Remove extraneous vim editor configuration comments Change-Id: I2fb6d6174cf8b73ee663efa6718f4358be673869 Partial-Bug: #1229324
Python
apache-2.0
JioCloud/oslo.middleware,openstack/oslo.middleware,varunarya10/oslo.middleware
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. __import__('pkg_resources').declare_namespace(__name__) Remove extraneous vim editor configuration comments Change-Id: I2fb6d6174cf8b73ee663efa6718f4358be673869 Partial-Bug: #1229324
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. __import__('pkg_resources').declare_namespace(__name__)
<commit_before># vim: tabstop=4 shiftwidth=4 softtabstop=4 # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. __import__('pkg_resources').declare_namespace(__name__) <commit_msg>Remove extraneous vim editor configuration comments Change-Id: I2fb6d6174cf8b73ee663efa6718f4358be673869 Partial-Bug: #1229324<commit_after>
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. __import__('pkg_resources').declare_namespace(__name__)
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. __import__('pkg_resources').declare_namespace(__name__) Remove extraneous vim editor configuration comments Change-Id: I2fb6d6174cf8b73ee663efa6718f4358be673869 Partial-Bug: #1229324# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. __import__('pkg_resources').declare_namespace(__name__)
<commit_before># vim: tabstop=4 shiftwidth=4 softtabstop=4 # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. __import__('pkg_resources').declare_namespace(__name__) <commit_msg>Remove extraneous vim editor configuration comments Change-Id: I2fb6d6174cf8b73ee663efa6718f4358be673869 Partial-Bug: #1229324<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. __import__('pkg_resources').declare_namespace(__name__)
43eff2f524ed372134e6f50effb69c05a1df0b58
tools/crosstalk-test.py
tools/crosstalk-test.py
#!/usr/bin/env python # Open Pixel Control client: Test crosstalk between LED strips; # send each strip a different pattern, use a lot of low-brightness # pixels so that glitches show up clearly. import opc, time client = opc.Client('localhost:7890') while True: for strip in range(8): pixels = [ (40,40,40) ] * 512 for i in range(32): pixels[strip * 64 + i * 2] = (100,100,100) client.put_pixels(pixels) time.sleep(0.5)
Add a tool for provoking crosstalk glitches between channels
Add a tool for provoking crosstalk glitches between channels
Python
mit
jsestrich/fadecandy,pixelmatix/fadecandy,Protoneer/fadecandy,piers7/fadecandy,Jorgen-VikingGod/fadecandy,lincomatic/fadecandy,Protoneer/fadecandy,fragmede/fadecandy,nomis52/fadecandy,pixelmatix/fadecandy,scanlime/fadecandy,lincomatic/fadecandy,Jorgen-VikingGod/fadecandy,hakan42/fadecandy,pixelmatix/fadecandy,piers7/fadecandy,fragmede/fadecandy,scanlime/fadecandy,lincomatic/fadecandy,scanlime/fadecandy,poe/fadecandy,poe/fadecandy,piers7/fadecandy,scanlime/fadecandy,poe/fadecandy,PimentNoir/fadecandy,PimentNoir/fadecandy,scanlime/fadecandy,adam-back/fadecandy,fragmede/fadecandy,poe/fadecandy,hakan42/fadecandy,hakan42/fadecandy,PimentNoir/fadecandy,hakan42/fadecandy,adam-back/fadecandy,scanlime/fadecandy,fragmede/fadecandy,pixelmatix/fadecandy,nomis52/fadecandy,poe/fadecandy,adam-back/fadecandy,lincomatic/fadecandy,nomis52/fadecandy,jsestrich/fadecandy,Jorgen-VikingGod/fadecandy,Protoneer/fadecandy,hakan42/fadecandy,adam-back/fadecandy,fragmede/fadecandy,PimentNoir/fadecandy,pixelmatix/fadecandy,piers7/fadecandy,jsestrich/fadecandy,nomis52/fadecandy,piers7/fadecandy,nomis52/fadecandy,PimentNoir/fadecandy,fragmede/fadecandy,fragmede/fadecandy,Protoneer/fadecandy,Protoneer/fadecandy,lincomatic/fadecandy,nomis52/fadecandy,jsestrich/fadecandy,nomis52/fadecandy,lincomatic/fadecandy,poe/fadecandy,lincomatic/fadecandy,poe/fadecandy,PimentNoir/fadecandy,fragmede/fadecandy,jsestrich/fadecandy,Jorgen-VikingGod/fadecandy,adam-back/fadecandy,nomis52/fadecandy,Jorgen-VikingGod/fadecandy,lincomatic/fadecandy,poe/fadecandy
Add a tool for provoking crosstalk glitches between channels
#!/usr/bin/env python # Open Pixel Control client: Test crosstalk between LED strips; # send each strip a different pattern, use a lot of low-brightness # pixels so that glitches show up clearly. import opc, time client = opc.Client('localhost:7890') while True: for strip in range(8): pixels = [ (40,40,40) ] * 512 for i in range(32): pixels[strip * 64 + i * 2] = (100,100,100) client.put_pixels(pixels) time.sleep(0.5)
<commit_before><commit_msg>Add a tool for provoking crosstalk glitches between channels<commit_after>
#!/usr/bin/env python # Open Pixel Control client: Test crosstalk between LED strips; # send each strip a different pattern, use a lot of low-brightness # pixels so that glitches show up clearly. import opc, time client = opc.Client('localhost:7890') while True: for strip in range(8): pixels = [ (40,40,40) ] * 512 for i in range(32): pixels[strip * 64 + i * 2] = (100,100,100) client.put_pixels(pixels) time.sleep(0.5)
Add a tool for provoking crosstalk glitches between channels#!/usr/bin/env python # Open Pixel Control client: Test crosstalk between LED strips; # send each strip a different pattern, use a lot of low-brightness # pixels so that glitches show up clearly. import opc, time client = opc.Client('localhost:7890') while True: for strip in range(8): pixels = [ (40,40,40) ] * 512 for i in range(32): pixels[strip * 64 + i * 2] = (100,100,100) client.put_pixels(pixels) time.sleep(0.5)
<commit_before><commit_msg>Add a tool for provoking crosstalk glitches between channels<commit_after>#!/usr/bin/env python # Open Pixel Control client: Test crosstalk between LED strips; # send each strip a different pattern, use a lot of low-brightness # pixels so that glitches show up clearly. import opc, time client = opc.Client('localhost:7890') while True: for strip in range(8): pixels = [ (40,40,40) ] * 512 for i in range(32): pixels[strip * 64 + i * 2] = (100,100,100) client.put_pixels(pixels) time.sleep(0.5)
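Each `put_pixels()` call above ships one Open Pixel Control frame over the socket. The `opc` client module is not included in this record; going by the published OPC spec, the frame it would emit looks like this (a sketch, not the library's code):

import struct


def set_pixels_frame(pixels, channel=0):
    # OPC framing: channel byte, command byte (0 = "set pixel colours"),
    # 16-bit big-endian payload length, then 3 bytes (R, G, B) per pixel.
    payload = bytes(bytearray(c for rgb in pixels for c in rgb))
    return struct.pack(">BBH", channel, 0, len(payload)) + payload


frame = set_pixels_frame([(40, 40, 40), (100, 100, 100)])
assert frame[:4] == struct.pack(">BBH", 0, 0, 6)  # header for 2 pixels
assert len(frame) == 4 + 6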
4f26177ef1bd75478be911462cfa28d23ffa6b7d
dbaas/workflow/tests/test_host_provider/test_provider.py
dbaas/workflow/tests/test_host_provider/test_provider.py
from mock import patch, MagicMock from workflow.steps.util.host_provider import Provider from physical.tests import factory as physical_factory from workflow.tests.test_host_provider import BaseCreateVirtualMachineTestCase from dbaas_credentials.tests import factory as credential_factory from dbaas_credentials.models import CredentialType from requests.models import Response class StartTestCase(BaseCreateVirtualMachineTestCase): def setUp(self): super(StartTestCase, self).setUp() self.env = physical_factory.EnvironmentFactory.create( name='fake_env' ) self.provider = Provider(self.instance, self.env) self.credential = credential_factory.CredentialFactory.create( integration_type__name='HOST_PROVIDER', integration_type__type=CredentialType.HOST_PROVIDER, endpoint='fake_endpoint', user='fake_user', password='fake_password', project='fake_project' ) self.credential.environments.add(self.env) self.host = physical_factory.HostFactory.create( identifier='fake_identifier1' ) self.instance.hostname = self.host self.instance.save() @patch('workflow.steps.util.host_provider.post') def test_params(self, post_mock): self.provider.start() self.assertTrue(post_mock.called) post_params = post_mock.call_args self.assertEqual( post_params[0][0], 'fake_endpoint/fake_project/fake_env/host/start' ) expected_json = { 'host_id': 'fake_identifier1' } self.assertDictEqual(post_params[1]['json'], expected_json) self.assertEqual(post_params[1]['auth'], ('fake_user', 'fake_password')) @patch('workflow.steps.util.host_provider.post') def test_200(self, post_mock): post_mock.status_code = 200 resp = self.provider.start() self.assertTrue(resp) @patch('workflow.steps.util.host_provider.post') def test_404(self, post_mock): fake_response = MagicMock(spec=Response) fake_response.status_code = 404 post_mock.return_value = fake_response resp = self.provider.start() self.assertFalse(resp) @patch('workflow.steps.util.host_provider.post') def test_500(self, post_mock): post_mock.status_code = 500 resp = self.provider.start() self.assertFalse(resp)
Add test for provider(not finished)
Add test for provider(not finished)
Python
bsd-3-clause
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
Add test for provider(not finished)
from mock import patch, MagicMock from workflow.steps.util.host_provider import Provider from physical.tests import factory as physical_factory from workflow.tests.test_host_provider import BaseCreateVirtualMachineTestCase from dbaas_credentials.tests import factory as credential_factory from dbaas_credentials.models import CredentialType from requests.models import Response class StartTestCase(BaseCreateVirtualMachineTestCase): def setUp(self): super(StartTestCase, self).setUp() self.env = physical_factory.EnvironmentFactory.create( name='fake_env' ) self.provider = Provider(self.instance, self.env) self.credential = credential_factory.CredentialFactory.create( integration_type__name='HOST_PROVIDER', integration_type__type=CredentialType.HOST_PROVIDER, endpoint='fake_endpoint', user='fake_user', password='fake_password', project='fake_project' ) self.credential.environments.add(self.env) self.host = physical_factory.HostFactory.create( identifier='fake_identifier1' ) self.instance.hostname = self.host self.instance.save() @patch('workflow.steps.util.host_provider.post') def test_params(self, post_mock): self.provider.start() self.assertTrue(post_mock.called) post_params = post_mock.call_args self.assertEqual( post_params[0][0], 'fake_endpoint/fake_project/fake_env/host/start' ) expected_json = { 'host_id': 'fake_identifier1' } self.assertDictEqual(post_params[1]['json'], expected_json) self.assertEqual(post_params[1]['auth'], ('fake_user', 'fake_password')) @patch('workflow.steps.util.host_provider.post') def test_200(self, post_mock): post_mock.status_code = 200 resp = self.provider.start() self.assertTrue(resp) @patch('workflow.steps.util.host_provider.post') def test_404(self, post_mock): fake_response = MagicMock(spec=Response) fake_response.status_code = 404 post_mock.return_value = fake_response resp = self.provider.start() self.assertFalse(resp) @patch('workflow.steps.util.host_provider.post') def test_500(self, post_mock): post_mock.status_code = 500 resp = self.provider.start() self.assertFalse(resp)
<commit_before><commit_msg>Add test for provider(not finished)<commit_after>
from mock import patch, MagicMock from workflow.steps.util.host_provider import Provider from physical.tests import factory as physical_factory from workflow.tests.test_host_provider import BaseCreateVirtualMachineTestCase from dbaas_credentials.tests import factory as credential_factory from dbaas_credentials.models import CredentialType from requests.models import Response class StartTestCase(BaseCreateVirtualMachineTestCase): def setUp(self): super(StartTestCase, self).setUp() self.env = physical_factory.EnvironmentFactory.create( name='fake_env' ) self.provider = Provider(self.instance, self.env) self.credential = credential_factory.CredentialFactory.create( integration_type__name='HOST_PROVIDER', integration_type__type=CredentialType.HOST_PROVIDER, endpoint='fake_endpoint', user='fake_user', password='fake_password', project='fake_project' ) self.credential.environments.add(self.env) self.host = physical_factory.HostFactory.create( identifier='fake_identifier1' ) self.instance.hostname = self.host self.instance.save() @patch('workflow.steps.util.host_provider.post') def test_params(self, post_mock): self.provider.start() self.assertTrue(post_mock.called) post_params = post_mock.call_args self.assertEqual( post_params[0][0], 'fake_endpoint/fake_project/fake_env/host/start' ) expected_json = { 'host_id': 'fake_identifier1' } self.assertDictEqual(post_params[1]['json'], expected_json) self.assertEqual(post_params[1]['auth'], ('fake_user', 'fake_password')) @patch('workflow.steps.util.host_provider.post') def test_200(self, post_mock): post_mock.status_code = 200 resp = self.provider.start() self.assertTrue(resp) @patch('workflow.steps.util.host_provider.post') def test_404(self, post_mock): fake_response = MagicMock(spec=Response) fake_response.status_code = 404 post_mock.return_value = fake_response resp = self.provider.start() self.assertFalse(resp) @patch('workflow.steps.util.host_provider.post') def test_500(self, post_mock): post_mock.status_code = 500 resp = self.provider.start() self.assertFalse(resp)
Add test for provider(not finished)from mock import patch, MagicMock from workflow.steps.util.host_provider import Provider from physical.tests import factory as physical_factory from workflow.tests.test_host_provider import BaseCreateVirtualMachineTestCase from dbaas_credentials.tests import factory as credential_factory from dbaas_credentials.models import CredentialType from requests.models import Response class StartTestCase(BaseCreateVirtualMachineTestCase): def setUp(self): super(StartTestCase, self).setUp() self.env = physical_factory.EnvironmentFactory.create( name='fake_env' ) self.provider = Provider(self.instance, self.env) self.credential = credential_factory.CredentialFactory.create( integration_type__name='HOST_PROVIDER', integration_type__type=CredentialType.HOST_PROVIDER, endpoint='fake_endpoint', user='fake_user', password='fake_password', project='fake_project' ) self.credential.environments.add(self.env) self.host = physical_factory.HostFactory.create( identifier='fake_identifier1' ) self.instance.hostname = self.host self.instance.save() @patch('workflow.steps.util.host_provider.post') def test_params(self, post_mock): self.provider.start() self.assertTrue(post_mock.called) post_params = post_mock.call_args self.assertEqual( post_params[0][0], 'fake_endpoint/fake_project/fake_env/host/start' ) expected_json = { 'host_id': 'fake_identifier1' } self.assertDictEqual(post_params[1]['json'], expected_json) self.assertEqual(post_params[1]['auth'], ('fake_user', 'fake_password')) @patch('workflow.steps.util.host_provider.post') def test_200(self, post_mock): post_mock.status_code = 200 resp = self.provider.start() self.assertTrue(resp) @patch('workflow.steps.util.host_provider.post') def test_404(self, post_mock): fake_response = MagicMock(spec=Response) fake_response.status_code = 404 post_mock.return_value = fake_response resp = self.provider.start() self.assertFalse(resp) @patch('workflow.steps.util.host_provider.post') def test_500(self, post_mock): post_mock.status_code = 500 resp = self.provider.start() self.assertFalse(resp)
<commit_before><commit_msg>Add test for provider(not finished)<commit_after>from mock import patch, MagicMock from workflow.steps.util.host_provider import Provider from physical.tests import factory as physical_factory from workflow.tests.test_host_provider import BaseCreateVirtualMachineTestCase from dbaas_credentials.tests import factory as credential_factory from dbaas_credentials.models import CredentialType from requests.models import Response class StartTestCase(BaseCreateVirtualMachineTestCase): def setUp(self): super(StartTestCase, self).setUp() self.env = physical_factory.EnvironmentFactory.create( name='fake_env' ) self.provider = Provider(self.instance, self.env) self.credential = credential_factory.CredentialFactory.create( integration_type__name='HOST_PROVIDER', integration_type__type=CredentialType.HOST_PROVIDER, endpoint='fake_endpoint', user='fake_user', password='fake_password', project='fake_project' ) self.credential.environments.add(self.env) self.host = physical_factory.HostFactory.create( identifier='fake_identifier1' ) self.instance.hostname = self.host self.instance.save() @patch('workflow.steps.util.host_provider.post') def test_params(self, post_mock): self.provider.start() self.assertTrue(post_mock.called) post_params = post_mock.call_args self.assertEqual( post_params[0][0], 'fake_endpoint/fake_project/fake_env/host/start' ) expected_json = { 'host_id': 'fake_identifier1' } self.assertDictEqual(post_params[1]['json'], expected_json) self.assertEqual(post_params[1]['auth'], ('fake_user', 'fake_password')) @patch('workflow.steps.util.host_provider.post') def test_200(self, post_mock): post_mock.status_code = 200 resp = self.provider.start() self.assertTrue(resp) @patch('workflow.steps.util.host_provider.post') def test_404(self, post_mock): fake_response = MagicMock(spec=Response) fake_response.status_code = 404 post_mock.return_value = fake_response resp = self.provider.start() self.assertFalse(resp) @patch('workflow.steps.util.host_provider.post') def test_500(self, post_mock): post_mock.status_code = 500 resp = self.provider.start() self.assertFalse(resp)
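One detail worth noting in this (self-described unfinished) record: `test_404` configures the response through `post_mock.return_value`, while `test_200` and `test_500` set `status_code` on the patched callable itself, an attribute the code under test never reads. The usual pattern for stubbing a `requests.post` response, shown against a hypothetical helper:

from unittest import mock

import requests


def host_started(url):
    return requests.post(url).status_code == 200


with mock.patch("requests.post") as post_mock:
    # Configure what post() *returns*; attributes set directly on
    # post_mock belong to the mock callable, not to the response.
    post_mock.return_value.status_code = 200
    assert host_started("http://fake-endpoint/host/start")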
769c1d808efa7940b3890f37b3f422b2194ab269
ehrcorral/herd.py
ehrcorral/herd.py
"""Base class for a herd - a group of records""" class Herd(): """You need: - validate names (check for commas, weird chars, convert to unicode/ascii?) * remove Mrs, PhD, Ms., etc. * check for commas, weird chars * convert to unicode/ascii? - validate that you only have certain field names in the incoming dict - parse names into first, last, prefix, suffix """ pass
Add skeleton file for Herd class
Add skeleton file for Herd class
Python
isc
nsh87/ehrcorral
Add skeleton file for Herd class
"""Base class for a herd - a group of records""" class Herd(): """You need: - validate names (check for commas, weird chars, convert to unicode/ascii?) * remove Mrs, PhD, Ms., etc. * check for commas, weird chars * convert to unicode/ascii? - validate that you only have certain field names in the incoming dict - parse names into first, last, prefix, suffix """ pass
<commit_before><commit_msg>Add skeleton file for Herd class<commit_after>
"""Base class for a herd - a group of records""" class Herd(): """You need: - validate names (check for commas, weird chars, convert to unicode/ascii?) * remove Mrs, PhD, Ms., etc. * check for commas, weird chars * convert to unicode/ascii? - validate that you only have certain field names in the incoming dict - parse names into first, last, prefix, suffix """ pass
Add skeleton file for Herd class"""Base class for a herd - a group of records""" class Herd(): """You need: - validate names (check for commas, weird chars, convert to unicode/ascii?) * remove Mrs, PhD, Ms., etc. * check for commas, weird chars * convert to unicode/ascii? - validate that you only have certain field names in the incoming dict - parse names into first, last, prefix, suffix """ pass
<commit_before><commit_msg>Add skeleton file for Herd class<commit_after>"""Base class for a herd - a group of records""" class Herd(): """You need: - validate names (check for commas, weird chars, convert to unicode/ascii?) * remove Mrs, PhD, Ms., etc. * check for commas, weird chars * convert to unicode/ascii? - validate that you only have certain field names in the incoming dict - parse names into first, last, prefix, suffix """ pass
ecf16ee24b076d93475d212ca1fd7efdca6f2c19
openstack/tests/functional/telemetry/v2/test_meter.py
openstack/tests/functional/telemetry/v2/test_meter.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from openstack.tests.functional import base class TestMeter(base.BaseFunctionalTest): def test_list(self): names = set([o.name for o in self.conn.telemetry.meters()]) self.assertIn('storage.objects', names)
Add functional tests for telemetry meter
Add functional tests for telemetry meter Change-Id: I409e92761a400e12558338e7cba9edacf0b57f13
Python
apache-2.0
mtougeron/python-openstacksdk,mtougeron/python-openstacksdk,briancurtin/python-openstacksdk,dudymas/python-openstacksdk,dudymas/python-openstacksdk,dtroyer/python-openstacksdk,dtroyer/python-openstacksdk,stackforge/python-openstacksdk,openstack/python-openstacksdk,briancurtin/python-openstacksdk,stackforge/python-openstacksdk,openstack/python-openstacksdk
Add functional tests for telemetry meter Change-Id: I409e92761a400e12558338e7cba9edacf0b57f13
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from openstack.tests.functional import base class TestMeter(base.BaseFunctionalTest): def test_list(self): names = set([o.name for o in self.conn.telemetry.meters()]) self.assertIn('storage.objects', names)
<commit_before><commit_msg>Add functional tests for telemetry meter Change-Id: I409e92761a400e12558338e7cba9edacf0b57f13<commit_after>
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from openstack.tests.functional import base class TestMeter(base.BaseFunctionalTest): def test_list(self): names = set([o.name for o in self.conn.telemetry.meters()]) self.assertIn('storage.objects', names)
Add functional tests for telemetry meter Change-Id: I409e92761a400e12558338e7cba9edacf0b57f13# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from openstack.tests.functional import base class TestMeter(base.BaseFunctionalTest): def test_list(self): names = set([o.name for o in self.conn.telemetry.meters()]) self.assertIn('storage.objects', names)
<commit_before><commit_msg>Add functional tests for telemetry meter Change-Id: I409e92761a400e12558338e7cba9edacf0b57f13<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from openstack.tests.functional import base class TestMeter(base.BaseFunctionalTest): def test_list(self): names = set([o.name for o in self.conn.telemetry.meters()]) self.assertIn('storage.objects', names)
5f6616ba17a44552e31bdda4028336a3f23bb44d
scripts/analyze-project-deps.py
scripts/analyze-project-deps.py
import os import re from use_lldb_suite import lldb_root src_dir = os.path.join(lldb_root, "source") inc_dir = os.path.join(lldb_root, "include") src_map = {} include_regex = re.compile('#include \"(lldb(.*/)+).*\"') def scan_deps(this_dir, file): includes = set() with open(file) as f: for line in list(f): m = include_regex.match(line) if m is not None: relative = m.groups()[0].rstrip("/") if relative != this_dir: includes.add(relative) return includes def insert_or_add_mapping(base, deps): global src_map if len(deps) > 0: if base in src_map: existing_deps = src_map[base] existing_deps.update(deps) else: src_map[base] = deps for (base, dirs, files) in os.walk(inc_dir): dir = os.path.basename(base) relative = os.path.relpath(base, inc_dir) inc_files = filter(lambda x : os.path.splitext(x)[1] in [".h"], files) deps = set() for inc in inc_files: inc_path = os.path.join(base, inc) deps.update(scan_deps(relative, inc_path)) insert_or_add_mapping(relative, deps) for (base, dirs, files) in os.walk(src_dir): dir = os.path.basename(base) relative = os.path.relpath(base, src_dir) src_files = filter(lambda x : os.path.splitext(x)[1] in [".cpp", ".h", ".mm"], files) deps = set() norm_base_path = os.path.normpath(os.path.join("lldb", relative)) norm_base_path = norm_base_path.replace("\\", "/") for src in src_files: src_path = os.path.join(base, src) deps.update(scan_deps(norm_base_path, src_path)) insert_or_add_mapping(norm_base_path, deps) pass items = list(src_map.iteritems()) items.sort(lambda A, B : cmp(A[0], B[0])) for (path, deps) in items: print path + ":" sorted_deps = list(deps) sorted_deps.sort() for dep in sorted_deps: print "\t" + dep pass
Add a script to dump out all project inter-dependencies.
Add a script to dump out all project inter-dependencies. git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@296920 91177308-0d34-0410-b5e6-96231b3b80d8
Python
apache-2.0
apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb
Add a script to dump out all project inter-dependencies. git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@296920 91177308-0d34-0410-b5e6-96231b3b80d8
import os import re from use_lldb_suite import lldb_root src_dir = os.path.join(lldb_root, "source") inc_dir = os.path.join(lldb_root, "include") src_map = {} include_regex = re.compile('#include \"(lldb(.*/)+).*\"') def scan_deps(this_dir, file): includes = set() with open(file) as f: for line in list(f): m = include_regex.match(line) if m is not None: relative = m.groups()[0].rstrip("/") if relative != this_dir: includes.add(relative) return includes def insert_or_add_mapping(base, deps): global src_map if len(deps) > 0: if base in src_map: existing_deps = src_map[base] existing_deps.update(deps) else: src_map[base] = deps for (base, dirs, files) in os.walk(inc_dir): dir = os.path.basename(base) relative = os.path.relpath(base, inc_dir) inc_files = filter(lambda x : os.path.splitext(x)[1] in [".h"], files) deps = set() for inc in inc_files: inc_path = os.path.join(base, inc) deps.update(scan_deps(relative, inc_path)) insert_or_add_mapping(relative, deps) for (base, dirs, files) in os.walk(src_dir): dir = os.path.basename(base) relative = os.path.relpath(base, src_dir) src_files = filter(lambda x : os.path.splitext(x)[1] in [".cpp", ".h", ".mm"], files) deps = set() norm_base_path = os.path.normpath(os.path.join("lldb", relative)) norm_base_path = norm_base_path.replace("\\", "/") for src in src_files: src_path = os.path.join(base, src) deps.update(scan_deps(norm_base_path, src_path)) insert_or_add_mapping(norm_base_path, deps) pass items = list(src_map.iteritems()) items.sort(lambda A, B : cmp(A[0], B[0])) for (path, deps) in items: print path + ":" sorted_deps = list(deps) sorted_deps.sort() for dep in sorted_deps: print "\t" + dep pass
<commit_before><commit_msg>Add a script to dump out all project inter-dependencies. git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@296920 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>
import os import re from use_lldb_suite import lldb_root src_dir = os.path.join(lldb_root, "source") inc_dir = os.path.join(lldb_root, "include") src_map = {} include_regex = re.compile('#include \"(lldb(.*/)+).*\"') def scan_deps(this_dir, file): includes = set() with open(file) as f: for line in list(f): m = include_regex.match(line) if m is not None: relative = m.groups()[0].rstrip("/") if relative != this_dir: includes.add(relative) return includes def insert_or_add_mapping(base, deps): global src_map if len(deps) > 0: if base in src_map: existing_deps = src_map[base] existing_deps.update(deps) else: src_map[base] = deps for (base, dirs, files) in os.walk(inc_dir): dir = os.path.basename(base) relative = os.path.relpath(base, inc_dir) inc_files = filter(lambda x : os.path.splitext(x)[1] in [".h"], files) deps = set() for inc in inc_files: inc_path = os.path.join(base, inc) deps.update(scan_deps(relative, inc_path)) insert_or_add_mapping(relative, deps) for (base, dirs, files) in os.walk(src_dir): dir = os.path.basename(base) relative = os.path.relpath(base, src_dir) src_files = filter(lambda x : os.path.splitext(x)[1] in [".cpp", ".h", ".mm"], files) deps = set() norm_base_path = os.path.normpath(os.path.join("lldb", relative)) norm_base_path = norm_base_path.replace("\\", "/") for src in src_files: src_path = os.path.join(base, src) deps.update(scan_deps(norm_base_path, src_path)) insert_or_add_mapping(norm_base_path, deps) pass items = list(src_map.iteritems()) items.sort(lambda A, B : cmp(A[0], B[0])) for (path, deps) in items: print path + ":" sorted_deps = list(deps) sorted_deps.sort() for dep in sorted_deps: print "\t" + dep pass
Add a script to dump out all project inter-dependencies. git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@296920 91177308-0d34-0410-b5e6-96231b3b80d8import os import re from use_lldb_suite import lldb_root src_dir = os.path.join(lldb_root, "source") inc_dir = os.path.join(lldb_root, "include") src_map = {} include_regex = re.compile('#include \"(lldb(.*/)+).*\"') def scan_deps(this_dir, file): includes = set() with open(file) as f: for line in list(f): m = include_regex.match(line) if m is not None: relative = m.groups()[0].rstrip("/") if relative != this_dir: includes.add(relative) return includes def insert_or_add_mapping(base, deps): global src_map if len(deps) > 0: if base in src_map: existing_deps = src_map[base] existing_deps.update(deps) else: src_map[base] = deps for (base, dirs, files) in os.walk(inc_dir): dir = os.path.basename(base) relative = os.path.relpath(base, inc_dir) inc_files = filter(lambda x : os.path.splitext(x)[1] in [".h"], files) deps = set() for inc in inc_files: inc_path = os.path.join(base, inc) deps.update(scan_deps(relative, inc_path)) insert_or_add_mapping(relative, deps) for (base, dirs, files) in os.walk(src_dir): dir = os.path.basename(base) relative = os.path.relpath(base, src_dir) src_files = filter(lambda x : os.path.splitext(x)[1] in [".cpp", ".h", ".mm"], files) deps = set() norm_base_path = os.path.normpath(os.path.join("lldb", relative)) norm_base_path = norm_base_path.replace("\\", "/") for src in src_files: src_path = os.path.join(base, src) deps.update(scan_deps(norm_base_path, src_path)) insert_or_add_mapping(norm_base_path, deps) pass items = list(src_map.iteritems()) items.sort(lambda A, B : cmp(A[0], B[0])) for (path, deps) in items: print path + ":" sorted_deps = list(deps) sorted_deps.sort() for dep in sorted_deps: print "\t" + dep pass
<commit_before><commit_msg>Add a script to dump out all project inter-dependencies. git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@296920 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>import os import re from use_lldb_suite import lldb_root src_dir = os.path.join(lldb_root, "source") inc_dir = os.path.join(lldb_root, "include") src_map = {} include_regex = re.compile('#include \"(lldb(.*/)+).*\"') def scan_deps(this_dir, file): includes = set() with open(file) as f: for line in list(f): m = include_regex.match(line) if m is not None: relative = m.groups()[0].rstrip("/") if relative != this_dir: includes.add(relative) return includes def insert_or_add_mapping(base, deps): global src_map if len(deps) > 0: if base in src_map: existing_deps = src_map[base] existing_deps.update(deps) else: src_map[base] = deps for (base, dirs, files) in os.walk(inc_dir): dir = os.path.basename(base) relative = os.path.relpath(base, inc_dir) inc_files = filter(lambda x : os.path.splitext(x)[1] in [".h"], files) deps = set() for inc in inc_files: inc_path = os.path.join(base, inc) deps.update(scan_deps(relative, inc_path)) insert_or_add_mapping(relative, deps) for (base, dirs, files) in os.walk(src_dir): dir = os.path.basename(base) relative = os.path.relpath(base, src_dir) src_files = filter(lambda x : os.path.splitext(x)[1] in [".cpp", ".h", ".mm"], files) deps = set() norm_base_path = os.path.normpath(os.path.join("lldb", relative)) norm_base_path = norm_base_path.replace("\\", "/") for src in src_files: src_path = os.path.join(base, src) deps.update(scan_deps(norm_base_path, src_path)) insert_or_add_mapping(norm_base_path, deps) pass items = list(src_map.iteritems()) items.sort(lambda A, B : cmp(A[0], B[0])) for (path, deps) in items: print path + ":" sorted_deps = list(deps) sorted_deps.sort() for dep in sorted_deps: print "\t" + dep pass
670ef725528e474f9ee89695999bf127d81c94aa
fellowms/migrations/0007_auto_20160414_1411.py
fellowms/migrations/0007_auto_20160414_1411.py
# -*- coding: utf-8 -*- # Generated by Django 1.9.5 on 2016-04-14 14:11 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('fellowms', '0006_auto_20160414_1132'), ] operations = [ migrations.RenameField( model_name='fellow', old_name='full_name', new_name='surname', ), migrations.AddField( model_name='fellow', name='affiliation', field=models.CharField(default='', max_length=120, unique=True), preserve_default=False, ), migrations.AddField( model_name='fellow', name='forenames', field=models.CharField(default='', max_length=120, unique=True), preserve_default=False, ), migrations.AddField( model_name='fellow', name='gender', field=models.CharField(choices=[('M', 'Male'), ('F', 'Female'), ('O', 'Other'), ('R', 'Rather not say')], default='R', max_length=1), ), migrations.AddField( model_name='fellow', name='phone', field=models.CharField(default='', max_length=14, unique=True), preserve_default=False, ), migrations.AddField( model_name='fellow', name='research_area', field=models.CharField(default='', max_length=4, unique=True), preserve_default=False, ), migrations.AddField( model_name='fellow', name='work_description', field=models.TextField(default=''), preserve_default=False, ), migrations.AlterField( model_name='fellow', name='year', field=models.IntegerField(default=2017), ), ]
Add migration for update on fellow model
Add migration for update on fellow model
Python
bsd-3-clause
softwaresaved/fat,softwaresaved/fat,softwaresaved/fat,softwaresaved/fat
Add migration for update on fellow model
# -*- coding: utf-8 -*- # Generated by Django 1.9.5 on 2016-04-14 14:11 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('fellowms', '0006_auto_20160414_1132'), ] operations = [ migrations.RenameField( model_name='fellow', old_name='full_name', new_name='surname', ), migrations.AddField( model_name='fellow', name='affiliation', field=models.CharField(default='', max_length=120, unique=True), preserve_default=False, ), migrations.AddField( model_name='fellow', name='forenames', field=models.CharField(default='', max_length=120, unique=True), preserve_default=False, ), migrations.AddField( model_name='fellow', name='gender', field=models.CharField(choices=[('M', 'Male'), ('F', 'Female'), ('O', 'Other'), ('R', 'Rather not say')], default='R', max_length=1), ), migrations.AddField( model_name='fellow', name='phone', field=models.CharField(default='', max_length=14, unique=True), preserve_default=False, ), migrations.AddField( model_name='fellow', name='research_area', field=models.CharField(default='', max_length=4, unique=True), preserve_default=False, ), migrations.AddField( model_name='fellow', name='work_description', field=models.TextField(default=''), preserve_default=False, ), migrations.AlterField( model_name='fellow', name='year', field=models.IntegerField(default=2017), ), ]
<commit_before><commit_msg>Add migration for update on fellow model<commit_after>
# -*- coding: utf-8 -*- # Generated by Django 1.9.5 on 2016-04-14 14:11 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('fellowms', '0006_auto_20160414_1132'), ] operations = [ migrations.RenameField( model_name='fellow', old_name='full_name', new_name='surname', ), migrations.AddField( model_name='fellow', name='affiliation', field=models.CharField(default='', max_length=120, unique=True), preserve_default=False, ), migrations.AddField( model_name='fellow', name='forenames', field=models.CharField(default='', max_length=120, unique=True), preserve_default=False, ), migrations.AddField( model_name='fellow', name='gender', field=models.CharField(choices=[('M', 'Male'), ('F', 'Female'), ('O', 'Other'), ('R', 'Rather not say')], default='R', max_length=1), ), migrations.AddField( model_name='fellow', name='phone', field=models.CharField(default='', max_length=14, unique=True), preserve_default=False, ), migrations.AddField( model_name='fellow', name='research_area', field=models.CharField(default='', max_length=4, unique=True), preserve_default=False, ), migrations.AddField( model_name='fellow', name='work_description', field=models.TextField(default=''), preserve_default=False, ), migrations.AlterField( model_name='fellow', name='year', field=models.IntegerField(default=2017), ), ]
Add migration for update on fellow model# -*- coding: utf-8 -*- # Generated by Django 1.9.5 on 2016-04-14 14:11 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('fellowms', '0006_auto_20160414_1132'), ] operations = [ migrations.RenameField( model_name='fellow', old_name='full_name', new_name='surname', ), migrations.AddField( model_name='fellow', name='affiliation', field=models.CharField(default='', max_length=120, unique=True), preserve_default=False, ), migrations.AddField( model_name='fellow', name='forenames', field=models.CharField(default='', max_length=120, unique=True), preserve_default=False, ), migrations.AddField( model_name='fellow', name='gender', field=models.CharField(choices=[('M', 'Male'), ('F', 'Female'), ('O', 'Other'), ('R', 'Rather not say')], default='R', max_length=1), ), migrations.AddField( model_name='fellow', name='phone', field=models.CharField(default='', max_length=14, unique=True), preserve_default=False, ), migrations.AddField( model_name='fellow', name='research_area', field=models.CharField(default='', max_length=4, unique=True), preserve_default=False, ), migrations.AddField( model_name='fellow', name='work_description', field=models.TextField(default=''), preserve_default=False, ), migrations.AlterField( model_name='fellow', name='year', field=models.IntegerField(default=2017), ), ]
<commit_before><commit_msg>Add migration for update on fellow model<commit_after># -*- coding: utf-8 -*- # Generated by Django 1.9.5 on 2016-04-14 14:11 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('fellowms', '0006_auto_20160414_1132'), ] operations = [ migrations.RenameField( model_name='fellow', old_name='full_name', new_name='surname', ), migrations.AddField( model_name='fellow', name='affiliation', field=models.CharField(default='', max_length=120, unique=True), preserve_default=False, ), migrations.AddField( model_name='fellow', name='forenames', field=models.CharField(default='', max_length=120, unique=True), preserve_default=False, ), migrations.AddField( model_name='fellow', name='gender', field=models.CharField(choices=[('M', 'Male'), ('F', 'Female'), ('O', 'Other'), ('R', 'Rather not say')], default='R', max_length=1), ), migrations.AddField( model_name='fellow', name='phone', field=models.CharField(default='', max_length=14, unique=True), preserve_default=False, ), migrations.AddField( model_name='fellow', name='research_area', field=models.CharField(default='', max_length=4, unique=True), preserve_default=False, ), migrations.AddField( model_name='fellow', name='work_description', field=models.TextField(default=''), preserve_default=False, ), migrations.AlterField( model_name='fellow', name='year', field=models.IntegerField(default=2017), ), ]
4838b59bb3ab30ab7641308c2de4e1ce989f663f
config/sublime-text-3/Packages/User/disable_minimap.py
config/sublime-text-3/Packages/User/disable_minimap.py
# -*- encoding: utf-8 -*- import sublime import sublime_plugin class DisableMinimap(sublime_plugin.EventListener): def on_activated(self, view): view.window().set_minimap_visible(False)
Disable minimap in Sublime Text 3.
Disable minimap in Sublime Text 3.
Python
apache-2.0
mikelward/conf,mikelward/conf
Disable minimap in Sublime Text 3.
# -*- encoding: utf-8 -*- import sublime import sublime_plugin class DisableMinimap(sublime_plugin.EventListener): def on_activated(self, view): view.window().set_minimap_visible(False)
<commit_before><commit_msg>Disable minimap in Sublime Text 3.<commit_after>
# -*- encoding: utf-8 -*- import sublime import sublime_plugin class DisableMinimap(sublime_plugin.EventListener): def on_activated(self, view): view.window().set_minimap_visible(False)
Disable minimap in Sublime Text 3.# -*- encoding: utf-8 -*- import sublime import sublime_plugin class DisableMinimap(sublime_plugin.EventListener): def on_activated(self, view): view.window().set_minimap_visible(False)
<commit_before><commit_msg>Disable minimap in Sublime Text 3.<commit_after># -*- encoding: utf-8 -*- import sublime import sublime_plugin class DisableMinimap(sublime_plugin.EventListener): def on_activated(self, view): view.window().set_minimap_visible(False)
0980736fbefd8b7bbf881108bc021058335624e4
edm/mathtypes.py
edm/mathtypes.py
try: from mathutils import Matrix, Vector except ImportError: # We don't have mathutils. Make some very basic replacements. class Vector(tuple): def __repr__(self): return "Vector({})".format(super(Vector, self).__repr__()) class Matrix(tuple): def transposed(self): cols = [[self[j][i] for j in range(len(self))] for i in range(len(self))] return Matrix(cols) def __repr__(self): return "Matrix({})".format(super(Matrix, self).__repr__())
Add previously-missed placeholder Vector/Matrix code
Add previously-missed placeholder Vector/Matrix code This lets us use the read.py script without having to import the Blender module, at the moment. Importing should probably be kept separate from reading into blender, but it's convenient to share math structures if possible.
Python
mit
ndevenish/Blender_ioEDM,ndevenish/Blender_ioEDM
Add previously-missed placeholder Vector/Matrix code This lets us use the read.py script without having to import the Blender module, at the moment. Importing should probably be kept separate from reading into blender, but it's convenient to share math structures if possible.
try: from mathutils import Matrix, Vector except ImportError: # We don't have mathutils. Make some very basic replacements. class Vector(tuple): def __repr__(self): return "Vector({})".format(super(Vector, self).__repr__()) class Matrix(tuple): def transposed(self): cols = [[self[j][i] for j in range(len(self))] for i in range(len(self))] return Matrix(cols) def __repr__(self): return "Matrix({})".format(super(Matrix, self).__repr__())
<commit_before><commit_msg>Add previously-missed placeholder Vector/Matrix code This lets us use the read.py script without having to import the Blender module, at the moment. Importing should probably be kept separate from reading into blender, but it's convenient to share math structures if possible.<commit_after>
try: from mathutils import Matrix, Vector except ImportError: # We don't have mathutils. Make some very basic replacements. class Vector(tuple): def __repr__(self): return "Vector({})".format(super(Vector, self).__repr__()) class Matrix(tuple): def transposed(self): cols = [[self[j][i] for j in range(len(self))] for i in range(len(self))] return Matrix(cols) def __repr__(self): return "Matrix({})".format(super(Matrix, self).__repr__())
Add previously-missed placeholder Vector/Matrix code This lets us use the read.py script without having to import the Blender module, at the moment. Importing should probably be kept separate from reading into blender, but it's convenient to share math structures if possible. try: from mathutils import Matrix, Vector except ImportError: # We don't have mathutils. Make some very basic replacements. class Vector(tuple): def __repr__(self): return "Vector({})".format(super(Vector, self).__repr__()) class Matrix(tuple): def transposed(self): cols = [[self[j][i] for j in range(len(self))] for i in range(len(self))] return Matrix(cols) def __repr__(self): return "Matrix({})".format(super(Matrix, self).__repr__())
<commit_before><commit_msg>Add previously-missed placeholder Vector/Matrix code This lets us use the read.py script without having to import the Blender module, at the moment. Importing should probably be kept separate from reading into blender, but it's convenient to share math structures if possible.<commit_after> try: from mathutils import Matrix, Vector except ImportError: # We don't have mathutils. Make some very basic replacements. class Vector(tuple): def __repr__(self): return "Vector({})".format(super(Vector, self).__repr__()) class Matrix(tuple): def transposed(self): cols = [[self[j][i] for j in range(len(self))] for i in range(len(self))] return Matrix(cols) def __repr__(self): return "Matrix({})".format(super(Matrix, self).__repr__())
9fba9934c9b47881ee468f295a3710f2c184fab1
tendrl/node_agent/__init__.py
tendrl/node_agent/__init__.py
try: from gevent import monkey except ImportError: pass else: monkey.patch_all() from tendrl.commons import CommonNS from tendrl.node_agent.objects.definition import Definition from tendrl.node_agent.objects.config import Config from tendrl.node_agent.objects.node_context import NodeContext from tendrl.node_agent.objects.detected_cluster import DetectedCluster from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.objects.tendrl_context import TendrlContext from tendrl.node_agent.objects.service import Service from tendrl.node_agent.objects.cpu import Cpu from tendrl.node_agent.objects.disk import Disk from tendrl.node_agent.objects.file import File from tendrl.node_agent.objects.memory import Memory from tendrl.node_agent.objects.node import Node from tendrl.node_agent.objects.os import Os from tendrl.node_agent.objects.package import Package from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.flows.import_cluster import ImportCluster class NodeAgentNS(CommonNS): def __init__(self): # Create the "tendrl_ns.node_agent" namespace self.to_str = "tendrl.node_agent" self.type = 'node' super(NodeAgentNS, self).__init__() import __builtin__ __builtin__.tendrl_ns = NodeAgentNS()
try: from gevent import monkey except ImportError: pass else: monkey.patch_all() from tendrl.commons import CommonNS from tendrl.node_agent.objects.definition import Definition from tendrl.node_agent.objects.config import Config from tendrl.node_agent.objects.node_context import NodeContext from tendrl.node_agent.objects.detected_cluster import DetectedCluster from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.objects.tendrl_context import TendrlContext from tendrl.node_agent.objects.service import Service from tendrl.node_agent.objects.cpu import Cpu from tendrl.node_agent.objects.disk import Disk from tendrl.node_agent.objects.file import File from tendrl.node_agent.objects.memory import Memory from tendrl.node_agent.objects.node import Node from tendrl.node_agent.objects.os import Os from tendrl.node_agent.objects.package import Package from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.flows.import_cluster import ImportCluster class NodeAgentNS(CommonNS): def __init__(self): # Create the "tendrl_ns.node_agent" namespace self.to_str = "tendrl.node_agent" self.type = 'node' super(NodeAgentNS, self).__init__() NodeAgentNS()
Fix greenlet and essential objects startup order
Fix greenlet and essential objects startup order
Python
lgpl-2.1
Tendrl/node_agent,Tendrl/node-agent,r0h4n/node-agent,Tendrl/node_agent,Tendrl/node-agent,Tendrl/node-agent,r0h4n/node-agent,r0h4n/node-agent
try: from gevent import monkey except ImportError: pass else: monkey.patch_all() from tendrl.commons import CommonNS from tendrl.node_agent.objects.definition import Definition from tendrl.node_agent.objects.config import Config from tendrl.node_agent.objects.node_context import NodeContext from tendrl.node_agent.objects.detected_cluster import DetectedCluster from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.objects.tendrl_context import TendrlContext from tendrl.node_agent.objects.service import Service from tendrl.node_agent.objects.cpu import Cpu from tendrl.node_agent.objects.disk import Disk from tendrl.node_agent.objects.file import File from tendrl.node_agent.objects.memory import Memory from tendrl.node_agent.objects.node import Node from tendrl.node_agent.objects.os import Os from tendrl.node_agent.objects.package import Package from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.flows.import_cluster import ImportCluster class NodeAgentNS(CommonNS): def __init__(self): # Create the "tendrl_ns.node_agent" namespace self.to_str = "tendrl.node_agent" self.type = 'node' super(NodeAgentNS, self).__init__() import __builtin__ __builtin__.tendrl_ns = NodeAgentNS() Fix greenlet and essential objects startup order
try: from gevent import monkey except ImportError: pass else: monkey.patch_all() from tendrl.commons import CommonNS from tendrl.node_agent.objects.definition import Definition from tendrl.node_agent.objects.config import Config from tendrl.node_agent.objects.node_context import NodeContext from tendrl.node_agent.objects.detected_cluster import DetectedCluster from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.objects.tendrl_context import TendrlContext from tendrl.node_agent.objects.service import Service from tendrl.node_agent.objects.cpu import Cpu from tendrl.node_agent.objects.disk import Disk from tendrl.node_agent.objects.file import File from tendrl.node_agent.objects.memory import Memory from tendrl.node_agent.objects.node import Node from tendrl.node_agent.objects.os import Os from tendrl.node_agent.objects.package import Package from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.flows.import_cluster import ImportCluster class NodeAgentNS(CommonNS): def __init__(self): # Create the "tendrl_ns.node_agent" namespace self.to_str = "tendrl.node_agent" self.type = 'node' super(NodeAgentNS, self).__init__() NodeAgentNS()
<commit_before>try: from gevent import monkey except ImportError: pass else: monkey.patch_all() from tendrl.commons import CommonNS from tendrl.node_agent.objects.definition import Definition from tendrl.node_agent.objects.config import Config from tendrl.node_agent.objects.node_context import NodeContext from tendrl.node_agent.objects.detected_cluster import DetectedCluster from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.objects.tendrl_context import TendrlContext from tendrl.node_agent.objects.service import Service from tendrl.node_agent.objects.cpu import Cpu from tendrl.node_agent.objects.disk import Disk from tendrl.node_agent.objects.file import File from tendrl.node_agent.objects.memory import Memory from tendrl.node_agent.objects.node import Node from tendrl.node_agent.objects.os import Os from tendrl.node_agent.objects.package import Package from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.flows.import_cluster import ImportCluster class NodeAgentNS(CommonNS): def __init__(self): # Create the "tendrl_ns.node_agent" namespace self.to_str = "tendrl.node_agent" self.type = 'node' super(NodeAgentNS, self).__init__() import __builtin__ __builtin__.tendrl_ns = NodeAgentNS() <commit_msg>Fix greenlet and essential objects startup order<commit_after>
try: from gevent import monkey except ImportError: pass else: monkey.patch_all() from tendrl.commons import CommonNS from tendrl.node_agent.objects.definition import Definition from tendrl.node_agent.objects.config import Config from tendrl.node_agent.objects.node_context import NodeContext from tendrl.node_agent.objects.detected_cluster import DetectedCluster from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.objects.tendrl_context import TendrlContext from tendrl.node_agent.objects.service import Service from tendrl.node_agent.objects.cpu import Cpu from tendrl.node_agent.objects.disk import Disk from tendrl.node_agent.objects.file import File from tendrl.node_agent.objects.memory import Memory from tendrl.node_agent.objects.node import Node from tendrl.node_agent.objects.os import Os from tendrl.node_agent.objects.package import Package from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.flows.import_cluster import ImportCluster class NodeAgentNS(CommonNS): def __init__(self): # Create the "tendrl_ns.node_agent" namespace self.to_str = "tendrl.node_agent" self.type = 'node' super(NodeAgentNS, self).__init__() NodeAgentNS()
try: from gevent import monkey except ImportError: pass else: monkey.patch_all() from tendrl.commons import CommonNS from tendrl.node_agent.objects.definition import Definition from tendrl.node_agent.objects.config import Config from tendrl.node_agent.objects.node_context import NodeContext from tendrl.node_agent.objects.detected_cluster import DetectedCluster from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.objects.tendrl_context import TendrlContext from tendrl.node_agent.objects.service import Service from tendrl.node_agent.objects.cpu import Cpu from tendrl.node_agent.objects.disk import Disk from tendrl.node_agent.objects.file import File from tendrl.node_agent.objects.memory import Memory from tendrl.node_agent.objects.node import Node from tendrl.node_agent.objects.os import Os from tendrl.node_agent.objects.package import Package from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.flows.import_cluster import ImportCluster class NodeAgentNS(CommonNS): def __init__(self): # Create the "tendrl_ns.node_agent" namespace self.to_str = "tendrl.node_agent" self.type = 'node' super(NodeAgentNS, self).__init__() import __builtin__ __builtin__.tendrl_ns = NodeAgentNS() Fix greenlet and essential objects startup ordertry: from gevent import monkey except ImportError: pass else: monkey.patch_all() from tendrl.commons import CommonNS from tendrl.node_agent.objects.definition import Definition from tendrl.node_agent.objects.config import Config from tendrl.node_agent.objects.node_context import NodeContext from tendrl.node_agent.objects.detected_cluster import DetectedCluster from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.objects.tendrl_context import TendrlContext from tendrl.node_agent.objects.service import Service from tendrl.node_agent.objects.cpu import Cpu from tendrl.node_agent.objects.disk import Disk from tendrl.node_agent.objects.file import File from tendrl.node_agent.objects.memory import Memory from tendrl.node_agent.objects.node import Node from tendrl.node_agent.objects.os import Os from tendrl.node_agent.objects.package import Package from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.flows.import_cluster import ImportCluster class NodeAgentNS(CommonNS): def __init__(self): # Create the "tendrl_ns.node_agent" namespace self.to_str = "tendrl.node_agent" self.type = 'node' super(NodeAgentNS, self).__init__() NodeAgentNS()
<commit_before>try: from gevent import monkey except ImportError: pass else: monkey.patch_all() from tendrl.commons import CommonNS from tendrl.node_agent.objects.definition import Definition from tendrl.node_agent.objects.config import Config from tendrl.node_agent.objects.node_context import NodeContext from tendrl.node_agent.objects.detected_cluster import DetectedCluster from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.objects.tendrl_context import TendrlContext from tendrl.node_agent.objects.service import Service from tendrl.node_agent.objects.cpu import Cpu from tendrl.node_agent.objects.disk import Disk from tendrl.node_agent.objects.file import File from tendrl.node_agent.objects.memory import Memory from tendrl.node_agent.objects.node import Node from tendrl.node_agent.objects.os import Os from tendrl.node_agent.objects.package import Package from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.flows.import_cluster import ImportCluster class NodeAgentNS(CommonNS): def __init__(self): # Create the "tendrl_ns.node_agent" namespace self.to_str = "tendrl.node_agent" self.type = 'node' super(NodeAgentNS, self).__init__() import __builtin__ __builtin__.tendrl_ns = NodeAgentNS() <commit_msg>Fix greenlet and essential objects startup order<commit_after>try: from gevent import monkey except ImportError: pass else: monkey.patch_all() from tendrl.commons import CommonNS from tendrl.node_agent.objects.definition import Definition from tendrl.node_agent.objects.config import Config from tendrl.node_agent.objects.node_context import NodeContext from tendrl.node_agent.objects.detected_cluster import DetectedCluster from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.objects.tendrl_context import TendrlContext from tendrl.node_agent.objects.service import Service from tendrl.node_agent.objects.cpu import Cpu from tendrl.node_agent.objects.disk import Disk from tendrl.node_agent.objects.file import File from tendrl.node_agent.objects.memory import Memory from tendrl.node_agent.objects.node import Node from tendrl.node_agent.objects.os import Os from tendrl.node_agent.objects.package import Package from tendrl.node_agent.objects.platform import Platform from tendrl.node_agent.flows.import_cluster import ImportCluster class NodeAgentNS(CommonNS): def __init__(self): # Create the "tendrl_ns.node_agent" namespace self.to_str = "tendrl.node_agent" self.type = 'node' super(NodeAgentNS, self).__init__() NodeAgentNS()
f3b4de822bee52e103ff2ba4543f941cceed420f
zerver/management/commands/generate_multiuse_invite_link.py
zerver/management/commands/generate_multiuse_invite_link.py
from __future__ import absolute_import
from __future__ import print_function

from typing import Any

from argparse import ArgumentParser

from confirmation.models import Confirmation, create_confirmation_link
from zerver.lib.management import ZulipBaseCommand
from zerver.lib.actions import create_stream_if_needed
from zerver.models import MultiuseInvite

class Command(ZulipBaseCommand):
    help = "Generates invite link that can be used for inviting multiple users"

    def add_arguments(self, parser):
        # type: (ArgumentParser) -> None
        self.add_realm_args(parser, True)

        parser.add_argument(
            '-s', '--streams',
            dest='streams',
            type=str,
            help='A comma-separated list of stream names.')

        parser.add_argument(
            '--referred-by',
            dest='referred_by',
            type=str,
            help='Email of referrer',
            required=True,
        )

    def handle(self, *args, **options):
        # type: (*Any, **Any) -> None
        realm = self.get_realm(options)
        streams = []

        if options["streams"]:
            stream_names = set([stream.strip() for stream in options["streams"].split(",")])

            for stream_name in set(stream_names):
                stream, _ = create_stream_if_needed(realm, stream_name)
                streams.append(stream)

        referred_by = self.get_user(options['referred_by'], realm)
        invite = MultiuseInvite.objects.create(realm=realm, referred_by=referred_by)

        if streams:
            invite.streams = streams
            invite.save()

        invite_link = create_confirmation_link(invite, realm.host, Confirmation.MULTIUSE_INVITE)
        print("You can use %s to invite any number of people to the organization." % (invite_link,))
Create multiuse user invite generation command.
command: Create multiuse user invite generation command.
Python
apache-2.0
showell/zulip,shubhamdhama/zulip,eeshangarg/zulip,hackerkid/zulip,eeshangarg/zulip,shubhamdhama/zulip,hackerkid/zulip,jackrzhang/zulip,shubhamdhama/zulip,verma-varsha/zulip,dhcrzf/zulip,rht/zulip,punchagan/zulip,timabbott/zulip,jackrzhang/zulip,Galexrt/zulip,synicalsyntax/zulip,timabbott/zulip,zulip/zulip,amanharitsh123/zulip,amanharitsh123/zulip,hackerkid/zulip,tommyip/zulip,jackrzhang/zulip,verma-varsha/zulip,rht/zulip,eeshangarg/zulip,Galexrt/zulip,brockwhittaker/zulip,amanharitsh123/zulip,dhcrzf/zulip,kou/zulip,punchagan/zulip,dhcrzf/zulip,kou/zulip,andersk/zulip,Galexrt/zulip,hackerkid/zulip,jackrzhang/zulip,brainwane/zulip,timabbott/zulip,kou/zulip,kou/zulip,synicalsyntax/zulip,timabbott/zulip,brainwane/zulip,Galexrt/zulip,rishig/zulip,rishig/zulip,shubhamdhama/zulip,zulip/zulip,rishig/zulip,rishig/zulip,brainwane/zulip,eeshangarg/zulip,synicalsyntax/zulip,brockwhittaker/zulip,andersk/zulip,timabbott/zulip,dhcrzf/zulip,showell/zulip,Galexrt/zulip,dhcrzf/zulip,synicalsyntax/zulip,tommyip/zulip,tommyip/zulip,brainwane/zulip,synicalsyntax/zulip,dhcrzf/zulip,rht/zulip,kou/zulip,Galexrt/zulip,hackerkid/zulip,rht/zulip,showell/zulip,hackerkid/zulip,brainwane/zulip,zulip/zulip,verma-varsha/zulip,brockwhittaker/zulip,zulip/zulip,jackrzhang/zulip,mahim97/zulip,kou/zulip,hackerkid/zulip,rht/zulip,verma-varsha/zulip,punchagan/zulip,dhcrzf/zulip,showell/zulip,andersk/zulip,rishig/zulip,mahim97/zulip,tommyip/zulip,showell/zulip,synicalsyntax/zulip,amanharitsh123/zulip,Galexrt/zulip,jackrzhang/zulip,verma-varsha/zulip,brockwhittaker/zulip,kou/zulip,brainwane/zulip,rht/zulip,tommyip/zulip,punchagan/zulip,synicalsyntax/zulip,tommyip/zulip,andersk/zulip,andersk/zulip,timabbott/zulip,eeshangarg/zulip,showell/zulip,shubhamdhama/zulip,shubhamdhama/zulip,brockwhittaker/zulip,eeshangarg/zulip,punchagan/zulip,amanharitsh123/zulip,zulip/zulip,rishig/zulip,jackrzhang/zulip,punchagan/zulip,mahim97/zulip,zulip/zulip,shubhamdhama/zulip,mahim97/zulip,rishig/zulip,showell/zulip,punchagan/zulip,mahim97/zulip,zulip/zulip,amanharitsh123/zulip,rht/zulip,brainwane/zulip,eeshangarg/zulip,tommyip/zulip,timabbott/zulip,verma-varsha/zulip,mahim97/zulip,brockwhittaker/zulip,andersk/zulip,andersk/zulip
command: Create multiuse user invite generation command.
from __future__ import absolute_import
from __future__ import print_function

from typing import Any

from argparse import ArgumentParser

from confirmation.models import Confirmation, create_confirmation_link
from zerver.lib.management import ZulipBaseCommand
from zerver.lib.actions import create_stream_if_needed
from zerver.models import MultiuseInvite

class Command(ZulipBaseCommand):
    help = "Generates invite link that can be used for inviting multiple users"

    def add_arguments(self, parser):
        # type: (ArgumentParser) -> None
        self.add_realm_args(parser, True)

        parser.add_argument(
            '-s', '--streams',
            dest='streams',
            type=str,
            help='A comma-separated list of stream names.')

        parser.add_argument(
            '--referred-by',
            dest='referred_by',
            type=str,
            help='Email of referrer',
            required=True,
        )

    def handle(self, *args, **options):
        # type: (*Any, **Any) -> None
        realm = self.get_realm(options)
        streams = []

        if options["streams"]:
            stream_names = set([stream.strip() for stream in options["streams"].split(",")])

            for stream_name in set(stream_names):
                stream, _ = create_stream_if_needed(realm, stream_name)
                streams.append(stream)

        referred_by = self.get_user(options['referred_by'], realm)
        invite = MultiuseInvite.objects.create(realm=realm, referred_by=referred_by)

        if streams:
            invite.streams = streams
            invite.save()

        invite_link = create_confirmation_link(invite, realm.host, Confirmation.MULTIUSE_INVITE)
        print("You can use %s to invite any number of people to the organization." % (invite_link,))
<commit_before><commit_msg>command: Create multiuse user invite generation command.<commit_after>
from __future__ import absolute_import
from __future__ import print_function

from typing import Any

from argparse import ArgumentParser

from confirmation.models import Confirmation, create_confirmation_link
from zerver.lib.management import ZulipBaseCommand
from zerver.lib.actions import create_stream_if_needed
from zerver.models import MultiuseInvite

class Command(ZulipBaseCommand):
    help = "Generates invite link that can be used for inviting multiple users"

    def add_arguments(self, parser):
        # type: (ArgumentParser) -> None
        self.add_realm_args(parser, True)

        parser.add_argument(
            '-s', '--streams',
            dest='streams',
            type=str,
            help='A comma-separated list of stream names.')

        parser.add_argument(
            '--referred-by',
            dest='referred_by',
            type=str,
            help='Email of referrer',
            required=True,
        )

    def handle(self, *args, **options):
        # type: (*Any, **Any) -> None
        realm = self.get_realm(options)
        streams = []

        if options["streams"]:
            stream_names = set([stream.strip() for stream in options["streams"].split(",")])

            for stream_name in set(stream_names):
                stream, _ = create_stream_if_needed(realm, stream_name)
                streams.append(stream)

        referred_by = self.get_user(options['referred_by'], realm)
        invite = MultiuseInvite.objects.create(realm=realm, referred_by=referred_by)

        if streams:
            invite.streams = streams
            invite.save()

        invite_link = create_confirmation_link(invite, realm.host, Confirmation.MULTIUSE_INVITE)
        print("You can use %s to invite any number of people to the organization." % (invite_link,))
command: Create multiuse user invite generation command.from __future__ import absolute_import
from __future__ import print_function

from typing import Any

from argparse import ArgumentParser

from confirmation.models import Confirmation, create_confirmation_link
from zerver.lib.management import ZulipBaseCommand
from zerver.lib.actions import create_stream_if_needed
from zerver.models import MultiuseInvite

class Command(ZulipBaseCommand):
    help = "Generates invite link that can be used for inviting multiple users"

    def add_arguments(self, parser):
        # type: (ArgumentParser) -> None
        self.add_realm_args(parser, True)

        parser.add_argument(
            '-s', '--streams',
            dest='streams',
            type=str,
            help='A comma-separated list of stream names.')

        parser.add_argument(
            '--referred-by',
            dest='referred_by',
            type=str,
            help='Email of referrer',
            required=True,
        )

    def handle(self, *args, **options):
        # type: (*Any, **Any) -> None
        realm = self.get_realm(options)
        streams = []

        if options["streams"]:
            stream_names = set([stream.strip() for stream in options["streams"].split(",")])

            for stream_name in set(stream_names):
                stream, _ = create_stream_if_needed(realm, stream_name)
                streams.append(stream)

        referred_by = self.get_user(options['referred_by'], realm)
        invite = MultiuseInvite.objects.create(realm=realm, referred_by=referred_by)

        if streams:
            invite.streams = streams
            invite.save()

        invite_link = create_confirmation_link(invite, realm.host, Confirmation.MULTIUSE_INVITE)
        print("You can use %s to invite any number of people to the organization." % (invite_link,))
<commit_before><commit_msg>command: Create multiuse user invite generation command.<commit_after>from __future__ import absolute_import
from __future__ import print_function

from typing import Any

from argparse import ArgumentParser

from confirmation.models import Confirmation, create_confirmation_link
from zerver.lib.management import ZulipBaseCommand
from zerver.lib.actions import create_stream_if_needed
from zerver.models import MultiuseInvite

class Command(ZulipBaseCommand):
    help = "Generates invite link that can be used for inviting multiple users"

    def add_arguments(self, parser):
        # type: (ArgumentParser) -> None
        self.add_realm_args(parser, True)

        parser.add_argument(
            '-s', '--streams',
            dest='streams',
            type=str,
            help='A comma-separated list of stream names.')

        parser.add_argument(
            '--referred-by',
            dest='referred_by',
            type=str,
            help='Email of referrer',
            required=True,
        )

    def handle(self, *args, **options):
        # type: (*Any, **Any) -> None
        realm = self.get_realm(options)
        streams = []

        if options["streams"]:
            stream_names = set([stream.strip() for stream in options["streams"].split(",")])

            for stream_name in set(stream_names):
                stream, _ = create_stream_if_needed(realm, stream_name)
                streams.append(stream)

        referred_by = self.get_user(options['referred_by'], realm)
        invite = MultiuseInvite.objects.create(realm=realm, referred_by=referred_by)

        if streams:
            invite.streams = streams
            invite.save()

        invite_link = create_confirmation_link(invite, realm.host, Confirmation.MULTIUSE_INVITE)
        print("You can use %s to invite any number of people to the organization." % (invite_link,))
3dd761611c8458f4df31fd8fb925c2758dbe9685
_python/main/migrations/0014_auto_20191219_1744.py
_python/main/migrations/0014_auto_20191219_1744.py
# Generated by Django 2.2.9 on 2019-12-19 17:44 from django.db import migrations def delete_courts_without_cases(apps, schema_editor): CaseCourt = apps.get_model('main', 'CaseCourt') CaseCourt.objects.filter(cases__isnull=True).delete() class Migration(migrations.Migration): dependencies = [ ('main', '0013_auto_20191219_1734'), ] operations = [ migrations.RunPython(delete_courts_without_cases), ]
Delete courts with no cases.
Delete courts with no cases.
Python
agpl-3.0
harvard-lil/h2o,harvard-lil/h2o,harvard-lil/h2o,harvard-lil/h2o
Delete courts with no cases.
# Generated by Django 2.2.9 on 2019-12-19 17:44 from django.db import migrations def delete_courts_without_cases(apps, schema_editor): CaseCourt = apps.get_model('main', 'CaseCourt') CaseCourt.objects.filter(cases__isnull=True).delete() class Migration(migrations.Migration): dependencies = [ ('main', '0013_auto_20191219_1734'), ] operations = [ migrations.RunPython(delete_courts_without_cases), ]
<commit_before><commit_msg>Delete courts with no cases.<commit_after>
# Generated by Django 2.2.9 on 2019-12-19 17:44 from django.db import migrations def delete_courts_without_cases(apps, schema_editor): CaseCourt = apps.get_model('main', 'CaseCourt') CaseCourt.objects.filter(cases__isnull=True).delete() class Migration(migrations.Migration): dependencies = [ ('main', '0013_auto_20191219_1734'), ] operations = [ migrations.RunPython(delete_courts_without_cases), ]
Delete courts with no cases.# Generated by Django 2.2.9 on 2019-12-19 17:44 from django.db import migrations def delete_courts_without_cases(apps, schema_editor): CaseCourt = apps.get_model('main', 'CaseCourt') CaseCourt.objects.filter(cases__isnull=True).delete() class Migration(migrations.Migration): dependencies = [ ('main', '0013_auto_20191219_1734'), ] operations = [ migrations.RunPython(delete_courts_without_cases), ]
<commit_before><commit_msg>Delete courts with no cases.<commit_after># Generated by Django 2.2.9 on 2019-12-19 17:44 from django.db import migrations def delete_courts_without_cases(apps, schema_editor): CaseCourt = apps.get_model('main', 'CaseCourt') CaseCourt.objects.filter(cases__isnull=True).delete() class Migration(migrations.Migration): dependencies = [ ('main', '0013_auto_20191219_1734'), ] operations = [ migrations.RunPython(delete_courts_without_cases), ]
650126e0007dfda5cc8091ef8ee42991433b742f
product_sale_price_by_margin/migrations/8.0.0.3.0/pre-migration.py
product_sale_price_by_margin/migrations/8.0.0.3.0/pre-migration.py
# -*- encoding: utf-8 -*-
from openerp import SUPERUSER_ID
from openerp.modules.registry import RegistryManager


def copy_column(cr, model, table, target_field, source_field, condition):
    print 'Making copy of column %s to column %s' % (
        source_field, target_field)
    cr.execute('SELECT id, %(field)s '
               'FROM %(table)s '
               '%(condition)s' % {
                   'table': table,
                   'field': source_field,
                   'condition': condition,
               })
    for row in cr.fetchall():
        model.write(cr, SUPERUSER_ID, row[0], {target_field: row[1]})
        # model.write(cr, SUPERUSER_ID, row[0], {target_field: [(4, row[1])]})


def migrate(cr, version):
    print 'Migrating product_sale_price_by_margin'
    if not version:
        return
    registry = RegistryManager.get(cr.dbname)
    model = 'product.template'
    table = 'product_template'
    source_field = "manual_list_price"
    target_field = "list_price"
    condition = "WHERE list_price_type = 'manual'"
    copy_column(
        cr,
        registry[model],
        table,
        source_field,
        target_field,
        condition,
    )
ADD prod sale price by margin migration scripts
ADD prod sale price by margin migration scripts
Python
agpl-3.0
ingadhoc/product,ingadhoc/product
ADD prod sale price by margin migration scripts
# -*- encoding: utf-8 -*-
from openerp import SUPERUSER_ID
from openerp.modules.registry import RegistryManager


def copy_column(cr, model, table, target_field, source_field, condition):
    print 'Making copy of column %s to column %s' % (
        source_field, target_field)
    cr.execute('SELECT id, %(field)s '
               'FROM %(table)s '
               '%(condition)s' % {
                   'table': table,
                   'field': source_field,
                   'condition': condition,
               })
    for row in cr.fetchall():
        model.write(cr, SUPERUSER_ID, row[0], {target_field: row[1]})
        # model.write(cr, SUPERUSER_ID, row[0], {target_field: [(4, row[1])]})


def migrate(cr, version):
    print 'Migrating product_sale_price_by_margin'
    if not version:
        return
    registry = RegistryManager.get(cr.dbname)
    model = 'product.template'
    table = 'product_template'
    source_field = "manual_list_price"
    target_field = "list_price"
    condition = "WHERE list_price_type = 'manual'"
    copy_column(
        cr,
        registry[model],
        table,
        source_field,
        target_field,
        condition,
    )
<commit_before><commit_msg>ADD prod sale price by margin migration scripts<commit_after>
# -*- encoding: utf-8 -*-
from openerp import SUPERUSER_ID
from openerp.modules.registry import RegistryManager


def copy_column(cr, model, table, target_field, source_field, condition):
    print 'Making copy of column %s to column %s' % (
        source_field, target_field)
    cr.execute('SELECT id, %(field)s '
               'FROM %(table)s '
               '%(condition)s' % {
                   'table': table,
                   'field': source_field,
                   'condition': condition,
               })
    for row in cr.fetchall():
        model.write(cr, SUPERUSER_ID, row[0], {target_field: row[1]})
        # model.write(cr, SUPERUSER_ID, row[0], {target_field: [(4, row[1])]})


def migrate(cr, version):
    print 'Migrating product_sale_price_by_margin'
    if not version:
        return
    registry = RegistryManager.get(cr.dbname)
    model = 'product.template'
    table = 'product_template'
    source_field = "manual_list_price"
    target_field = "list_price"
    condition = "WHERE list_price_type = 'manual'"
    copy_column(
        cr,
        registry[model],
        table,
        source_field,
        target_field,
        condition,
    )
ADD prod sale price by margin migration scripts# -*- encoding: utf-8 -*-
from openerp import SUPERUSER_ID
from openerp.modules.registry import RegistryManager


def copy_column(cr, model, table, target_field, source_field, condition):
    print 'Making copy of column %s to column %s' % (
        source_field, target_field)
    cr.execute('SELECT id, %(field)s '
               'FROM %(table)s '
               '%(condition)s' % {
                   'table': table,
                   'field': source_field,
                   'condition': condition,
               })
    for row in cr.fetchall():
        model.write(cr, SUPERUSER_ID, row[0], {target_field: row[1]})
        # model.write(cr, SUPERUSER_ID, row[0], {target_field: [(4, row[1])]})


def migrate(cr, version):
    print 'Migrating product_sale_price_by_margin'
    if not version:
        return
    registry = RegistryManager.get(cr.dbname)
    model = 'product.template'
    table = 'product_template'
    source_field = "manual_list_price"
    target_field = "list_price"
    condition = "WHERE list_price_type = 'manual'"
    copy_column(
        cr,
        registry[model],
        table,
        source_field,
        target_field,
        condition,
    )
<commit_before><commit_msg>ADD prod sale price by margin migration scripts<commit_after># -*- encoding: utf-8 -*-
from openerp import SUPERUSER_ID
from openerp.modules.registry import RegistryManager


def copy_column(cr, model, table, target_field, source_field, condition):
    print 'Making copy of column %s to column %s' % (
        source_field, target_field)
    cr.execute('SELECT id, %(field)s '
               'FROM %(table)s '
               '%(condition)s' % {
                   'table': table,
                   'field': source_field,
                   'condition': condition,
               })
    for row in cr.fetchall():
        model.write(cr, SUPERUSER_ID, row[0], {target_field: row[1]})
        # model.write(cr, SUPERUSER_ID, row[0], {target_field: [(4, row[1])]})


def migrate(cr, version):
    print 'Migrating product_sale_price_by_margin'
    if not version:
        return
    registry = RegistryManager.get(cr.dbname)
    model = 'product.template'
    table = 'product_template'
    source_field = "manual_list_price"
    target_field = "list_price"
    condition = "WHERE list_price_type = 'manual'"
    copy_column(
        cr,
        registry[model],
        table,
        source_field,
        target_field,
        condition,
    )
383560e40268eedb56d5030f654f660465957a47
examples/subch/subch_forward.py
examples/subch/subch_forward.py
# Build a subch network with Library.linking_nuclei
import pynucastro as pyna
from pynucastro.networks import StarKillerNetwork

library_file = "20180228default2"
mylibrary = pyna.rates.Library(library_file)

all_nuclei = ["he4", "c12", "o16", "n14", "f18", "ne21", "p", "n13", "ne20"]

subCh = mylibrary.linking_nuclei(all_nuclei, with_reverse=False)

net = StarKillerNetwork(libraries=subCh)
net.write_network()
Add example building a subch network using the Library.linking_nuclei feature.
Add example building a subch network using the Library.linking_nuclei feature.
Python
bsd-3-clause
pyreaclib/pyreaclib
Add example building a subch network using the Library.linking_nuclei feature.
# Build a subch network with Library.linking_nuclei
import pynucastro as pyna
from pynucastro.networks import StarKillerNetwork

library_file = "20180228default2"
mylibrary = pyna.rates.Library(library_file)

all_nuclei = ["he4", "c12", "o16", "n14", "f18", "ne21", "p", "n13", "ne20"]

subCh = mylibrary.linking_nuclei(all_nuclei, with_reverse=False)

net = StarKillerNetwork(libraries=subCh)
net.write_network()
<commit_before><commit_msg>Add example building a subch network using the Library.linking_nuclei feature.<commit_after>
# Build a subch network with Library.linking_nuclei
import pynucastro as pyna
from pynucastro.networks import StarKillerNetwork

library_file = "20180228default2"
mylibrary = pyna.rates.Library(library_file)

all_nuclei = ["he4", "c12", "o16", "n14", "f18", "ne21", "p", "n13", "ne20"]

subCh = mylibrary.linking_nuclei(all_nuclei, with_reverse=False)

net = StarKillerNetwork(libraries=subCh)
net.write_network()
Add example building a subch network using the Library.linking_nuclei feature.# Build a subch network with Library.linking_nuclei
import pynucastro as pyna
from pynucastro.networks import StarKillerNetwork

library_file = "20180228default2"
mylibrary = pyna.rates.Library(library_file)

all_nuclei = ["he4", "c12", "o16", "n14", "f18", "ne21", "p", "n13", "ne20"]

subCh = mylibrary.linking_nuclei(all_nuclei, with_reverse=False)

net = StarKillerNetwork(libraries=subCh)
net.write_network()
<commit_before><commit_msg>Add example building a subch network using the Library.linking_nuclei feature.<commit_after># Build a subch network with Library.linking_nuclei
import pynucastro as pyna
from pynucastro.networks import StarKillerNetwork

library_file = "20180228default2"
mylibrary = pyna.rates.Library(library_file)

all_nuclei = ["he4", "c12", "o16", "n14", "f18", "ne21", "p", "n13", "ne20"]

subCh = mylibrary.linking_nuclei(all_nuclei, with_reverse=False)

net = StarKillerNetwork(libraries=subCh)
net.write_network()
97054c3ccac35100dace5df43dcd3b70a15836a6
peeringdb/management/commands/index_peer_records.py
peeringdb/management/commands/index_peer_records.py
from __future__ import unicode_literals import logging from django.core.management.base import BaseCommand from peeringdb.api import PeeringDB class Command(BaseCommand): help = 'Index peer records based on PeeringDB.' logger = logging.getLogger('peering.manager.peeringdb') def handle(self, *args, **options): self.logger.info('Indexing peer records...') api = PeeringDB() api.force_peer_records_discovery()
Add a command to index peer records.
Add a command to index peer records.
Python
apache-2.0
respawner/peering-manager,respawner/peering-manager,respawner/peering-manager,respawner/peering-manager
Add a command to index peer records.
from __future__ import unicode_literals import logging from django.core.management.base import BaseCommand from peeringdb.api import PeeringDB class Command(BaseCommand): help = 'Index peer records based on PeeringDB.' logger = logging.getLogger('peering.manager.peeringdb') def handle(self, *args, **options): self.logger.info('Indexing peer records...') api = PeeringDB() api.force_peer_records_discovery()
<commit_before><commit_msg>Add a command to index peer records.<commit_after>
from __future__ import unicode_literals import logging from django.core.management.base import BaseCommand from peeringdb.api import PeeringDB class Command(BaseCommand): help = 'Index peer records based on PeeringDB.' logger = logging.getLogger('peering.manager.peeringdb') def handle(self, *args, **options): self.logger.info('Indexing peer records...') api = PeeringDB() api.force_peer_records_discovery()
Add a command to index peer records.from __future__ import unicode_literals import logging from django.core.management.base import BaseCommand from peeringdb.api import PeeringDB class Command(BaseCommand): help = 'Index peer records based on PeeringDB.' logger = logging.getLogger('peering.manager.peeringdb') def handle(self, *args, **options): self.logger.info('Indexing peer records...') api = PeeringDB() api.force_peer_records_discovery()
<commit_before><commit_msg>Add a command to index peer records.<commit_after>from __future__ import unicode_literals import logging from django.core.management.base import BaseCommand from peeringdb.api import PeeringDB class Command(BaseCommand): help = 'Index peer records based on PeeringDB.' logger = logging.getLogger('peering.manager.peeringdb') def handle(self, *args, **options): self.logger.info('Indexing peer records...') api = PeeringDB() api.force_peer_records_discovery()
92622b83b1b191fec22655fa727fbd87c5af211f
spelling_ru.py
spelling_ru.py
def pl_1(order): """2, 3, 4""" return (order == 'тысяча') and 'тысячи' or order + 'а' def pl_2(order): """5 и больше""" return (order == 'тысяча') and 'тысяч' or order + 'ов' RU_PASSES = """ ^ 1 <order> = <order> 1 <thousand> = одна тысяча 2 <thousand> = две тысячи <2_to_4> <order> = <order, pl_1> <not_1> <order> = <order, pl_2> """
Add preliminary Russian spelling definitions
Add preliminary Russian spelling definitions
Python
mit
alco/numspell,alco/numspell
Add preliminary Russian spelling definitions
def pl_1(order): """2, 3, 4""" return (order == 'тысяча') and 'тысячи' or order + 'а' def pl_2(order): """5 и больше""" return (order == 'тысяча') and 'тысяч' or order + 'ов' RU_PASSES = """ ^ 1 <order> = <order> 1 <thousand> = одна тысяча 2 <thousand> = две тысячи <2_to_4> <order> = <order, pl_1> <not_1> <order> = <order, pl_2> """
<commit_before><commit_msg>Add preliminary Russian spelling definitions<commit_after>
def pl_1(order): """2, 3, 4""" return (order == 'тысяча') and 'тысячи' or order + 'а' def pl_2(order): """5 и больше""" return (order == 'тысяча') and 'тысяч' or order + 'ов' RU_PASSES = """ ^ 1 <order> = <order> 1 <thousand> = одна тысяча 2 <thousand> = две тысячи <2_to_4> <order> = <order, pl_1> <not_1> <order> = <order, pl_2> """
Add preliminary Russian spelling definitions def pl_1(order): """2, 3, 4""" return (order == 'тысяча') and 'тысячи' or order + 'а' def pl_2(order): """5 и больше""" return (order == 'тысяча') and 'тысяч' or order + 'ов' RU_PASSES = """ ^ 1 <order> = <order> 1 <thousand> = одна тысяча 2 <thousand> = две тысячи <2_to_4> <order> = <order, pl_1> <not_1> <order> = <order, pl_2> """
<commit_before><commit_msg>Add preliminary Russian spelling definitions<commit_after> def pl_1(order): """2, 3, 4""" return (order == 'тысяча') and 'тысячи' or order + 'а' def pl_2(order): """5 и больше""" return (order == 'тысяча') and 'тысяч' or order + 'ов' RU_PASSES = """ ^ 1 <order> = <order> 1 <thousand> = одна тысяча 2 <thousand> = две тысячи <2_to_4> <order> = <order, pl_1> <not_1> <order> = <order, pl_2> """
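The pl_1/pl_2 helpers above lean on the classic `cond and a or b` idiom, which is safe here because the truthy branch is never an empty string. A quick demonstration of the values they produce, runnable as-is once the two functions are defined:

# 'тысяча' is special-cased; every other order name just gets a suffix.
print(pl_1('тысяча'))    # -> 'тысячи'   (form used after 2, 3, 4)
print(pl_1('миллион'))   # -> 'миллиона'
print(pl_2('тысяча'))    # -> 'тысяч'    (form used after 5 and more)
print(pl_2('миллион'))   # -> 'миллионов'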
1b88cbcb7a0b7aef7c41d6eedc632394b5d915c4
examples/gegl.py
examples/gegl.py
# from gi.repository import Gegl, GeglGtk3 from gi.repository import MyPaint, MyPaintGegl if __name__ == '__main__': # Create a brush, load from disk brush = MyPaint.Brush() brush_def = open("brushes/classic/brush.myb").read() brush.from_string(brush_def) # List all settings # TODO: Is there a better way to list all enums with GI? settings = [getattr(MyPaint.BrushSetting, attr) for attr in dir(MyPaint.BrushSetting) if attr.startswith("SETTING_")] print "Available settings: %s\n" % str(settings) # Get info about a given setting setting = MyPaint.BrushSetting.SETTING_RADIUS_LOGARITHMIC info = MyPaint.brush_setting_info(setting) # TODO: rename "def_" to "default" print "Setting: %s\n\t Max: %f \n\t Default: %f \n\t Min: %f" % (info.cname, info.max, info.def_, info.min) print "\t Name: %s\n\t Tooltip: '%s'\n" % (info.get_name(), info.get_tooltip()) # Use the getters so that i18n works # TODO: should be MyPaint.BrushSetting.from_cname # Same with MyPaint.Brush.input_from_cname assert (MyPaint.Brush.setting_from_cname(info.cname) == setting) # Get/Set current base value for the given setting print "Base value is: %f" % brush.get_base_value(setting) brush.set_base_value(setting, 2.0) assert brush.get_base_value(setting) == 2.0 # Get dynamics for given setting inputs = [getattr(MyPaint.BrushInput, a) for a in dir(MyPaint.BrushInput) if a.startswith('INPUT_')] if not brush.is_constant(setting): for input in inputs: mapping_points = brush.get_mapping_n(setting, input) if mapping_points > 1: # If 0, no dynamics for this input points = [brush.get_mapping_point(setting, input, i) for i in range(mapping_points)] print "Has dynamics for input %s:\n%s" % (input, str(points)) # Create a surface to paint on surface = MyPaint.Surface() # FIXME: Must use a real surface here, using just the interface wont paint anything # TypeError: cannot allocate disguised struct MyPaintGegl.TiledSurface; # surface = MyPaintGegl.TiledSurface() for x, y in [(0.0, 0.0), (10.0, 10.0), (10.0, 20.0)]: dtime = 100.0 # XXX: Important to set correctly for speed calculations brush.stroke_to(surface, x, y, pressure=1.0, xtilt=0.0, ytilt=0.0, dtime=dtime)
Add start of a Python/GI/GEGL-based example
brushlib: Add start of a Python/GI/GEGL-based example
Python
isc
achadwick/libmypaint,achadwick/libmypaint,b3sigma/libmypaint,achadwick/libmypaint,achadwick/libmypaint,b3sigma/libmypaint,b3sigma/libmypaint
brushlib: Add start of a Python/GI/GEGL-based example
# from gi.repository import Gegl, GeglGtk3 from gi.repository import MyPaint, MyPaintGegl if __name__ == '__main__': # Create a brush, load from disk brush = MyPaint.Brush() brush_def = open("brushes/classic/brush.myb").read() brush.from_string(brush_def) # List all settings # TODO: Is there a better way to list all enums with GI? settings = [getattr(MyPaint.BrushSetting, attr) for attr in dir(MyPaint.BrushSetting) if attr.startswith("SETTING_")] print "Available settings: %s\n" % str(settings) # Get info about a given setting setting = MyPaint.BrushSetting.SETTING_RADIUS_LOGARITHMIC info = MyPaint.brush_setting_info(setting) # TODO: rename "def_" to "default" print "Setting: %s\n\t Max: %f \n\t Default: %f \n\t Min: %f" % (info.cname, info.max, info.def_, info.min) print "\t Name: %s\n\t Tooltip: '%s'\n" % (info.get_name(), info.get_tooltip()) # Use the getters so that i18n works # TODO: should be MyPaint.BrushSetting.from_cname # Same with MyPaint.Brush.input_from_cname assert (MyPaint.Brush.setting_from_cname(info.cname) == setting) # Get/Set current base value for the given setting print "Base value is: %f" % brush.get_base_value(setting) brush.set_base_value(setting, 2.0) assert brush.get_base_value(setting) == 2.0 # Get dynamics for given setting inputs = [getattr(MyPaint.BrushInput, a) for a in dir(MyPaint.BrushInput) if a.startswith('INPUT_')] if not brush.is_constant(setting): for input in inputs: mapping_points = brush.get_mapping_n(setting, input) if mapping_points > 1: # If 0, no dynamics for this input points = [brush.get_mapping_point(setting, input, i) for i in range(mapping_points)] print "Has dynamics for input %s:\n%s" % (input, str(points)) # Create a surface to paint on surface = MyPaint.Surface() # FIXME: Must use a real surface here, using just the interface wont paint anything # TypeError: cannot allocate disguised struct MyPaintGegl.TiledSurface; # surface = MyPaintGegl.TiledSurface() for x, y in [(0.0, 0.0), (10.0, 10.0), (10.0, 20.0)]: dtime = 100.0 # XXX: Important to set correctly for speed calculations brush.stroke_to(surface, x, y, pressure=1.0, xtilt=0.0, ytilt=0.0, dtime=dtime)
<commit_before><commit_msg>brushlib: Add start of a Python/GI/GEGL-based example<commit_after>
# from gi.repository import Gegl, GeglGtk3 from gi.repository import MyPaint, MyPaintGegl if __name__ == '__main__': # Create a brush, load from disk brush = MyPaint.Brush() brush_def = open("brushes/classic/brush.myb").read() brush.from_string(brush_def) # List all settings # TODO: Is there a better way to list all enums with GI? settings = [getattr(MyPaint.BrushSetting, attr) for attr in dir(MyPaint.BrushSetting) if attr.startswith("SETTING_")] print "Available settings: %s\n" % str(settings) # Get info about a given setting setting = MyPaint.BrushSetting.SETTING_RADIUS_LOGARITHMIC info = MyPaint.brush_setting_info(setting) # TODO: rename "def_" to "default" print "Setting: %s\n\t Max: %f \n\t Default: %f \n\t Min: %f" % (info.cname, info.max, info.def_, info.min) print "\t Name: %s\n\t Tooltip: '%s'\n" % (info.get_name(), info.get_tooltip()) # Use the getters so that i18n works # TODO: should be MyPaint.BrushSetting.from_cname # Same with MyPaint.Brush.input_from_cname assert (MyPaint.Brush.setting_from_cname(info.cname) == setting) # Get/Set current base value for the given setting print "Base value is: %f" % brush.get_base_value(setting) brush.set_base_value(setting, 2.0) assert brush.get_base_value(setting) == 2.0 # Get dynamics for given setting inputs = [getattr(MyPaint.BrushInput, a) for a in dir(MyPaint.BrushInput) if a.startswith('INPUT_')] if not brush.is_constant(setting): for input in inputs: mapping_points = brush.get_mapping_n(setting, input) if mapping_points > 1: # If 0, no dynamics for this input points = [brush.get_mapping_point(setting, input, i) for i in range(mapping_points)] print "Has dynamics for input %s:\n%s" % (input, str(points)) # Create a surface to paint on surface = MyPaint.Surface() # FIXME: Must use a real surface here, using just the interface wont paint anything # TypeError: cannot allocate disguised struct MyPaintGegl.TiledSurface; # surface = MyPaintGegl.TiledSurface() for x, y in [(0.0, 0.0), (10.0, 10.0), (10.0, 20.0)]: dtime = 100.0 # XXX: Important to set correctly for speed calculations brush.stroke_to(surface, x, y, pressure=1.0, xtilt=0.0, ytilt=0.0, dtime=dtime)
brushlib: Add start of a Python/GI/GEGL-based example # from gi.repository import Gegl, GeglGtk3 from gi.repository import MyPaint, MyPaintGegl if __name__ == '__main__': # Create a brush, load from disk brush = MyPaint.Brush() brush_def = open("brushes/classic/brush.myb").read() brush.from_string(brush_def) # List all settings # TODO: Is there a better way to list all enums with GI? settings = [getattr(MyPaint.BrushSetting, attr) for attr in dir(MyPaint.BrushSetting) if attr.startswith("SETTING_")] print "Available settings: %s\n" % str(settings) # Get info about a given setting setting = MyPaint.BrushSetting.SETTING_RADIUS_LOGARITHMIC info = MyPaint.brush_setting_info(setting) # TODO: rename "def_" to "default" print "Setting: %s\n\t Max: %f \n\t Default: %f \n\t Min: %f" % (info.cname, info.max, info.def_, info.min) print "\t Name: %s\n\t Tooltip: '%s'\n" % (info.get_name(), info.get_tooltip()) # Use the getters so that i18n works # TODO: should be MyPaint.BrushSetting.from_cname # Same with MyPaint.Brush.input_from_cname assert (MyPaint.Brush.setting_from_cname(info.cname) == setting) # Get/Set current base value for the given setting print "Base value is: %f" % brush.get_base_value(setting) brush.set_base_value(setting, 2.0) assert brush.get_base_value(setting) == 2.0 # Get dynamics for given setting inputs = [getattr(MyPaint.BrushInput, a) for a in dir(MyPaint.BrushInput) if a.startswith('INPUT_')] if not brush.is_constant(setting): for input in inputs: mapping_points = brush.get_mapping_n(setting, input) if mapping_points > 1: # If 0, no dynamics for this input points = [brush.get_mapping_point(setting, input, i) for i in range(mapping_points)] print "Has dynamics for input %s:\n%s" % (input, str(points)) # Create a surface to paint on surface = MyPaint.Surface() # FIXME: Must use a real surface here, using just the interface wont paint anything # TypeError: cannot allocate disguised struct MyPaintGegl.TiledSurface; # surface = MyPaintGegl.TiledSurface() for x, y in [(0.0, 0.0), (10.0, 10.0), (10.0, 20.0)]: dtime = 100.0 # XXX: Important to set correctly for speed calculations brush.stroke_to(surface, x, y, pressure=1.0, xtilt=0.0, ytilt=0.0, dtime=dtime)
<commit_before><commit_msg>brushlib: Add start of a Python/GI/GEGL-based example<commit_after> # from gi.repository import Gegl, GeglGtk3 from gi.repository import MyPaint, MyPaintGegl if __name__ == '__main__': # Create a brush, load from disk brush = MyPaint.Brush() brush_def = open("brushes/classic/brush.myb").read() brush.from_string(brush_def) # List all settings # TODO: Is there a better way to list all enums with GI? settings = [getattr(MyPaint.BrushSetting, attr) for attr in dir(MyPaint.BrushSetting) if attr.startswith("SETTING_")] print "Available settings: %s\n" % str(settings) # Get info about a given setting setting = MyPaint.BrushSetting.SETTING_RADIUS_LOGARITHMIC info = MyPaint.brush_setting_info(setting) # TODO: rename "def_" to "default" print "Setting: %s\n\t Max: %f \n\t Default: %f \n\t Min: %f" % (info.cname, info.max, info.def_, info.min) print "\t Name: %s\n\t Tooltip: '%s'\n" % (info.get_name(), info.get_tooltip()) # Use the getters so that i18n works # TODO: should be MyPaint.BrushSetting.from_cname # Same with MyPaint.Brush.input_from_cname assert (MyPaint.Brush.setting_from_cname(info.cname) == setting) # Get/Set current base value for the given setting print "Base value is: %f" % brush.get_base_value(setting) brush.set_base_value(setting, 2.0) assert brush.get_base_value(setting) == 2.0 # Get dynamics for given setting inputs = [getattr(MyPaint.BrushInput, a) for a in dir(MyPaint.BrushInput) if a.startswith('INPUT_')] if not brush.is_constant(setting): for input in inputs: mapping_points = brush.get_mapping_n(setting, input) if mapping_points > 1: # If 0, no dynamics for this input points = [brush.get_mapping_point(setting, input, i) for i in range(mapping_points)] print "Has dynamics for input %s:\n%s" % (input, str(points)) # Create a surface to paint on surface = MyPaint.Surface() # FIXME: Must use a real surface here, using just the interface wont paint anything # TypeError: cannot allocate disguised struct MyPaintGegl.TiledSurface; # surface = MyPaintGegl.TiledSurface() for x, y in [(0.0, 0.0), (10.0, 10.0), (10.0, 20.0)]: dtime = 100.0 # XXX: Important to set correctly for speed calculations brush.stroke_to(surface, x, y, pressure=1.0, xtilt=0.0, ytilt=0.0, dtime=dtime)
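The example above filters dir() output twice to enumerate GI enum members (the SETTING_* and INPUT_* attributes). The same pattern as a small generic helper, demonstrated against a plain stand-in class so it runs without GObject introspection installed; FakeBrushSetting and its values are illustrative only.

def members_with_prefix(namespace, prefix):
    """Collect attribute values whose names start with `prefix`."""
    return [getattr(namespace, name) for name in dir(namespace)
            if name.startswith(prefix)]

class FakeBrushSetting:            # stand-in for MyPaint.BrushSetting
    SETTING_OPAQUE = 1
    SETTING_RADIUS_LOGARITHMIC = 0

# dir() sorts attribute names alphabetically, so OPAQUE comes first here.
print(members_with_prefix(FakeBrushSetting, "SETTING_"))   # -> [1, 0]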
940cf693dcde58677630b7757701317c5cf4bbc3
filestore/test/test_handlers.py
filestore/test/test_handlers.py
from __future__ import (absolute_import, division, print_function, unicode_literals) import six import numpy as np import h5py import tempfile import uuid import mongoengine import mongoengine.connection from filestore.api import (insert_resource, insert_datum, retrieve, register_handler, deregister_handler, db_disconnect) from filestore.odm_templates import ALIAS from filestore.file_readers import AreaDetectorHDF5Handler from numpy.testing import assert_array_equal db_name = str(uuid.uuid4()) conn = None def setup(): global conn # make sure nothing is connected db_disconnect() # make sure it _is_ connected conn = mongoengine.connect(db_name, host='localhost', alias=ALIAS) print(id(conn.database)) register_handler('AD_HDF5', AreaDetectorHDF5Handler) def teardown(): deregister_handler('AD_HDF5') db_disconnect() # if we know about a connection, drop the database if conn: conn.drop_database(db_name) def test_AD_round_trip(): filename = tempfile.NamedTemporaryFile().name f = h5py.File(filename) N = 5 # Write the data. data = np.multiply.outer(np.arange(N), np.ones((2, 2))) f.create_dataset('/entry/data/data', data=data) f.close() # Insert the data records. print(id(conn.database)) resource_id = insert_resource('AD_HDF5', filename) datum_ids = [str(uuid.uuid4()) for i in range(N)] for i, datum_id in enumerate(datum_ids): insert_datum(resource_id, datum_id, dict(point_number=i)) print(id(conn.database)) # Retrieve the data. for i, datum_id in enumerate(datum_ids): print(id(conn.database)) data = retrieve(datum_id) known_data = i * np.ones((2, 2)) assert_array_equal(data, known_data)
Add test of AD_HDF5 handler.
ENH: Add test of AD_HDF5 handler.
Python
bsd-3-clause
ericdill/fileStore,danielballan/filestore,ericdill/databroker,tacaswell/filestore,ericdill/fileStore,ericdill/databroker,stuwilkins/filestore,NSLS-II/filestore,danielballan/filestore,stuwilkins/filestore
ENH: Add test of AD_HDF5 handler.
from __future__ import (absolute_import, division, print_function, unicode_literals) import six import numpy as np import h5py import tempfile import uuid import mongoengine import mongoengine.connection from filestore.api import (insert_resource, insert_datum, retrieve, register_handler, deregister_handler, db_disconnect) from filestore.odm_templates import ALIAS from filestore.file_readers import AreaDetectorHDF5Handler from numpy.testing import assert_array_equal db_name = str(uuid.uuid4()) conn = None def setup(): global conn # make sure nothing is connected db_disconnect() # make sure it _is_ connected conn = mongoengine.connect(db_name, host='localhost', alias=ALIAS) print(id(conn.database)) register_handler('AD_HDF5', AreaDetectorHDF5Handler) def teardown(): deregister_handler('AD_HDF5') db_disconnect() # if we know about a connection, drop the database if conn: conn.drop_database(db_name) def test_AD_round_trip(): filename = tempfile.NamedTemporaryFile().name f = h5py.File(filename) N = 5 # Write the data. data = np.multiply.outer(np.arange(N), np.ones((2, 2))) f.create_dataset('/entry/data/data', data=data) f.close() # Insert the data records. print(id(conn.database)) resource_id = insert_resource('AD_HDF5', filename) datum_ids = [str(uuid.uuid4()) for i in range(N)] for i, datum_id in enumerate(datum_ids): insert_datum(resource_id, datum_id, dict(point_number=i)) print(id(conn.database)) # Retrieve the data. for i, datum_id in enumerate(datum_ids): print(id(conn.database)) data = retrieve(datum_id) known_data = i * np.ones((2, 2)) assert_array_equal(data, known_data)
<commit_before><commit_msg>ENH: Add test of AD_HDF5 handler.<commit_after>
from __future__ import (absolute_import, division, print_function, unicode_literals) import six import numpy as np import h5py import tempfile import uuid import mongoengine import mongoengine.connection from filestore.api import (insert_resource, insert_datum, retrieve, register_handler, deregister_handler, db_disconnect) from filestore.odm_templates import ALIAS from filestore.file_readers import AreaDetectorHDF5Handler from numpy.testing import assert_array_equal db_name = str(uuid.uuid4()) conn = None def setup(): global conn # make sure nothing is connected db_disconnect() # make sure it _is_ connected conn = mongoengine.connect(db_name, host='localhost', alias=ALIAS) print(id(conn.database)) register_handler('AD_HDF5', AreaDetectorHDF5Handler) def teardown(): deregister_handler('AD_HDF5') db_disconnect() # if we know about a connection, drop the database if conn: conn.drop_database(db_name) def test_AD_round_trip(): filename = tempfile.NamedTemporaryFile().name f = h5py.File(filename) N = 5 # Write the data. data = np.multiply.outer(np.arange(N), np.ones((2, 2))) f.create_dataset('/entry/data/data', data=data) f.close() # Insert the data records. print(id(conn.database)) resource_id = insert_resource('AD_HDF5', filename) datum_ids = [str(uuid.uuid4()) for i in range(N)] for i, datum_id in enumerate(datum_ids): insert_datum(resource_id, datum_id, dict(point_number=i)) print(id(conn.database)) # Retrieve the data. for i, datum_id in enumerate(datum_ids): print(id(conn.database)) data = retrieve(datum_id) known_data = i * np.ones((2, 2)) assert_array_equal(data, known_data)
ENH: Add test of AD_HDF5 handler.from __future__ import (absolute_import, division, print_function, unicode_literals) import six import numpy as np import h5py import tempfile import uuid import mongoengine import mongoengine.connection from filestore.api import (insert_resource, insert_datum, retrieve, register_handler, deregister_handler, db_disconnect) from filestore.odm_templates import ALIAS from filestore.file_readers import AreaDetectorHDF5Handler from numpy.testing import assert_array_equal db_name = str(uuid.uuid4()) conn = None def setup(): global conn # make sure nothing is connected db_disconnect() # make sure it _is_ connected conn = mongoengine.connect(db_name, host='localhost', alias=ALIAS) print(id(conn.database)) register_handler('AD_HDF5', AreaDetectorHDF5Handler) def teardown(): deregister_handler('AD_HDF5') db_disconnect() # if we know about a connection, drop the database if conn: conn.drop_database(db_name) def test_AD_round_trip(): filename = tempfile.NamedTemporaryFile().name f = h5py.File(filename) N = 5 # Write the data. data = np.multiply.outer(np.arange(N), np.ones((2, 2))) f.create_dataset('/entry/data/data', data=data) f.close() # Insert the data records. print(id(conn.database)) resource_id = insert_resource('AD_HDF5', filename) datum_ids = [str(uuid.uuid4()) for i in range(N)] for i, datum_id in enumerate(datum_ids): insert_datum(resource_id, datum_id, dict(point_number=i)) print(id(conn.database)) # Retrieve the data. for i, datum_id in enumerate(datum_ids): print(id(conn.database)) data = retrieve(datum_id) known_data = i * np.ones((2, 2)) assert_array_equal(data, known_data)
<commit_before><commit_msg>ENH: Add test of AD_HDF5 handler.<commit_after>from __future__ import (absolute_import, division, print_function, unicode_literals) import six import numpy as np import h5py import tempfile import uuid import mongoengine import mongoengine.connection from filestore.api import (insert_resource, insert_datum, retrieve, register_handler, deregister_handler, db_disconnect) from filestore.odm_templates import ALIAS from filestore.file_readers import AreaDetectorHDF5Handler from numpy.testing import assert_array_equal db_name = str(uuid.uuid4()) conn = None def setup(): global conn # make sure nothing is connected db_disconnect() # make sure it _is_ connected conn = mongoengine.connect(db_name, host='localhost', alias=ALIAS) print(id(conn.database)) register_handler('AD_HDF5', AreaDetectorHDF5Handler) def teardown(): deregister_handler('AD_HDF5') db_disconnect() # if we know about a connection, drop the database if conn: conn.drop_database(db_name) def test_AD_round_trip(): filename = tempfile.NamedTemporaryFile().name f = h5py.File(filename) N = 5 # Write the data. data = np.multiply.outer(np.arange(N), np.ones((2, 2))) f.create_dataset('/entry/data/data', data=data) f.close() # Insert the data records. print(id(conn.database)) resource_id = insert_resource('AD_HDF5', filename) datum_ids = [str(uuid.uuid4()) for i in range(N)] for i, datum_id in enumerate(datum_ids): insert_datum(resource_id, datum_id, dict(point_number=i)) print(id(conn.database)) # Retrieve the data. for i, datum_id in enumerate(datum_ids): print(id(conn.database)) data = retrieve(datum_id) known_data = i * np.ones((2, 2)) assert_array_equal(data, known_data)
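For orientation, a toy stand-in for what an AD_HDF5-style handler reduces to. The constructor/__call__ split is an assumption inferred from how the test above routes resources (a filename) and datum kwargs (point_number), not a documented contract of filestore.

import numpy as np

class ToyHandler:
    """Minimal stand-in for an AD_HDF5-style handler (interface assumed)."""

    def __init__(self, filename):
        # The real handler would open the HDF5 file (lazily) here.
        self._filename = filename

    def __call__(self, point_number):
        # The real handler slices /entry/data/data at `point_number`; this
        # fabricates the same shape the test asserts against.
        return point_number * np.ones((2, 2))

h = ToyHandler("/tmp/example.h5")   # hypothetical path, never actually opened
print(h(point_number=3))            # -> a 2x2 array of 3.0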
5fc2c62ff47d7483701d5677357f06e7d9169f60
gennotes_server/serializers.py
gennotes_server/serializers.py
from rest_framework import serializers from .models import Relation, Variant class VariantSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Variant fields = ["tags"] class RelationSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Relation fields = ["tags"]
from rest_framework import serializers from .models import Relation, Variant class VariantSerializer(serializers.HyperlinkedModelSerializer): """ Serialize a Variant object. """ b37_id = serializers.SerializerMethodField() class Meta: model = Variant fields = ['b37_id', 'tags'] @staticmethod def get_b37_id(obj): """ Return an ID like "1-883516-G-A". """ return '-'.join([ obj.tags['chrom_b37'], obj.tags['pos_b37'], obj.tags['ref_allele_b37'], obj.tags['var_allele_b37'], ]) class RelationSerializer(serializers.HyperlinkedModelSerializer): """ Serialize a Relation object. """ class Meta: model = Relation fields = ['tags']
Return the variant ID in the API
Return the variant ID in the API
Python
mit
usajusaj/gennotes,PersonalGenomesOrg/gennotes,PersonalGenomesOrg/gennotes,PersonalGenomesOrg/gennotes,usajusaj/gennotes,usajusaj/gennotes
from rest_framework import serializers from .models import Relation, Variant class VariantSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Variant fields = ["tags"] class RelationSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Relation fields = ["tags"] Return the variant ID in the API
from rest_framework import serializers from .models import Relation, Variant class VariantSerializer(serializers.HyperlinkedModelSerializer): """ Serialize a Variant object. """ b37_id = serializers.SerializerMethodField() class Meta: model = Variant fields = ['b37_id', 'tags'] @staticmethod def get_b37_id(obj): """ Return an ID like "1-883516-G-A". """ return '-'.join([ obj.tags['chrom_b37'], obj.tags['pos_b37'], obj.tags['ref_allele_b37'], obj.tags['var_allele_b37'], ]) class RelationSerializer(serializers.HyperlinkedModelSerializer): """ Serialize a Relation object. """ class Meta: model = Relation fields = ['tags']
<commit_before>from rest_framework import serializers from .models import Relation, Variant class VariantSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Variant fields = ["tags"] class RelationSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Relation fields = ["tags"] <commit_msg>Return the variant ID in the API<commit_after>
from rest_framework import serializers from .models import Relation, Variant class VariantSerializer(serializers.HyperlinkedModelSerializer): """ Serialize a Variant object. """ b37_id = serializers.SerializerMethodField() class Meta: model = Variant fields = ['b37_id', 'tags'] @staticmethod def get_b37_id(obj): """ Return an ID like "1-883516-G-A". """ return '-'.join([ obj.tags['chrom_b37'], obj.tags['pos_b37'], obj.tags['ref_allele_b37'], obj.tags['var_allele_b37'], ]) class RelationSerializer(serializers.HyperlinkedModelSerializer): """ Serialize a Relation object. """ class Meta: model = Relation fields = ['tags']
from rest_framework import serializers from .models import Relation, Variant class VariantSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Variant fields = ["tags"] class RelationSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Relation fields = ["tags"] Return the variant ID in the APIfrom rest_framework import serializers from .models import Relation, Variant class VariantSerializer(serializers.HyperlinkedModelSerializer): """ Serialize a Variant object. """ b37_id = serializers.SerializerMethodField() class Meta: model = Variant fields = ['b37_id', 'tags'] @staticmethod def get_b37_id(obj): """ Return an ID like "1-883516-G-A". """ return '-'.join([ obj.tags['chrom_b37'], obj.tags['pos_b37'], obj.tags['ref_allele_b37'], obj.tags['var_allele_b37'], ]) class RelationSerializer(serializers.HyperlinkedModelSerializer): """ Serialize a Relation object. """ class Meta: model = Relation fields = ['tags']
<commit_before>from rest_framework import serializers from .models import Relation, Variant class VariantSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Variant fields = ["tags"] class RelationSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Relation fields = ["tags"] <commit_msg>Return the variant ID in the API<commit_after>from rest_framework import serializers from .models import Relation, Variant class VariantSerializer(serializers.HyperlinkedModelSerializer): """ Serialize a Variant object. """ b37_id = serializers.SerializerMethodField() class Meta: model = Variant fields = ['b37_id', 'tags'] @staticmethod def get_b37_id(obj): """ Return an ID like "1-883516-G-A". """ return '-'.join([ obj.tags['chrom_b37'], obj.tags['pos_b37'], obj.tags['ref_allele_b37'], obj.tags['var_allele_b37'], ]) class RelationSerializer(serializers.HyperlinkedModelSerializer): """ Serialize a Relation object. """ class Meta: model = Relation fields = ['tags']
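For concreteness, the join performed by get_b37_id, replayed on a plain dict so it runs without Django or DRF; the tag values mirror the "1-883516-G-A" example from the docstring and are illustrative, not real data.

# Stand-in for a Variant's tags.
tags = {
    'chrom_b37': '1',
    'pos_b37': '883516',
    'ref_allele_b37': 'G',
    'var_allele_b37': 'A',
}
b37_id = '-'.join([tags['chrom_b37'], tags['pos_b37'],
                   tags['ref_allele_b37'], tags['var_allele_b37']])
print(b37_id)   # -> '1-883516-G-A', matching the serializer's docstring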
6b51bb8e62ca8bb43c93c2c58b65b5b4fb5c1a06
src/ggrc/settings/app_engine.py
src/ggrc/settings/app_engine.py
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: dan@reciprocitylabs.com # Maintained By: dan@reciprocitylabs.com APP_ENGINE = True ENABLE_JASMINE = False LOGIN_MANAGER = 'ggrc.login.appengine' FULLTEXT_INDEXER = 'ggrc.fulltext.mysql.MysqlIndexer' # Cannot access filesystem on AppEngine or when using SDK AUTOBUILD_ASSETS = False SQLALCHEMY_RECORD_QUERIES = True MEMCACHE_MECHANISM = True CALENDAR_MECHANISM = False
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: dan@reciprocitylabs.com # Maintained By: dan@reciprocitylabs.com APP_ENGINE = True ENABLE_JASMINE = False LOGIN_MANAGER = 'ggrc.login.appengine' FULLTEXT_INDEXER = 'ggrc.fulltext.mysql.MysqlIndexer' # Cannot access filesystem on AppEngine or when using SDK AUTOBUILD_ASSETS = False SQLALCHEMY_RECORD_QUERIES = False MEMCACHE_MECHANISM = True CALENDAR_MECHANISM = False
Disable SQL logging in appengine
Disable SQL logging in appengine
Python
apache-2.0
uskudnik/ggrc-core,uskudnik/ggrc-core,selahssea/ggrc-core,josthkko/ggrc-core,VinnieJohns/ggrc-core,jmakov/ggrc-core,NejcZupec/ggrc-core,kr41/ggrc-core,VinnieJohns/ggrc-core,vladan-m/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,prasannav7/ggrc-core,NejcZupec/ggrc-core,jmakov/ggrc-core,vladan-m/ggrc-core,uskudnik/ggrc-core,selahssea/ggrc-core,vladan-m/ggrc-core,josthkko/ggrc-core,plamut/ggrc-core,kr41/ggrc-core,j0gurt/ggrc-core,hasanalom/ggrc-core,AleksNeStu/ggrc-core,hyperNURb/ggrc-core,j0gurt/ggrc-core,AleksNeStu/ggrc-core,vladan-m/ggrc-core,hasanalom/ggrc-core,jmakov/ggrc-core,hasanalom/ggrc-core,andrei-karalionak/ggrc-core,jmakov/ggrc-core,kr41/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,prasannav7/ggrc-core,plamut/ggrc-core,uskudnik/ggrc-core,prasannav7/ggrc-core,andrei-karalionak/ggrc-core,hasanalom/ggrc-core,edofic/ggrc-core,josthkko/ggrc-core,edofic/ggrc-core,hyperNURb/ggrc-core,andrei-karalionak/ggrc-core,kr41/ggrc-core,NejcZupec/ggrc-core,edofic/ggrc-core,j0gurt/ggrc-core,NejcZupec/ggrc-core,hyperNURb/ggrc-core,plamut/ggrc-core,hasanalom/ggrc-core,VinnieJohns/ggrc-core,uskudnik/ggrc-core,edofic/ggrc-core,AleksNeStu/ggrc-core,hyperNURb/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,josthkko/ggrc-core,vladan-m/ggrc-core,hyperNURb/ggrc-core,jmakov/ggrc-core,prasannav7/ggrc-core
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: dan@reciprocitylabs.com # Maintained By: dan@reciprocitylabs.com APP_ENGINE = True ENABLE_JASMINE = False LOGIN_MANAGER = 'ggrc.login.appengine' FULLTEXT_INDEXER = 'ggrc.fulltext.mysql.MysqlIndexer' # Cannot access filesystem on AppEngine or when using SDK AUTOBUILD_ASSETS = False SQLALCHEMY_RECORD_QUERIES = True MEMCACHE_MECHANISM = True CALENDAR_MECHANISM = False Disable SQL logging in appengine
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: dan@reciprocitylabs.com # Maintained By: dan@reciprocitylabs.com APP_ENGINE = True ENABLE_JASMINE = False LOGIN_MANAGER = 'ggrc.login.appengine' FULLTEXT_INDEXER = 'ggrc.fulltext.mysql.MysqlIndexer' # Cannot access filesystem on AppEngine or when using SDK AUTOBUILD_ASSETS = False SQLALCHEMY_RECORD_QUERIES = False MEMCACHE_MECHANISM = True CALENDAR_MECHANISM = False
<commit_before># Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: dan@reciprocitylabs.com # Maintained By: dan@reciprocitylabs.com APP_ENGINE = True ENABLE_JASMINE = False LOGIN_MANAGER = 'ggrc.login.appengine' FULLTEXT_INDEXER = 'ggrc.fulltext.mysql.MysqlIndexer' # Cannot access filesystem on AppEngine or when using SDK AUTOBUILD_ASSETS = False SQLALCHEMY_RECORD_QUERIES = True MEMCACHE_MECHANISM = True CALENDAR_MECHANISM = False <commit_msg>Disable SQL logging in appengine<commit_after>
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: dan@reciprocitylabs.com # Maintained By: dan@reciprocitylabs.com APP_ENGINE = True ENABLE_JASMINE = False LOGIN_MANAGER = 'ggrc.login.appengine' FULLTEXT_INDEXER = 'ggrc.fulltext.mysql.MysqlIndexer' # Cannot access filesystem on AppEngine or when using SDK AUTOBUILD_ASSETS = False SQLALCHEMY_RECORD_QUERIES = False MEMCACHE_MECHANISM = True CALENDAR_MECHANISM = False
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: dan@reciprocitylabs.com # Maintained By: dan@reciprocitylabs.com APP_ENGINE = True ENABLE_JASMINE = False LOGIN_MANAGER = 'ggrc.login.appengine' FULLTEXT_INDEXER = 'ggrc.fulltext.mysql.MysqlIndexer' # Cannot access filesystem on AppEngine or when using SDK AUTOBUILD_ASSETS = False SQLALCHEMY_RECORD_QUERIES = True MEMCACHE_MECHANISM = True CALENDAR_MECHANISM = False Disable SQL logging in appengine# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: dan@reciprocitylabs.com # Maintained By: dan@reciprocitylabs.com APP_ENGINE = True ENABLE_JASMINE = False LOGIN_MANAGER = 'ggrc.login.appengine' FULLTEXT_INDEXER = 'ggrc.fulltext.mysql.MysqlIndexer' # Cannot access filesystem on AppEngine or when using SDK AUTOBUILD_ASSETS = False SQLALCHEMY_RECORD_QUERIES = False MEMCACHE_MECHANISM = True CALENDAR_MECHANISM = False
<commit_before># Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: dan@reciprocitylabs.com # Maintained By: dan@reciprocitylabs.com APP_ENGINE = True ENABLE_JASMINE = False LOGIN_MANAGER = 'ggrc.login.appengine' FULLTEXT_INDEXER = 'ggrc.fulltext.mysql.MysqlIndexer' # Cannot access filesystem on AppEngine or when using SDK AUTOBUILD_ASSETS = False SQLALCHEMY_RECORD_QUERIES = True MEMCACHE_MECHANISM = True CALENDAR_MECHANISM = False <commit_msg>Disable SQL logging in appengine<commit_after># Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: dan@reciprocitylabs.com # Maintained By: dan@reciprocitylabs.com APP_ENGINE = True ENABLE_JASMINE = False LOGIN_MANAGER = 'ggrc.login.appengine' FULLTEXT_INDEXER = 'ggrc.fulltext.mysql.MysqlIndexer' # Cannot access filesystem on AppEngine or when using SDK AUTOBUILD_ASSETS = False SQLALCHEMY_RECORD_QUERIES = False MEMCACHE_MECHANISM = True CALENDAR_MECHANISM = False
a20daba8cb21b1513e9f81b9dc6fc2090512c270
infra/utils/delete-projects.py
infra/utils/delete-projects.py
#!/usr/bin/env python3 # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import google.api_core from google.cloud import resource_manager import sys import argparse from googleapiclient import discovery from oauth2client.client import GoogleCredentials client = resource_manager.Client() credentials = GoogleCredentials.get_application_default() def delete_liens(project_id): service = discovery.build('cloudresourcemanager', 'v1', credentials=credentials) parent = 'projects/{}'.format(project_id) request = service.liens().list(parent=parent) response = request.execute() liens_deleted = 0 for lien in response.get('liens', []): print("Deleting lien:", lien) d_request = service.liens().delete(name=lien.get('name')) d_request.execute() liens_deleted += 1 return liens_deleted def delete_project(project): try: project.delete() except google.api_core.exceptions.BadRequest as e: liens_deleted = delete_liens(project.project_id) if liens_deleted >= 1: delete_project(project) except (google.api_core.exceptions.Forbidden) as e: print("Failed to delete {}".format(project.project_id)) print(e) def delete_projects(parent_type, parent_id): print("Deleting projects in {} {}".format(parent_type, parent_id)) project_filter = { 'parent.type': parent_type, 'parent.id': parent_id } for project in client.list_projects(project_filter): print(" Deleting project {}...".format(project.project_id)) delete_project(project) def main(argv): parser = argparser() args = parser.parse_args(argv[1:]) (parent_type, parent_id) = args.parent_id.split('/') delete_projects(parent_type.strip('s'), parent_id) def argparser(): parser = argparse.ArgumentParser(description='Delete projects within a folder') parser.add_argument('parent_id') return parser if __name__ == "__main__": main(sys.argv)
Add script for deleting all projects within a folder
Add script for deleting all projects within a folder
Python
apache-2.0
GoogleCloudPlatform/cloud-foundation-toolkit,GoogleCloudPlatform/cloud-foundation-toolkit,GoogleCloudPlatform/cloud-foundation-toolkit,GoogleCloudPlatform/cloud-foundation-toolkit,GoogleCloudPlatform/cloud-foundation-toolkit
Add script for deleting all projects within a folder
#!/usr/bin/env python3 # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import google.api_core from google.cloud import resource_manager import sys import argparse from googleapiclient import discovery from oauth2client.client import GoogleCredentials client = resource_manager.Client() credentials = GoogleCredentials.get_application_default() def delete_liens(project_id): service = discovery.build('cloudresourcemanager', 'v1', credentials=credentials) parent = 'projects/{}'.format(project_id) request = service.liens().list(parent=parent) response = request.execute() liens_deleted = 0 for lien in response.get('liens', []): print("Deleting lien:", lien) d_request = service.liens().delete(name=lien.get('name')) d_request.execute() liens_deleted += 1 return liens_deleted def delete_project(project): try: project.delete() except google.api_core.exceptions.BadRequest as e: liens_deleted = delete_liens(project.project_id) if liens_deleted >= 1: delete_project(project) except (google.api_core.exceptions.Forbidden) as e: print("Failed to delete {}".format(project.project_id)) print(e) def delete_projects(parent_type, parent_id): print("Deleting projects in {} {}".format(parent_type, parent_id)) project_filter = { 'parent.type': parent_type, 'parent.id': parent_id } for project in client.list_projects(project_filter): print(" Deleting project {}...".format(project.project_id)) delete_project(project) def main(argv): parser = argparser() args = parser.parse_args(argv[1:]) (parent_type, parent_id) = args.parent_id.split('/') delete_projects(parent_type.strip('s'), parent_id) def argparser(): parser = argparse.ArgumentParser(description='Delete projects within a folder') parser.add_argument('parent_id') return parser if __name__ == "__main__": main(sys.argv)
<commit_before><commit_msg>Add script for deleting all projects within a folder<commit_after>
#!/usr/bin/env python3 # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import google.api_core from google.cloud import resource_manager import sys import argparse from googleapiclient import discovery from oauth2client.client import GoogleCredentials client = resource_manager.Client() credentials = GoogleCredentials.get_application_default() def delete_liens(project_id): service = discovery.build('cloudresourcemanager', 'v1', credentials=credentials) parent = 'projects/{}'.format(project_id) request = service.liens().list(parent=parent) response = request.execute() liens_deleted = 0 for lien in response.get('liens', []): print("Deleting lien:", lien) d_request = service.liens().delete(name=lien.get('name')) d_request.execute() liens_deleted += 1 return liens_deleted def delete_project(project): try: project.delete() except google.api_core.exceptions.BadRequest as e: liens_deleted = delete_liens(project.project_id) if liens_deleted >= 1: delete_project(project) except (google.api_core.exceptions.Forbidden) as e: print("Failed to delete {}".format(project.project_id)) print(e) def delete_projects(parent_type, parent_id): print("Deleting projects in {} {}".format(parent_type, parent_id)) project_filter = { 'parent.type': parent_type, 'parent.id': parent_id } for project in client.list_projects(project_filter): print(" Deleting project {}...".format(project.project_id)) delete_project(project) def main(argv): parser = argparser() args = parser.parse_args(argv[1:]) (parent_type, parent_id) = args.parent_id.split('/') delete_projects(parent_type.strip('s'), parent_id) def argparser(): parser = argparse.ArgumentParser(description='Delete projects within a folder') parser.add_argument('parent_id') return parser if __name__ == "__main__": main(sys.argv)
Add script for deleting all projects within a folder#!/usr/bin/env python3 # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import google.api_core from google.cloud import resource_manager import sys import argparse from googleapiclient import discovery from oauth2client.client import GoogleCredentials client = resource_manager.Client() credentials = GoogleCredentials.get_application_default() def delete_liens(project_id): service = discovery.build('cloudresourcemanager', 'v1', credentials=credentials) parent = 'projects/{}'.format(project_id) request = service.liens().list(parent=parent) response = request.execute() liens_deleted = 0 for lien in response.get('liens', []): print("Deleting lien:", lien) d_request = service.liens().delete(name=lien.get('name')) d_request.execute() liens_deleted += 1 return liens_deleted def delete_project(project): try: project.delete() except google.api_core.exceptions.BadRequest as e: liens_deleted = delete_liens(project.project_id) if liens_deleted >= 1: delete_project(project) except (google.api_core.exceptions.Forbidden) as e: print("Failed to delete {}".format(project.project_id)) print(e) def delete_projects(parent_type, parent_id): print("Deleting projects in {} {}".format(parent_type, parent_id)) project_filter = { 'parent.type': parent_type, 'parent.id': parent_id } for project in client.list_projects(project_filter): print(" Deleting project {}...".format(project.project_id)) delete_project(project) def main(argv): parser = argparser() args = parser.parse_args(argv[1:]) (parent_type, parent_id) = args.parent_id.split('/') delete_projects(parent_type.strip('s'), parent_id) def argparser(): parser = argparse.ArgumentParser(description='Delete projects within a folder') parser.add_argument('parent_id') return parser if __name__ == "__main__": main(sys.argv)
<commit_before><commit_msg>Add script for deleting all projects within a folder<commit_after>#!/usr/bin/env python3 # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import google.api_core from google.cloud import resource_manager import sys import argparse from googleapiclient import discovery from oauth2client.client import GoogleCredentials client = resource_manager.Client() credentials = GoogleCredentials.get_application_default() def delete_liens(project_id): service = discovery.build('cloudresourcemanager', 'v1', credentials=credentials) parent = 'projects/{}'.format(project_id) request = service.liens().list(parent=parent) response = request.execute() liens_deleted = 0 for lien in response.get('liens', []): print("Deleting lien:", lien) d_request = service.liens().delete(name=lien.get('name')) d_request.execute() liens_deleted += 1 return liens_deleted def delete_project(project): try: project.delete() except google.api_core.exceptions.BadRequest as e: liens_deleted = delete_liens(project.project_id) if liens_deleted >= 1: delete_project(project) except (google.api_core.exceptions.Forbidden) as e: print("Failed to delete {}".format(project.project_id)) print(e) def delete_projects(parent_type, parent_id): print("Deleting projects in {} {}".format(parent_type, parent_id)) project_filter = { 'parent.type': parent_type, 'parent.id': parent_id } for project in client.list_projects(project_filter): print(" Deleting project {}...".format(project.project_id)) delete_project(project) def main(argv): parser = argparser() args = parser.parse_args(argv[1:]) (parent_type, parent_id) = args.parent_id.split('/') delete_projects(parent_type.strip('s'), parent_id) def argparser(): parser = argparse.ArgumentParser(description='Delete projects within a folder') parser.add_argument('parent_id') return parser if __name__ == "__main__": main(sys.argv)
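The script takes one positional argument like 'folders/123456789'. Its split-and-strip step, isolated so the behaviour can be checked without touching GCP; the folder id is a placeholder.

# How main() turns the CLI argument into list_projects() filter values.
parent_type, parent_id = 'folders/123456789'.split('/')
print(parent_type.strip('s'))   # -> 'folder'  (the filter wants the singular)
print(parent_id)                # -> '123456789'

# Note: str.strip('s') removes leading 's' characters too; that is harmless
# here because no valid parent type ('folders', 'organizations') starts with one.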
d4dd2dd454858686e9b4ef8f7fe6c9f3b267101a
migrations/versions/610_change_numberofsuppliers_to_integer.py
migrations/versions/610_change_numberofsuppliers_to_integer.py
"""Change numberOfSuppliers to integer Revision ID: 610 Revises: 600 Create Date: 2016-05-06 11:37:59.204714 """ # revision identifiers, used by Alembic. revision = '610' down_revision = '600' import re from alembic import op import sqlalchemy as sa from sqlalchemy.sql import table, column from sqlalchemy.dialects import postgresql briefs = table( 'briefs', column('id', sa.Integer), column('data', postgresql.JSON), ) def upgrade(): conn = op.get_bind() for brief in conn.execute(briefs.select()): # Skip briefs with missing or integer 'numberOfSuppliers' if brief.data.get('numberOfSuppliers') is None or isinstance(brief.data['numberOfSuppliers'], int): continue # Get the last number in the 'numberOfSuppliers' string number_of_suppliers = re.search(r'(\d+)(?!.*\d)', brief.data['numberOfSuppliers']) if number_of_suppliers: brief.data['numberOfSuppliers'] = int(number_of_suppliers.group(0)) conn.execute( briefs.update().where( briefs.c.id == brief.id ).values( data=brief.data ) ) def downgrade(): conn = op.get_bind() for brief in conn.execute(briefs.select()): # Skip briefs with missing or integer 'numberOfSuppliers' if brief.data.get('numberOfSuppliers') is None: continue brief.data['numberOfSuppliers'] = "{}".format(brief.data['numberOfSuppliers']) conn.execute( briefs.update().where( briefs.c.id == brief.id ).values( data=brief.data ) )
Add a migration to change brief numberOfSuppliers to integer
Add a migration to change brief numberOfSuppliers to integer If 'numberOfSuppliers' is a string parse the last number from the value and convert it to integer.
Python
mit
alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api
Add a migration to change brief numberOfSuppliers to integer If 'numberOfSuppliers' is a string parse the last number from the value and convert it to integer.
"""Change numberOfSuppliers to integer Revision ID: 610 Revises: 600 Create Date: 2016-05-06 11:37:59.204714 """ # revision identifiers, used by Alembic. revision = '610' down_revision = '600' import re from alembic import op import sqlalchemy as sa from sqlalchemy.sql import table, column from sqlalchemy.dialects import postgresql briefs = table( 'briefs', column('id', sa.Integer), column('data', postgresql.JSON), ) def upgrade(): conn = op.get_bind() for brief in conn.execute(briefs.select()): # Skip briefs with missing or integer 'numberOfSuppliers' if brief.data.get('numberOfSuppliers') is None or isinstance(brief.data['numberOfSuppliers'], int): continue # Get the last number in the 'numberOfSuppliers' string number_of_suppliers = re.search(r'(\d+)(?!.*\d)', brief.data['numberOfSuppliers']) if number_of_suppliers: brief.data['numberOfSuppliers'] = int(number_of_suppliers.group(0)) conn.execute( briefs.update().where( briefs.c.id == brief.id ).values( data=brief.data ) ) def downgrade(): conn = op.get_bind() for brief in conn.execute(briefs.select()): # Skip briefs with missing or integer 'numberOfSuppliers' if brief.data.get('numberOfSuppliers') is None: continue brief.data['numberOfSuppliers'] = "{}".format(brief.data['numberOfSuppliers']) conn.execute( briefs.update().where( briefs.c.id == brief.id ).values( data=brief.data ) )
<commit_before><commit_msg>Add a migration to change brief numberOfSuppliers to integer If 'numberOfSuppliers' is a string parse the last number from the value and convert it to integer.<commit_after>
"""Change numberOfSuppliers to integer Revision ID: 610 Revises: 600 Create Date: 2016-05-06 11:37:59.204714 """ # revision identifiers, used by Alembic. revision = '610' down_revision = '600' import re from alembic import op import sqlalchemy as sa from sqlalchemy.sql import table, column from sqlalchemy.dialects import postgresql briefs = table( 'briefs', column('id', sa.Integer), column('data', postgresql.JSON), ) def upgrade(): conn = op.get_bind() for brief in conn.execute(briefs.select()): # Skip briefs with missing or integer 'numberOfSuppliers' if brief.data.get('numberOfSuppliers') is None or isinstance(brief.data['numberOfSuppliers'], int): continue # Get the last number in the 'numberOfSuppliers' string number_of_suppliers = re.search(r'(\d+)(?!.*\d)', brief.data['numberOfSuppliers']) if number_of_suppliers: brief.data['numberOfSuppliers'] = int(number_of_suppliers.group(0)) conn.execute( briefs.update().where( briefs.c.id == brief.id ).values( data=brief.data ) ) def downgrade(): conn = op.get_bind() for brief in conn.execute(briefs.select()): # Skip briefs with missing or integer 'numberOfSuppliers' if brief.data.get('numberOfSuppliers') is None: continue brief.data['numberOfSuppliers'] = "{}".format(brief.data['numberOfSuppliers']) conn.execute( briefs.update().where( briefs.c.id == brief.id ).values( data=brief.data ) )
Add a migration to change brief numberOfSuppliers to integer If 'numberOfSuppliers' is a string parse the last number from the value and convert it to integer."""Change numberOfSuppliers to integer Revision ID: 610 Revises: 600 Create Date: 2016-05-06 11:37:59.204714 """ # revision identifiers, used by Alembic. revision = '610' down_revision = '600' import re from alembic import op import sqlalchemy as sa from sqlalchemy.sql import table, column from sqlalchemy.dialects import postgresql briefs = table( 'briefs', column('id', sa.Integer), column('data', postgresql.JSON), ) def upgrade(): conn = op.get_bind() for brief in conn.execute(briefs.select()): # Skip briefs with missing or integer 'numberOfSuppliers' if brief.data.get('numberOfSuppliers') is None or isinstance(brief.data['numberOfSuppliers'], int): continue # Get the last number in the 'numberOfSuppliers' string number_of_suppliers = re.search(r'(\d+)(?!.*\d)', brief.data['numberOfSuppliers']) if number_of_suppliers: brief.data['numberOfSuppliers'] = int(number_of_suppliers.group(0)) conn.execute( briefs.update().where( briefs.c.id == brief.id ).values( data=brief.data ) ) def downgrade(): conn = op.get_bind() for brief in conn.execute(briefs.select()): # Skip briefs with missing or integer 'numberOfSuppliers' if brief.data.get('numberOfSuppliers') is None: continue brief.data['numberOfSuppliers'] = "{}".format(brief.data['numberOfSuppliers']) conn.execute( briefs.update().where( briefs.c.id == brief.id ).values( data=brief.data ) )
<commit_before><commit_msg>Add a migration to change brief numberOfSuppliers to integer If 'numberOfSuppliers' is a string parse the last number from the value and convert it to integer.<commit_after>"""Change numberOfSuppliers to integer Revision ID: 610 Revises: 600 Create Date: 2016-05-06 11:37:59.204714 """ # revision identifiers, used by Alembic. revision = '610' down_revision = '600' import re from alembic import op import sqlalchemy as sa from sqlalchemy.sql import table, column from sqlalchemy.dialects import postgresql briefs = table( 'briefs', column('id', sa.Integer), column('data', postgresql.JSON), ) def upgrade(): conn = op.get_bind() for brief in conn.execute(briefs.select()): # Skip briefs with missing or integer 'numberOfSuppliers' if brief.data.get('numberOfSuppliers') is None or isinstance(brief.data['numberOfSuppliers'], int): continue # Get the last number in the 'numberOfSuppliers' string number_of_suppliers = re.search(r'(\d+)(?!.*\d)', brief.data['numberOfSuppliers']) if number_of_suppliers: brief.data['numberOfSuppliers'] = int(number_of_suppliers.group(0)) conn.execute( briefs.update().where( briefs.c.id == brief.id ).values( data=brief.data ) ) def downgrade(): conn = op.get_bind() for brief in conn.execute(briefs.select()): # Skip briefs with missing or integer 'numberOfSuppliers' if brief.data.get('numberOfSuppliers') is None: continue brief.data['numberOfSuppliers'] = "{}".format(brief.data['numberOfSuppliers']) conn.execute( briefs.update().where( briefs.c.id == brief.id ).values( data=brief.data ) )
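The lookahead regex in the upgrade step grabs the last run of digits in the string — the negative lookahead (?!.*\d) rejects any match with another digit after it. A quick check of that behaviour on made-up field values:

import re

for raw in ['3', 'up to 5 suppliers', 'between 3 and 6']:
    m = re.search(r'(\d+)(?!.*\d)', raw)
    print(int(m.group(0)))   # -> 3, then 5, then 6: always the last number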
e1b2898c9201d8735797ad626843e448a8791773
conf_site/proposals/tests/test_presentation_connection.py
conf_site/proposals/tests/test_presentation_connection.py
from symposion.conference.models import Section from symposion.proposals.models import ProposalBase from symposion.schedule.models import Presentation from conf_site.proposals.tests import ProposalTestCase class PresentationProposalConnectionTestCase(ProposalTestCase): def test_presentation_proposal_model(self): """Make sure presentation.proposal is Proposal, not ProposalBase.""" # Create Presentation connected to existing Proposal. proposal_base = ProposalBase.objects.get( title=self.proposal.title, description=self.proposal.description, abstract=self.proposal.abstract, speaker=self.proposal.speaker, ) presentation = Presentation.objects.create( title=self.proposal.title, description=self.proposal.description, abstract=self.proposal.abstract, speaker=self.proposal.speaker, section=Section.objects.first(), proposal_base=proposal_base, ) # This should be a Proposal, not a ProposalBase. self.assertIs(type(presentation.proposal), type(self.proposal))
Add test case for Presentation-Proposal connection.
Add test case for Presentation-Proposal connection. Presentation.proposal should return the specific class (Proposal) and not ProposalBase.
Python
mit
pydata/conf_site,pydata/conf_site,pydata/conf_site
Add test case for Presentation-Proposal connection. Presentation.proposal should return the specific class (Proposal) and not ProposalBase.
from symposion.conference.models import Section from symposion.proposals.models import ProposalBase from symposion.schedule.models import Presentation from conf_site.proposals.tests import ProposalTestCase class PresentationProposalConnectionTestCase(ProposalTestCase): def test_presentation_proposal_model(self): """Make sure presentation.proposal is Proposal, not ProposalBase.""" # Create Presentation connected to existing Proposal. proposal_base = ProposalBase.objects.get( title=self.proposal.title, description=self.proposal.description, abstract=self.proposal.abstract, speaker=self.proposal.speaker, ) presentation = Presentation.objects.create( title=self.proposal.title, description=self.proposal.description, abstract=self.proposal.abstract, speaker=self.proposal.speaker, section=Section.objects.first(), proposal_base=proposal_base, ) # This should be a Proposal, not a ProposalBase. self.assertIs(type(presentation.proposal), type(self.proposal))
<commit_before><commit_msg>Add test case for Presentation-Proposal connection. Presentation.proposal should return the specific class (Proposal) and not ProposalBase.<commit_after>
from symposion.conference.models import Section
from symposion.proposals.models import ProposalBase
from symposion.schedule.models import Presentation

from conf_site.proposals.tests import ProposalTestCase


class PresentationProposalConnectionTestCase(ProposalTestCase):
    def test_presentation_proposal_model(self):
        """Make sure presentation.proposal is Proposal, not ProposalBase."""
        # Create Presentation connected to existing Proposal.
        proposal_base = ProposalBase.objects.get(
            title=self.proposal.title,
            description=self.proposal.description,
            abstract=self.proposal.abstract,
            speaker=self.proposal.speaker,
        )
        presentation = Presentation.objects.create(
            title=self.proposal.title,
            description=self.proposal.description,
            abstract=self.proposal.abstract,
            speaker=self.proposal.speaker,
            section=Section.objects.first(),
            proposal_base=proposal_base,
        )
        # This should be a Proposal, not a ProposalBase.
        self.assertIs(type(presentation.proposal), type(self.proposal))
Add test case for Presentation-Proposal connection. Presentation.proposal should return the specific class (Proposal) and not ProposalBase.
from symposion.conference.models import Section
from symposion.proposals.models import ProposalBase
from symposion.schedule.models import Presentation

from conf_site.proposals.tests import ProposalTestCase


class PresentationProposalConnectionTestCase(ProposalTestCase):
    def test_presentation_proposal_model(self):
        """Make sure presentation.proposal is Proposal, not ProposalBase."""
        # Create Presentation connected to existing Proposal.
        proposal_base = ProposalBase.objects.get(
            title=self.proposal.title,
            description=self.proposal.description,
            abstract=self.proposal.abstract,
            speaker=self.proposal.speaker,
        )
        presentation = Presentation.objects.create(
            title=self.proposal.title,
            description=self.proposal.description,
            abstract=self.proposal.abstract,
            speaker=self.proposal.speaker,
            section=Section.objects.first(),
            proposal_base=proposal_base,
        )
        # This should be a Proposal, not a ProposalBase.
        self.assertIs(type(presentation.proposal), type(self.proposal))
<commit_before><commit_msg>Add test case for Presentation-Proposal connection. Presentation.proposal should return the specific class (Proposal) and not ProposalBase.<commit_after>
from symposion.conference.models import Section
from symposion.proposals.models import ProposalBase
from symposion.schedule.models import Presentation

from conf_site.proposals.tests import ProposalTestCase


class PresentationProposalConnectionTestCase(ProposalTestCase):
    def test_presentation_proposal_model(self):
        """Make sure presentation.proposal is Proposal, not ProposalBase."""
        # Create Presentation connected to existing Proposal.
        proposal_base = ProposalBase.objects.get(
            title=self.proposal.title,
            description=self.proposal.description,
            abstract=self.proposal.abstract,
            speaker=self.proposal.speaker,
        )
        presentation = Presentation.objects.create(
            title=self.proposal.title,
            description=self.proposal.description,
            abstract=self.proposal.abstract,
            speaker=self.proposal.speaker,
            section=Section.objects.first(),
            proposal_base=proposal_base,
        )
        # This should be a Proposal, not a ProposalBase.
        self.assertIs(type(presentation.proposal), type(self.proposal))
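The assertion in this row works because of Django multi-table inheritance: Proposal subclasses ProposalBase, so a base row can be downcast to its concrete child through the implicit one-to-one link. A runnable sketch of that mechanism with hypothetical Base/Child models (the settings bootstrap, app_label trick, and model names are illustration only, not symposion or conf_site code):

import django
from django.conf import settings

# Stand-alone settings so the sketch runs outside a real project; the models
# borrow the contenttypes app_label purely so the app registry accepts them.
settings.configure(
    INSTALLED_APPS=['django.contrib.contenttypes'],
    DATABASES={'default': {'ENGINE': 'django.db.backends.sqlite3',
                           'NAME': ':memory:'}},
)
django.setup()

from django.db import connection, models  # noqa: E402


class Base(models.Model):
    title = models.CharField(max_length=100)

    class Meta:
        app_label = 'contenttypes'


class Child(Base):
    # Multi-table inheritance: Django adds an implicit OneToOneField
    # (base_ptr) from Child's table back to Base's table.
    extra = models.CharField(max_length=100, default='')

    class Meta:
        app_label = 'contenttypes'


with connection.schema_editor() as editor:
    editor.create_model(Base)
    editor.create_model(Child)

child = Child.objects.create(title='t', extra='e')
base = Base.objects.get(pk=child.pk)
assert type(base) is Base
# The lowercased child class name downcasts a parent row to its child,
# the same mechanism a Presentation.proposal accessor can rely on.
assert type(base.child) is Child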
82237f41c5d1a79378a90d3463cc04a67a5b8088
tests/APIs/python/functional/load/load_pascal_voc_2012.py
tests/APIs/python/functional/load/load_pascal_voc_2012.py
#!/usr/bin/env python3

"""
Test loading Pascal VOC 2012.
"""

from __future__ import print_function
import os

import dbcollection.manager as dbc

# storage dir
data_dir = os.path.join(os.path.expanduser("~"), 'tmp', 'download_data')

# delete all cache data + dir
print('\n==> dbcollection: config_cache()')
dbc.config_cache(delete_cache=True, is_test=True)

# download/setup dataset
print('\n==> dbcollection: load()')
voc2012 = dbc.load(name='pascal_voc_2012', task='detection',
                   data_dir=data_dir, verbose=True, is_test=True)

# print data from the loader
print('\n==> dbcollection: info()')
dbc.info(is_test=True)

# print data from the loader
print('\n######### info #########')
print('Dataset: ' + voc2012.name)
print('Task: ' + voc2012.task)
print('Data path: ' + voc2012.data_dir)
print('Metadata cache path: ' + voc2012.cache_path)

# delete all cache data + dir
print('\n==> dbcollection: config_cache()')
dbc.config_cache(delete_cache=True, is_test=True)
Add load test for the pascal voc 2012 dataset
tests: Add load test for the pascal voc 2012 dataset
Python
mit
dbcollection/dbcollection,farrajota/dbcollection
tests: Add load test for the pascal voc 2012 dataset
#!/usr/bin/env python3

"""
Test loading Pascal VOC 2012.
"""

from __future__ import print_function
import os

import dbcollection.manager as dbc

# storage dir
data_dir = os.path.join(os.path.expanduser("~"), 'tmp', 'download_data')

# delete all cache data + dir
print('\n==> dbcollection: config_cache()')
dbc.config_cache(delete_cache=True, is_test=True)

# download/setup dataset
print('\n==> dbcollection: load()')
voc2012 = dbc.load(name='pascal_voc_2012', task='detection',
                   data_dir=data_dir, verbose=True, is_test=True)

# print data from the loader
print('\n==> dbcollection: info()')
dbc.info(is_test=True)

# print data from the loader
print('\n######### info #########')
print('Dataset: ' + voc2012.name)
print('Task: ' + voc2012.task)
print('Data path: ' + voc2012.data_dir)
print('Metadata cache path: ' + voc2012.cache_path)

# delete all cache data + dir
print('\n==> dbcollection: config_cache()')
dbc.config_cache(delete_cache=True, is_test=True)
<commit_before><commit_msg>tests: Add load test for the pascal voc 2012 dataset<commit_after>
#!/usr/bin/env python3

"""
Test loading Pascal VOC 2012.
"""

from __future__ import print_function
import os

import dbcollection.manager as dbc

# storage dir
data_dir = os.path.join(os.path.expanduser("~"), 'tmp', 'download_data')

# delete all cache data + dir
print('\n==> dbcollection: config_cache()')
dbc.config_cache(delete_cache=True, is_test=True)

# download/setup dataset
print('\n==> dbcollection: load()')
voc2012 = dbc.load(name='pascal_voc_2012', task='detection',
                   data_dir=data_dir, verbose=True, is_test=True)

# print data from the loader
print('\n==> dbcollection: info()')
dbc.info(is_test=True)

# print data from the loader
print('\n######### info #########')
print('Dataset: ' + voc2012.name)
print('Task: ' + voc2012.task)
print('Data path: ' + voc2012.data_dir)
print('Metadata cache path: ' + voc2012.cache_path)

# delete all cache data + dir
print('\n==> dbcollection: config_cache()')
dbc.config_cache(delete_cache=True, is_test=True)
tests: Add load test for the pascal voc 2012 dataset
#!/usr/bin/env python3

"""
Test loading Pascal VOC 2012.
"""

from __future__ import print_function
import os

import dbcollection.manager as dbc

# storage dir
data_dir = os.path.join(os.path.expanduser("~"), 'tmp', 'download_data')

# delete all cache data + dir
print('\n==> dbcollection: config_cache()')
dbc.config_cache(delete_cache=True, is_test=True)

# download/setup dataset
print('\n==> dbcollection: load()')
voc2012 = dbc.load(name='pascal_voc_2012', task='detection',
                   data_dir=data_dir, verbose=True, is_test=True)

# print data from the loader
print('\n==> dbcollection: info()')
dbc.info(is_test=True)

# print data from the loader
print('\n######### info #########')
print('Dataset: ' + voc2012.name)
print('Task: ' + voc2012.task)
print('Data path: ' + voc2012.data_dir)
print('Metadata cache path: ' + voc2012.cache_path)

# delete all cache data + dir
print('\n==> dbcollection: config_cache()')
dbc.config_cache(delete_cache=True, is_test=True)
<commit_before><commit_msg>tests: Add load test for the pascal voc 2012 dataset<commit_after>
#!/usr/bin/env python3

"""
Test loading Pascal VOC 2012.
"""

from __future__ import print_function
import os

import dbcollection.manager as dbc

# storage dir
data_dir = os.path.join(os.path.expanduser("~"), 'tmp', 'download_data')

# delete all cache data + dir
print('\n==> dbcollection: config_cache()')
dbc.config_cache(delete_cache=True, is_test=True)

# download/setup dataset
print('\n==> dbcollection: load()')
voc2012 = dbc.load(name='pascal_voc_2012', task='detection',
                   data_dir=data_dir, verbose=True, is_test=True)

# print data from the loader
print('\n==> dbcollection: info()')
dbc.info(is_test=True)

# print data from the loader
print('\n######### info #########')
print('Dataset: ' + voc2012.name)
print('Task: ' + voc2012.task)
print('Data path: ' + voc2012.data_dir)
print('Metadata cache path: ' + voc2012.cache_path)

# delete all cache data + dir
print('\n==> dbcollection: config_cache()')
dbc.config_cache(delete_cache=True, is_test=True)
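The same load, inspect, clean-up sequence recurs across these dbcollection smoke tests, so it lends itself to a small helper. A sketch built only from the calls visible in this row; whether other name/task pairs exist in dbcollection is an assumption:

import os

import dbcollection.manager as dbc

DATA_DIR = os.path.join(os.path.expanduser("~"), 'tmp', 'download_data')


def smoke_test(name, task, data_dir=DATA_DIR):
    """Download a dataset, print its loader metadata, then wipe the test cache."""
    # start from a clean test cache
    dbc.config_cache(delete_cache=True, is_test=True)
    loader = dbc.load(name=name, task=task, data_dir=data_dir,
                      verbose=True, is_test=True)
    dbc.info(is_test=True)
    print('Dataset: ' + loader.name)
    print('Task: ' + loader.task)
    print('Data path: ' + loader.data_dir)
    print('Metadata cache path: ' + loader.cache_path)
    # leave no residue behind
    dbc.config_cache(delete_cache=True, is_test=True)


if __name__ == '__main__':
    # 'pascal_voc_2012'/'detection' comes from the row above; any other
    # name/task pair would be an assumption about what dbcollection ships.
    smoke_test('pascal_voc_2012', 'detection')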
2e25b2d8f149106be3635fcf7f195bde1614e00b
plugin/core/test_transports.py
plugin/core/test_transports.py
import unittest
import io
from .transports import StdioTransport
import time


def json_rpc_message(payload: str) -> bytes:
    content_length = len(payload)
    return b'Content-Length: ' + bytes(
        str(content_length), 'utf-8') + b'\r\n\r\n' + bytes(payload, 'utf-8')


class FakeProcess(object):
    def __init__(self):
        self.stdin = io.BytesIO(b'foo\nbaz\n')  # io.BufferedReader()
        self.stdout = io.BytesIO(
            json_rpc_message("hello") + json_rpc_message("world"))  # io.BufferedWriter()
        self.returncode = None

    def poll(self):
        return self.returncode

    def exit(self, returncode):
        self.returncode = returncode


class StdioTransportTests(unittest.TestCase):
    def test_read_messages(self):
        process = FakeProcess()
        t = StdioTransport(process)  # type: ignore
        self.assertIsNotNone(t)

        received = []

        def on_receive(msg):
            received.append(msg)

        def on_close():
            pass

        t.start(on_receive, on_close)
        time.sleep(0.01)
        self.assertEqual(received, ["hello", "world"])
        t.close()
Add test for stdio transport
Add test for stdio transport
Python
mit
tomv564/LSP
Add test for stdio transport
import unittest
import io
from .transports import StdioTransport
import time


def json_rpc_message(payload: str) -> bytes:
    content_length = len(payload)
    return b'Content-Length: ' + bytes(
        str(content_length), 'utf-8') + b'\r\n\r\n' + bytes(payload, 'utf-8')


class FakeProcess(object):
    def __init__(self):
        self.stdin = io.BytesIO(b'foo\nbaz\n')  # io.BufferedReader()
        self.stdout = io.BytesIO(
            json_rpc_message("hello") + json_rpc_message("world"))  # io.BufferedWriter()
        self.returncode = None

    def poll(self):
        return self.returncode

    def exit(self, returncode):
        self.returncode = returncode


class StdioTransportTests(unittest.TestCase):
    def test_read_messages(self):
        process = FakeProcess()
        t = StdioTransport(process)  # type: ignore
        self.assertIsNotNone(t)

        received = []

        def on_receive(msg):
            received.append(msg)

        def on_close():
            pass

        t.start(on_receive, on_close)
        time.sleep(0.01)
        self.assertEqual(received, ["hello", "world"])
        t.close()
<commit_before><commit_msg>Add test for stdio transport<commit_after>
import unittest
import io
from .transports import StdioTransport
import time


def json_rpc_message(payload: str) -> bytes:
    content_length = len(payload)
    return b'Content-Length: ' + bytes(
        str(content_length), 'utf-8') + b'\r\n\r\n' + bytes(payload, 'utf-8')


class FakeProcess(object):
    def __init__(self):
        self.stdin = io.BytesIO(b'foo\nbaz\n')  # io.BufferedReader()
        self.stdout = io.BytesIO(
            json_rpc_message("hello") + json_rpc_message("world"))  # io.BufferedWriter()
        self.returncode = None

    def poll(self):
        return self.returncode

    def exit(self, returncode):
        self.returncode = returncode


class StdioTransportTests(unittest.TestCase):
    def test_read_messages(self):
        process = FakeProcess()
        t = StdioTransport(process)  # type: ignore
        self.assertIsNotNone(t)

        received = []

        def on_receive(msg):
            received.append(msg)

        def on_close():
            pass

        t.start(on_receive, on_close)
        time.sleep(0.01)
        self.assertEqual(received, ["hello", "world"])
        t.close()
Add test for stdio transport
import unittest
import io
from .transports import StdioTransport
import time


def json_rpc_message(payload: str) -> bytes:
    content_length = len(payload)
    return b'Content-Length: ' + bytes(
        str(content_length), 'utf-8') + b'\r\n\r\n' + bytes(payload, 'utf-8')


class FakeProcess(object):
    def __init__(self):
        self.stdin = io.BytesIO(b'foo\nbaz\n')  # io.BufferedReader()
        self.stdout = io.BytesIO(
            json_rpc_message("hello") + json_rpc_message("world"))  # io.BufferedWriter()
        self.returncode = None

    def poll(self):
        return self.returncode

    def exit(self, returncode):
        self.returncode = returncode


class StdioTransportTests(unittest.TestCase):
    def test_read_messages(self):
        process = FakeProcess()
        t = StdioTransport(process)  # type: ignore
        self.assertIsNotNone(t)

        received = []

        def on_receive(msg):
            received.append(msg)

        def on_close():
            pass

        t.start(on_receive, on_close)
        time.sleep(0.01)
        self.assertEqual(received, ["hello", "world"])
        t.close()
<commit_before><commit_msg>Add test for stdio transport<commit_after>
import unittest
import io
from .transports import StdioTransport
import time


def json_rpc_message(payload: str) -> bytes:
    content_length = len(payload)
    return b'Content-Length: ' + bytes(
        str(content_length), 'utf-8') + b'\r\n\r\n' + bytes(payload, 'utf-8')


class FakeProcess(object):
    def __init__(self):
        self.stdin = io.BytesIO(b'foo\nbaz\n')  # io.BufferedReader()
        self.stdout = io.BytesIO(
            json_rpc_message("hello") + json_rpc_message("world"))  # io.BufferedWriter()
        self.returncode = None

    def poll(self):
        return self.returncode

    def exit(self, returncode):
        self.returncode = returncode


class StdioTransportTests(unittest.TestCase):
    def test_read_messages(self):
        process = FakeProcess()
        t = StdioTransport(process)  # type: ignore
        self.assertIsNotNone(t)

        received = []

        def on_receive(msg):
            received.append(msg)

        def on_close():
            pass

        t.start(on_receive, on_close)
        time.sleep(0.01)
        self.assertEqual(received, ["hello", "world"])
        t.close()
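FakeProcess above hand-rolls LSP-style Content-Length framing on the write side; the read side is what StdioTransport must implement. A self-contained sketch of both directions of that framing (independent of the plugin's actual transport code):

import io


def frame(payload: str) -> bytes:
    """Encode one message with a Content-Length header, as json_rpc_message does."""
    body = payload.encode('utf-8')
    return b'Content-Length: ' + str(len(body)).encode('ascii') + b'\r\n\r\n' + body


def read_messages(stream):
    """Yield decoded payloads from a stream of Content-Length framed messages."""
    while True:
        header = stream.readline()
        if not header:
            return  # EOF
        if not header.startswith(b'Content-Length:'):
            continue  # tolerate other headers
        length = int(header.split(b':', 1)[1].strip())
        stream.readline()  # blank line that ends the header block
        yield stream.read(length).decode('utf-8')


stream = io.BytesIO(frame('hello') + frame('world'))
assert list(read_messages(stream)) == ['hello', 'world']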