Column schema:

| column | dtype |
|---|---|
| commit | stringlengths 40–40 |
| old_file | stringlengths 4–118 |
| new_file | stringlengths 4–118 |
| old_contents | stringlengths 0–2.94k |
| new_contents | stringlengths 1–4.43k |
| subject | stringlengths 15–444 |
| message | stringlengths 16–3.45k |
| lang | stringclasses 1 value |
| license | stringclasses 13 values |
| repos | stringlengths 5–43.2k |
| prompt | stringlengths 17–4.58k |
| response | stringlengths 1–4.43k |
| prompt_tagged | stringlengths 58–4.62k |
| response_tagged | stringlengths 1–4.43k |
| text | stringlengths 132–7.29k |
| text_tagged | stringlengths 173–7.33k |
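The rows that follow are preview records: each pairs a commit (hash, file paths, subject and message, language, license, source repositories) with the file contents before and after the change, plus derived `prompt`, `response`, and `text` fields. As a rough, non-authoritative sketch, a dataset with this schema could be loaded and inspected with the Hugging Face `datasets` library as below; the dataset identifier is a placeholder, and only the field names come from the schema above.

```python
# Sketch only: the dataset ID below is a placeholder, not the real repository name.
from datasets import load_dataset

ds = load_dataset("some-org/commit-instruct-python", split="train")  # hypothetical ID

row = ds[0]
print(row["commit"])    # 40-character commit hash
print(row["new_file"])  # path of the changed file
print(row["subject"])   # one-line commit subject
print(row["lang"])      # single language class (Python in the preview rows)

# In the preview rows shown here, "response" mirrors "new_contents" and
# "text" is the subject concatenated with the new file contents.
print(row["response"] == row["new_contents"])
```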
1249c0674ec226d5513b5004f0d512fbc5ce1d61
|
scuole/states/migrations/0002_auto_20150508_1614.py
|
scuole/states/migrations/0002_auto_20150508_1614.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def create_texas(apps, schema_editor):
State = apps.get_model('states', 'State')
State.objects.create(
name='Texas',
slug='texas',
)
class Migration(migrations.Migration):
dependencies = [
('states', '0001_initial'),
]
operations = [
migrations.RunPython(create_texas),
]
|
Add data migration for Texas State model
|
Add data migration for Texas State model
|
Python
|
mit
|
texastribune/scuole,texastribune/scuole,texastribune/scuole,texastribune/scuole
|
Add data migration for Texas State model
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def create_texas(apps, schema_editor):
State = apps.get_model('states', 'State')
State.objects.create(
name='Texas',
slug='texas',
)
class Migration(migrations.Migration):
dependencies = [
('states', '0001_initial'),
]
operations = [
migrations.RunPython(create_texas),
]
|
<commit_before><commit_msg>Add data migration for Texas State model<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def create_texas(apps, schema_editor):
State = apps.get_model('states', 'State')
State.objects.create(
name='Texas',
slug='texas',
)
class Migration(migrations.Migration):
dependencies = [
('states', '0001_initial'),
]
operations = [
migrations.RunPython(create_texas),
]
|
Add data migration for Texas State model# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def create_texas(apps, schema_editor):
State = apps.get_model('states', 'State')
State.objects.create(
name='Texas',
slug='texas',
)
class Migration(migrations.Migration):
dependencies = [
('states', '0001_initial'),
]
operations = [
migrations.RunPython(create_texas),
]
|
<commit_before><commit_msg>Add data migration for Texas State model<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def create_texas(apps, schema_editor):
State = apps.get_model('states', 'State')
State.objects.create(
name='Texas',
slug='texas',
)
class Migration(migrations.Migration):
dependencies = [
('states', '0001_initial'),
]
operations = [
migrations.RunPython(create_texas),
]
|
|
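The `prompt_tagged` and `text_tagged` fields in the row above wrap their content in `<commit_before>`, `<commit_msg>`, and `<commit_after>` markers; because `old_contents` is empty for this commit, nothing appears between `<commit_before>` and `<commit_msg>`. A minimal sketch of splitting such a tagged string back into its parts, assuming that fixed marker order, is:

```python
# Sketch: split a tagged example into (old_contents, message, new_contents)
# using the marker tokens visible in the preview rows. The function name and
# the fixed marker order are assumptions, not taken from any dataset docs.
def split_tagged(text_tagged: str):
    _, _, rest = text_tagged.partition("<commit_before>")
    old_contents, _, rest = rest.partition("<commit_msg>")
    message, _, new_contents = rest.partition("<commit_after>")
    return old_contents, message, new_contents

old, msg, new = split_tagged(
    "<commit_before><commit_msg>Add data migration for Texas State model<commit_after>..."
)
print(msg)  # Add data migration for Texas State model
```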
303f79be64f9174aa7a60787a2d7e5598d353d0d
|
userprofile/migrations/0041_auto_20210428_1823.py
|
userprofile/migrations/0041_auto_20210428_1823.py
|
# Generated by Django 3.1.2 on 2021-04-28 18:23
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('userprofile', '0040_auto_20200315_1428'),
]
operations = [
migrations.AlterModelOptions(
name='profile',
options={'permissions': (('is_active_member', 'Is an active member. Catchall permission'), ('can_view_social', 'Can see social fields on UserProfile'), ('can_view_admin', 'Can see information for admin panel'))},
),
]
|
Add migration for member permission
|
Add migration for member permission
|
Python
|
mit
|
hackerspace-ntnu/website,hackerspace-ntnu/website,hackerspace-ntnu/website
|
Add migration for member permission
|
# Generated by Django 3.1.2 on 2021-04-28 18:23
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('userprofile', '0040_auto_20200315_1428'),
]
operations = [
migrations.AlterModelOptions(
name='profile',
options={'permissions': (('is_active_member', 'Is an active member. Catchall permission'), ('can_view_social', 'Can see social fields on UserProfile'), ('can_view_admin', 'Can see information for admin panel'))},
),
]
|
<commit_before><commit_msg>Add migration for member permission<commit_after>
|
# Generated by Django 3.1.2 on 2021-04-28 18:23
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('userprofile', '0040_auto_20200315_1428'),
]
operations = [
migrations.AlterModelOptions(
name='profile',
options={'permissions': (('is_active_member', 'Is an active member. Catchall permission'), ('can_view_social', 'Can see social fields on UserProfile'), ('can_view_admin', 'Can see information for admin panel'))},
),
]
|
Add migration for member permission# Generated by Django 3.1.2 on 2021-04-28 18:23
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('userprofile', '0040_auto_20200315_1428'),
]
operations = [
migrations.AlterModelOptions(
name='profile',
options={'permissions': (('is_active_member', 'Is an active member. Catchall permission'), ('can_view_social', 'Can see social fields on UserProfile'), ('can_view_admin', 'Can see information for admin panel'))},
),
]
|
<commit_before><commit_msg>Add migration for member permission<commit_after># Generated by Django 3.1.2 on 2021-04-28 18:23
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('userprofile', '0040_auto_20200315_1428'),
]
operations = [
migrations.AlterModelOptions(
name='profile',
options={'permissions': (('is_active_member', 'Is an active member. Catchall permission'), ('can_view_social', 'Can see social fields on UserProfile'), ('can_view_admin', 'Can see information for admin panel'))},
),
]
|
|
2b3879b448885a471d305360f1bcf447b390755d
|
datasets/management/commands/load_freesound_examples.py
|
datasets/management/commands/load_freesound_examples.py
|
from django.core.management.base import BaseCommand
import json
from datasets.models import Dataset, Sound
class Command(BaseCommand):
help = 'Load examples from json taxonomy file. Use it as python manage.py load_freesound_false_examples ' \
'DATASET_ID PATH/TO/TAOXNOMY_FILE.json'
def add_arguments(self, parser):
parser.add_argument('dataset_id', type=int)
parser.add_argument('taxonomy_file', type=str)
def handle(self, *args, **options):
file_location = options['taxonomy_file']
dataset_id = options['dataset_id']
ds = Dataset.objects.get(id=dataset_id)
taxonomy = ds.taxonomy
data = json.load(open(file_location))
for d in data:
node = taxonomy.get_element_at_id(d['id'])
for ex_id in d['positive_examples_FS']:
sound = Sound.objects.get(freesound_id=ex_id)
node.freesound_examples.add(sound)
node.save()
|
Add command load freesound examples
|
Add command load freesound examples
|
Python
|
agpl-3.0
|
MTG/freesound-datasets,MTG/freesound-datasets,MTG/freesound-datasets,MTG/freesound-datasets
|
Add command load freesound examples
|
from django.core.management.base import BaseCommand
import json
from datasets.models import Dataset, Sound
class Command(BaseCommand):
help = 'Load examples from json taxonomy file. Use it as python manage.py load_freesound_false_examples ' \
'DATASET_ID PATH/TO/TAOXNOMY_FILE.json'
def add_arguments(self, parser):
parser.add_argument('dataset_id', type=int)
parser.add_argument('taxonomy_file', type=str)
def handle(self, *args, **options):
file_location = options['taxonomy_file']
dataset_id = options['dataset_id']
ds = Dataset.objects.get(id=dataset_id)
taxonomy = ds.taxonomy
data = json.load(open(file_location))
for d in data:
node = taxonomy.get_element_at_id(d['id'])
for ex_id in d['positive_examples_FS']:
sound = Sound.objects.get(freesound_id=ex_id)
node.freesound_examples.add(sound)
node.save()
|
<commit_before><commit_msg>Add command load freesound examples<commit_after>
|
from django.core.management.base import BaseCommand
import json
from datasets.models import Dataset, Sound
class Command(BaseCommand):
help = 'Load examples from json taxonomy file. Use it as python manage.py load_freesound_false_examples ' \
'DATASET_ID PATH/TO/TAOXNOMY_FILE.json'
def add_arguments(self, parser):
parser.add_argument('dataset_id', type=int)
parser.add_argument('taxonomy_file', type=str)
def handle(self, *args, **options):
file_location = options['taxonomy_file']
dataset_id = options['dataset_id']
ds = Dataset.objects.get(id=dataset_id)
taxonomy = ds.taxonomy
data = json.load(open(file_location))
for d in data:
node = taxonomy.get_element_at_id(d['id'])
for ex_id in d['positive_examples_FS']:
sound = Sound.objects.get(freesound_id=ex_id)
node.freesound_examples.add(sound)
node.save()
|
Add command load freesound examplesfrom django.core.management.base import BaseCommand
import json
from datasets.models import Dataset, Sound
class Command(BaseCommand):
help = 'Load examples from json taxonomy file. Use it as python manage.py load_freesound_false_examples ' \
'DATASET_ID PATH/TO/TAOXNOMY_FILE.json'
def add_arguments(self, parser):
parser.add_argument('dataset_id', type=int)
parser.add_argument('taxonomy_file', type=str)
def handle(self, *args, **options):
file_location = options['taxonomy_file']
dataset_id = options['dataset_id']
ds = Dataset.objects.get(id=dataset_id)
taxonomy = ds.taxonomy
data = json.load(open(file_location))
for d in data:
node = taxonomy.get_element_at_id(d['id'])
for ex_id in d['positive_examples_FS']:
sound = Sound.objects.get(freesound_id=ex_id)
node.freesound_examples.add(sound)
node.save()
|
<commit_before><commit_msg>Add command load freesound examples<commit_after>from django.core.management.base import BaseCommand
import json
from datasets.models import Dataset, Sound
class Command(BaseCommand):
help = 'Load examples from json taxonomy file. Use it as python manage.py load_freesound_false_examples ' \
'DATASET_ID PATH/TO/TAOXNOMY_FILE.json'
def add_arguments(self, parser):
parser.add_argument('dataset_id', type=int)
parser.add_argument('taxonomy_file', type=str)
def handle(self, *args, **options):
file_location = options['taxonomy_file']
dataset_id = options['dataset_id']
ds = Dataset.objects.get(id=dataset_id)
taxonomy = ds.taxonomy
data = json.load(open(file_location))
for d in data:
node = taxonomy.get_element_at_id(d['id'])
for ex_id in d['positive_examples_FS']:
sound = Sound.objects.get(freesound_id=ex_id)
node.freesound_examples.add(sound)
node.save()
|
|
3c28f3c79893da231e8853f88adce6dbdaa206dc
|
apps/companyprofile/migrations/0003_company_image.py
|
apps/companyprofile/migrations/0003_company_image.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('gallery', '0002_auto_20150916_1953'),
('companyprofile', '0002_auto_20151014_2132'),
]
operations = [
migrations.AddField(
model_name='company',
name='image',
field=models.ForeignKey(to='gallery.ResponsiveImage', default=None, to_field='Bilde'),
preserve_default=True,
),
]
|
Add new image field migration
|
Add new image field migration
|
Python
|
mit
|
dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4
|
Add new image field migration
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('gallery', '0002_auto_20150916_1953'),
('companyprofile', '0002_auto_20151014_2132'),
]
operations = [
migrations.AddField(
model_name='company',
name='image',
field=models.ForeignKey(to='gallery.ResponsiveImage', default=None, to_field='Bilde'),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add new image field migration<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('gallery', '0002_auto_20150916_1953'),
('companyprofile', '0002_auto_20151014_2132'),
]
operations = [
migrations.AddField(
model_name='company',
name='image',
field=models.ForeignKey(to='gallery.ResponsiveImage', default=None, to_field='Bilde'),
preserve_default=True,
),
]
|
Add new image field migration# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('gallery', '0002_auto_20150916_1953'),
('companyprofile', '0002_auto_20151014_2132'),
]
operations = [
migrations.AddField(
model_name='company',
name='image',
field=models.ForeignKey(to='gallery.ResponsiveImage', default=None, to_field='Bilde'),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add new image field migration<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('gallery', '0002_auto_20150916_1953'),
('companyprofile', '0002_auto_20151014_2132'),
]
operations = [
migrations.AddField(
model_name='company',
name='image',
field=models.ForeignKey(to='gallery.ResponsiveImage', default=None, to_field='Bilde'),
preserve_default=True,
),
]
|
|
d97ba0e4d71a89f346b2ece699134975ef97dd7f
|
wc_hist.py
|
wc_hist.py
|
# Create a histogram for word count data
# Dan Kolbman 2014
import sys
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
font = {'family' : 'normal',
'weight' : 'bold',
'size' : 28}
matplotlib.rc('font', **font)
def main( path ):
wc = []
pct = []
# Read wc data
with open(path) as f:
for line in f:
s = line.split()
if len(s) == 3:
wc.append( int(s[0]) )
pct.append( float(s[2]) )
avg = 0.0
for i in range(len(wc)):
avg += wc[i]*pct[i]/100.0
print('Mean word count:', avg)
fig, ax = plt.subplots()
plt.ylim(0,10)
plt.ylabel('Percent of Users')
rects1 = ax.bar(wc, pct, 1.0, color='#1FB3F2', alpha=1.0, lw=0)
ax2 = ax.twinx()
rects1 = ax2.bar(wc, pct, 1.0, color='#FF9F21', alpha=1.0, lw=0)
# Draw mean line
#plt.axvline( avg, lw=7, ls='--', color='k' )
#ax.text( avg*1.08, 5, 'Mean: '+str(round(avg*10.0)/10.0)+' words' )
#ax.set_xticks( [ 20.5, 25.5, 30.5, 35.5, 40 ] )
#ax.set_xticklabels( [ '20', '25', '30', '35', '40'] )
ax.set_yscale('log')
plt.xlim(1, 75)
plt.ylim(0,10)
#plt.xlabel('Words')
fig.set_size_inches(7,6)
plt.tight_layout()
plt.savefig('wc.png', transparent=True, dpi=100)
plt.show()
################################################################################
if(len(sys.argv) < 2):
print('Usage: python wc_hist.py word_count.dat')
elif( len(sys.argv) == 2 ):
main( sys.argv[1] )
|
Add histogram for bio word counts
|
Add histogram for bio word counts
|
Python
|
mit
|
dankolbman/CleverTind,dankolbman/CleverTind
|
Add histogram for bio word counts
|
# Create a histogram for word count data
# Dan Kolbman 2014
import sys
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
font = {'family' : 'normal',
'weight' : 'bold',
'size' : 28}
matplotlib.rc('font', **font)
def main( path ):
wc = []
pct = []
# Read wc data
with open(path) as f:
for line in f:
s = line.split()
if len(s) == 3:
wc.append( int(s[0]) )
pct.append( float(s[2]) )
avg = 0.0
for i in range(len(wc)):
avg += wc[i]*pct[i]/100.0
print('Mean word count:', avg)
fig, ax = plt.subplots()
plt.ylim(0,10)
plt.ylabel('Percent of Users')
rects1 = ax.bar(wc, pct, 1.0, color='#1FB3F2', alpha=1.0, lw=0)
ax2 = ax.twinx()
rects1 = ax2.bar(wc, pct, 1.0, color='#FF9F21', alpha=1.0, lw=0)
# Draw mean line
#plt.axvline( avg, lw=7, ls='--', color='k' )
#ax.text( avg*1.08, 5, 'Mean: '+str(round(avg*10.0)/10.0)+' words' )
#ax.set_xticks( [ 20.5, 25.5, 30.5, 35.5, 40 ] )
#ax.set_xticklabels( [ '20', '25', '30', '35', '40'] )
ax.set_yscale('log')
plt.xlim(1, 75)
plt.ylim(0,10)
#plt.xlabel('Words')
fig.set_size_inches(7,6)
plt.tight_layout()
plt.savefig('wc.png', transparent=True, dpi=100)
plt.show()
################################################################################
if(len(sys.argv) < 2):
print('Usage: python wc_hist.py word_count.dat')
elif( len(sys.argv) == 2 ):
main( sys.argv[1] )
|
<commit_before><commit_msg>Add histogram for bio word counts<commit_after>
|
# Create a histogram for word count data
# Dan Kolbman 2014
import sys
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
font = {'family' : 'normal',
'weight' : 'bold',
'size' : 28}
matplotlib.rc('font', **font)
def main( path ):
wc = []
pct = []
# Read wc data
with open(path) as f:
for line in f:
s = line.split()
if len(s) == 3:
wc.append( int(s[0]) )
pct.append( float(s[2]) )
avg = 0.0
for i in range(len(wc)):
avg += wc[i]*pct[i]/100.0
print('Mean word count:', avg)
fig, ax = plt.subplots()
plt.ylim(0,10)
plt.ylabel('Percent of Users')
rects1 = ax.bar(wc, pct, 1.0, color='#1FB3F2', alpha=1.0, lw=0)
ax2 = ax.twinx()
rects1 = ax2.bar(wc, pct, 1.0, color='#FF9F21', alpha=1.0, lw=0)
# Draw mean line
#plt.axvline( avg, lw=7, ls='--', color='k' )
#ax.text( avg*1.08, 5, 'Mean: '+str(round(avg*10.0)/10.0)+' words' )
#ax.set_xticks( [ 20.5, 25.5, 30.5, 35.5, 40 ] )
#ax.set_xticklabels( [ '20', '25', '30', '35', '40'] )
ax.set_yscale('log')
plt.xlim(1, 75)
plt.ylim(0,10)
#plt.xlabel('Words')
fig.set_size_inches(7,6)
plt.tight_layout()
plt.savefig('wc.png', transparent=True, dpi=100)
plt.show()
################################################################################
if(len(sys.argv) < 2):
print('Usage: python wc_hist.py word_count.dat')
elif( len(sys.argv) == 2 ):
main( sys.argv[1] )
|
Add histogram for bio word counts# Create a histogram for word count data
# Dan Kolbman 2014
import sys
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
font = {'family' : 'normal',
'weight' : 'bold',
'size' : 28}
matplotlib.rc('font', **font)
def main( path ):
wc = []
pct = []
# Read wc data
with open(path) as f:
for line in f:
s = line.split()
if len(s) == 3:
wc.append( int(s[0]) )
pct.append( float(s[2]) )
avg = 0.0
for i in range(len(wc)):
avg += wc[i]*pct[i]/100.0
print('Mean word count:', avg)
fig, ax = plt.subplots()
plt.ylim(0,10)
plt.ylabel('Percent of Users')
rects1 = ax.bar(wc, pct, 1.0, color='#1FB3F2', alpha=1.0, lw=0)
ax2 = ax.twinx()
rects1 = ax2.bar(wc, pct, 1.0, color='#FF9F21', alpha=1.0, lw=0)
# Draw mean line
#plt.axvline( avg, lw=7, ls='--', color='k' )
#ax.text( avg*1.08, 5, 'Mean: '+str(round(avg*10.0)/10.0)+' words' )
#ax.set_xticks( [ 20.5, 25.5, 30.5, 35.5, 40 ] )
#ax.set_xticklabels( [ '20', '25', '30', '35', '40'] )
ax.set_yscale('log')
plt.xlim(1, 75)
plt.ylim(0,10)
#plt.xlabel('Words')
fig.set_size_inches(7,6)
plt.tight_layout()
plt.savefig('wc.png', transparent=True, dpi=100)
plt.show()
################################################################################
if(len(sys.argv) < 2):
print('Usage: python wc_hist.py word_count.dat')
elif( len(sys.argv) == 2 ):
main( sys.argv[1] )
|
<commit_before><commit_msg>Add histogram for bio word counts<commit_after># Create a histogram for word count data
# Dan Kolbman 2014
import sys
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
font = {'family' : 'normal',
'weight' : 'bold',
'size' : 28}
matplotlib.rc('font', **font)
def main( path ):
wc = []
pct = []
# Read wc data
with open(path) as f:
for line in f:
s = line.split()
if len(s) == 3:
wc.append( int(s[0]) )
pct.append( float(s[2]) )
avg = 0.0
for i in range(len(wc)):
avg += wc[i]*pct[i]/100.0
print('Mean word count:', avg)
fig, ax = plt.subplots()
plt.ylim(0,10)
plt.ylabel('Percent of Users')
rects1 = ax.bar(wc, pct, 1.0, color='#1FB3F2', alpha=1.0, lw=0)
ax2 = ax.twinx()
rects1 = ax2.bar(wc, pct, 1.0, color='#FF9F21', alpha=1.0, lw=0)
# Draw mean line
#plt.axvline( avg, lw=7, ls='--', color='k' )
#ax.text( avg*1.08, 5, 'Mean: '+str(round(avg*10.0)/10.0)+' words' )
#ax.set_xticks( [ 20.5, 25.5, 30.5, 35.5, 40 ] )
#ax.set_xticklabels( [ '20', '25', '30', '35', '40'] )
ax.set_yscale('log')
plt.xlim(1, 75)
plt.ylim(0,10)
#plt.xlabel('Words')
fig.set_size_inches(7,6)
plt.tight_layout()
plt.savefig('wc.png', transparent=True, dpi=100)
plt.show()
################################################################################
if(len(sys.argv) < 2):
print('Usage: python wc_hist.py word_count.dat')
elif( len(sys.argv) == 2 ):
main( sys.argv[1] )
|
|
d6651c4c5be5074c45c54b0f8fd844cd25837a7b
|
ocradmin/storage/utils.py
|
ocradmin/storage/utils.py
|
"""
Utils for document storage.
"""
import json
from . import base
class DocumentEncoder(json.JSONEncoder):
"""
Encoder for JSONifying documents.
"""
def default(self, doc):
"""Flatten node for JSON encoding."""
if issubclass(doc.__class__, base.BaseDocument):
return dict(
label=doc.label,
pid=doc.pid
)
return super(DocumentEncoder, self).default(doc)
|
Add a (basic) JSON encoder for documents
|
Add a (basic) JSON encoder for documents
|
Python
|
apache-2.0
|
vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium
|
Add a (basic) JSON encoder for documents
|
"""
Utils for document storage.
"""
import json
from . import base
class DocumentEncoder(json.JSONEncoder):
"""
Encoder for JSONifying documents.
"""
def default(self, doc):
"""Flatten node for JSON encoding."""
if issubclass(doc.__class__, base.BaseDocument):
return dict(
label=doc.label,
pid=doc.pid
)
return super(DocumentEncoder, self).default(doc)
|
<commit_before><commit_msg>Add a (basic) JSON encoder for documents<commit_after>
|
"""
Utils for document storage.
"""
import json
from . import base
class DocumentEncoder(json.JSONEncoder):
"""
Encoder for JSONifying documents.
"""
def default(self, doc):
"""Flatten node for JSON encoding."""
if issubclass(doc.__class__, base.BaseDocument):
return dict(
label=doc.label,
pid=doc.pid
)
return super(DocumentEncoder, self).default(doc)
|
Add a (basic) JSON encoder for documents"""
Utils for document storage.
"""
import json
from . import base
class DocumentEncoder(json.JSONEncoder):
"""
Encoder for JSONifying documents.
"""
def default(self, doc):
"""Flatten node for JSON encoding."""
if issubclass(doc.__class__, base.BaseDocument):
return dict(
label=doc.label,
pid=doc.pid
)
return super(DocumentEncoder, self).default(doc)
|
<commit_before><commit_msg>Add a (basic) JSON encoder for documents<commit_after>"""
Utils for document storage.
"""
import json
from . import base
class DocumentEncoder(json.JSONEncoder):
"""
Encoder for JSONifying documents.
"""
def default(self, doc):
"""Flatten node for JSON encoding."""
if issubclass(doc.__class__, base.BaseDocument):
return dict(
label=doc.label,
pid=doc.pid
)
return super(DocumentEncoder, self).default(doc)
|
|
e41172d55df9f1a0cb142b6a59625eef59dfa519
|
common/lib/xmodule/xmodule/tests/test_capa_module.py
|
common/lib/xmodule/xmodule/tests/test_capa_module.py
|
import json
from mock import Mock
import unittest
from xmodule.capa_module import CapaModule
from xmodule.modulestore import Location
from lxml import etree
from . import test_system
class CapaFactory(object):
"""
A helper class to create problem modules with various parameters for testing.
"""
sample_problem_xml = """<?xml version="1.0"?>
<problem>
<text>
<p>What is pi, to two decimal placs?</p>
</text>
<numericalresponse answer="3.14">
<textline math="1" size="30"/>
</numericalresponse>
</problem>
"""
num = 0
@staticmethod
def next_num():
CapaFactory.num += 1
return CapaFactory.num
@staticmethod
def create():
definition = {'data': CapaFactory.sample_problem_xml,}
location = Location(["i4x", "edX", "capa_test", "problem",
"SampleProblem{0}".format(CapaFactory.next_num())])
metadata = {}
descriptor = Mock(weight="1")
instance_state = None
module = CapaModule(test_system, location,
definition, descriptor,
instance_state, None, metadata=metadata)
return module
class CapaModuleTest(unittest.TestCase):
def test_import(self):
module = CapaFactory.create()
self.assertEqual(module.get_score()['score'], 0)
other_module = CapaFactory.create()
self.assertEqual(module.get_score()['score'], 0)
self.assertNotEqual(module.url_name, other_module.url_name,
"Factory should be creating unique names for each problem")
|
Add start of test framework for capa
|
Add start of test framework for capa
|
Python
|
agpl-3.0
|
utecuy/edx-platform,fintech-circle/edx-platform,chand3040/cloud_that,halvertoluke/edx-platform,vasyarv/edx-platform,longmen21/edx-platform,motion2015/edx-platform,mjirayu/sit_academy,ZLLab-Mooc/edx-platform,pomegranited/edx-platform,nanolearning/edx-platform,morenopc/edx-platform,ZLLab-Mooc/edx-platform,nanolearning/edx-platform,eestay/edx-platform,openfun/edx-platform,xuxiao19910803/edx,caesar2164/edx-platform,knehez/edx-platform,analyseuc3m/ANALYSE-v1,shubhdev/edx-platform,cpennington/edx-platform,dsajkl/reqiop,appliedx/edx-platform,stvstnfrd/edx-platform,4eek/edx-platform,ampax/edx-platform,IITBinterns13/edx-platform-dev,utecuy/edx-platform,IITBinterns13/edx-platform-dev,beni55/edx-platform,motion2015/edx-platform,rhndg/openedx,inares/edx-platform,rationalAgent/edx-platform-custom,don-github/edx-platform,TsinghuaX/edx-platform,wwj718/edx-platform,teltek/edx-platform,syjeon/new_edx,Shrhawk/edx-platform,jamesblunt/edx-platform,apigee/edx-platform,Semi-global/edx-platform,EduPepperPD/pepper2013,defance/edx-platform,JioEducation/edx-platform,wwj718/ANALYSE,carsongee/edx-platform,hkawasaki/kawasaki-aio8-2,LICEF/edx-platform,iivic/BoiseStateX,jamiefolsom/edx-platform,Livit/Livit.Learn.EdX,cyanna/edx-platform,SravanthiSinha/edx-platform,mbareta/edx-platform-ft,hkawasaki/kawasaki-aio8-2,jswope00/griffinx,motion2015/a3,cecep-edu/edx-platform,Edraak/edx-platform,jswope00/GAI,DefyVentures/edx-platform,ovnicraft/edx-platform,abdoosh00/edraak,shurihell/testasia,romain-li/edx-platform,Edraak/edx-platform,wwj718/edx-platform,playm2mboy/edx-platform,a-parhom/edx-platform,adoosii/edx-platform,zadgroup/edx-platform,kmoocdev2/edx-platform,adoosii/edx-platform,longmen21/edx-platform,edry/edx-platform,ak2703/edx-platform,shubhdev/edx-platform,zadgroup/edx-platform,hastexo/edx-platform,Ayub-Khan/edx-platform,hkawasaki/kawasaki-aio8-1,sudheerchintala/LearnEraPlatForm,kxliugang/edx-platform,dsajkl/123,playm2mboy/edx-platform,eduNEXT/edx-platform,kursitet/edx-platform,hamzehd/edx-platform,yokose-ks/edx-platform,doismellburning/edx-platform,dsajkl/123,ampax/edx-platform,nttks/jenkins-test,DNFcode/edx-platform,antonve/s4-project-mooc,appliedx/edx-platform,sudheerchintala/LearnEraPlatForm,philanthropy-u/edx-platform,DNFcode/edx-platform,devs1991/test_edx_docmode,IndonesiaX/edx-platform,apigee/edx-platform,gsehub/edx-platform,y12uc231/edx-platform,stvstnfrd/edx-platform,appsembler/edx-platform,jbzdak/edx-platform,jamiefolsom/edx-platform,olexiim/edx-platform,hkawasaki/kawasaki-aio8-1,shabab12/edx-platform,morenopc/edx-platform,jjmiranda/edx-platform,jonathan-beard/edx-platform,ak2703/edx-platform,appsembler/edx-platform,proversity-org/edx-platform,arbrandes/edx-platform,martynovp/edx-platform,kmoocdev/edx-platform,shurihell/testasia,y12uc231/edx-platform,pabloborrego93/edx-platform,IndonesiaX/edx-platform,kursitet/edx-platform,caesar2164/edx-platform,don-github/edx-platform,xuxiao19910803/edx,nanolearningllc/edx-platform-cypress-2,tiagochiavericosta/edx-platform,wwj718/ANALYSE,fintech-circle/edx-platform,mushtaqak/edx-platform,PepperPD/edx-pepper-platform,dcosentino/edx-platform,shurihell/testasia,Kalyzee/edx-platform,mitocw/edx-platform,a-parhom/edx-platform,mushtaqak/edx-platform,4eek/edx-platform,olexiim/edx-platform,cognitiveclass/edx-platform,JCBarahona/edX,defance/edx-platform,bigdatauniversity/edx-platform,kursitet/edx-platform,CredoReference/edx-platform,motion2015/edx-platform,don-github/edx-platform,knehez/edx-platform,doganov/edx-platform,cpennington/edx-platform,hkawasaki/kawasaki-aio8-0,rationalAgent/ed
x-platform-custom,polimediaupv/edx-platform,angelapper/edx-platform,waheedahmed/edx-platform,kxliugang/edx-platform,hamzehd/edx-platform,franosincic/edx-platform,chauhanhardik/populo,Edraak/edx-platform,MakeHer/edx-platform,ahmadiga/min_edx,simbs/edx-platform,ampax/edx-platform,hkawasaki/kawasaki-aio8-2,chrisndodge/edx-platform,DefyVentures/edx-platform,chauhanhardik/populo_2,Semi-global/edx-platform,vasyarv/edx-platform,pku9104038/edx-platform,chrisndodge/edx-platform,nagyistoce/edx-platform,LICEF/edx-platform,appliedx/edx-platform,MakeHer/edx-platform,chauhanhardik/populo,raccoongang/edx-platform,antonve/s4-project-mooc,pomegranited/edx-platform,B-MOOC/edx-platform,WatanabeYasumasa/edx-platform,RPI-OPENEDX/edx-platform,ZLLab-Mooc/edx-platform,torchingloom/edx-platform,dkarakats/edx-platform,jolyonb/edx-platform,chauhanhardik/populo,philanthropy-u/edx-platform,bigdatauniversity/edx-platform,devs1991/test_edx_docmode,carsongee/edx-platform,xuxiao19910803/edx-platform,ferabra/edx-platform,cyanna/edx-platform,jjmiranda/edx-platform,pabloborrego93/edx-platform,jswope00/griffinx,cselis86/edx-platform,mtlchun/edx,zubair-arbi/edx-platform,procangroup/edx-platform,ovnicraft/edx-platform,zerobatu/edx-platform,chauhanhardik/populo,chrisndodge/edx-platform,halvertoluke/edx-platform,bitifirefly/edx-platform,eemirtekin/edx-platform,eduNEXT/edunext-platform,LearnEra/LearnEraPlaftform,jruiperezv/ANALYSE,eduNEXT/edx-platform,fly19890211/edx-platform,AkA84/edx-platform,gsehub/edx-platform,defance/edx-platform,kmoocdev/edx-platform,bitifirefly/edx-platform,shurihell/testasia,MakeHer/edx-platform,rationalAgent/edx-platform-custom,martynovp/edx-platform,ovnicraft/edx-platform,sameetb-cuelogic/edx-platform-test,proversity-org/edx-platform,nttks/jenkins-test,deepsrijit1105/edx-platform,Shrhawk/edx-platform,jolyonb/edx-platform,shubhdev/openedx,angelapper/edx-platform,mahendra-r/edx-platform,jbassen/edx-platform,Edraak/edx-platform,dkarakats/edx-platform,nttks/edx-platform,hmcmooc/muddx-platform,ahmedaljazzar/edx-platform,syjeon/new_edx,Unow/edx-platform,mtlchun/edx,jazkarta/edx-platform-for-isc,10clouds/edx-platform,jzoldak/edx-platform,pepeportela/edx-platform,jamiefolsom/edx-platform,playm2mboy/edx-platform,B-MOOC/edx-platform,morenopc/edx-platform,pku9104038/edx-platform,RPI-OPENEDX/edx-platform,xuxiao19910803/edx,leansoft/edx-platform,utecuy/edx-platform,torchingloom/edx-platform,Edraak/circleci-edx-platform,bigdatauniversity/edx-platform,nanolearning/edx-platform,dsajkl/123,jazztpt/edx-platform,4eek/edx-platform,waheedahmed/edx-platform,arifsetiawan/edx-platform,Kalyzee/edx-platform,zhenzhai/edx-platform,tiagochiavericosta/edx-platform,hamzehd/edx-platform,EduPepperPD/pepper2013,pdehaye/theming-edx-platform,shashank971/edx-platform,solashirai/edx-platform,amir-qayyum-khan/edx-platform,prarthitm/edxplatform,knehez/edx-platform,kmoocdev/edx-platform,chauhanhardik/populo_2,wwj718/edx-platform,bdero/edx-platform,Softmotions/edx-platform,arifsetiawan/edx-platform,etzhou/edx-platform,jazkarta/edx-platform-for-isc,eemirtekin/edx-platform,beacloudgenius/edx-platform,shabab12/edx-platform,PepperPD/edx-pepper-platform,msegado/edx-platform,Softmotions/edx-platform,RPI-OPENEDX/edx-platform,JCBarahona/edX,xinjiguaike/edx-platform,Shrhawk/edx-platform,ahmadio/edx-platform,rhndg/openedx,mahendra-r/edx-platform,abdoosh00/edx-rtl-final,dcosentino/edx-platform,shubhdev/openedx,naresh21/synergetics-edx-platform,jazztpt/edx-platform,xingyepei/edx-platform,cpennington/edx-platform,chudaol/edx-platform,louyihua/edx-platform,nan
olearningllc/edx-platform-cypress,antonve/s4-project-mooc,zubair-arbi/edx-platform,zubair-arbi/edx-platform,marcore/edx-platform,fintech-circle/edx-platform,auferack08/edx-platform,amir-qayyum-khan/edx-platform,mjg2203/edx-platform-seas,kalebhartje/schoolboost,edx-solutions/edx-platform,shabab12/edx-platform,kalebhartje/schoolboost,jswope00/GAI,beacloudgenius/edx-platform,rue89-tech/edx-platform,UOMx/edx-platform,vasyarv/edx-platform,jbzdak/edx-platform,benpatterson/edx-platform,SivilTaram/edx-platform,doismellburning/edx-platform,polimediaupv/edx-platform,SivilTaram/edx-platform,ESOedX/edx-platform,leansoft/edx-platform,halvertoluke/edx-platform,synergeticsedx/deployment-wipro,mtlchun/edx,alexthered/kienhoc-platform,shubhdev/edx-platform,ampax/edx-platform-backup,Kalyzee/edx-platform,hastexo/edx-platform,edry/edx-platform,peterm-itr/edx-platform,ESOedX/edx-platform,dkarakats/edx-platform,xuxiao19910803/edx,DefyVentures/edx-platform,zofuthan/edx-platform,eduNEXT/edx-platform,abdoosh00/edraak,zerobatu/edx-platform,UOMx/edx-platform,nanolearningllc/edx-platform-cypress-2,unicri/edx-platform,Edraak/edx-platform,antoviaque/edx-platform,kursitet/edx-platform,sudheerchintala/LearnEraPlatForm,eestay/edx-platform,kalebhartje/schoolboost,y12uc231/edx-platform,franosincic/edx-platform,devs1991/test_edx_docmode,Lektorium-LLC/edx-platform,SravanthiSinha/edx-platform,mcgachey/edx-platform,vikas1885/test1,pku9104038/edx-platform,PepperPD/edx-pepper-platform,deepsrijit1105/edx-platform,andyzsf/edx,inares/edx-platform,jazkarta/edx-platform,morpheby/levelup-by,OmarIthawi/edx-platform,rue89-tech/edx-platform,doganov/edx-platform,devs1991/test_edx_docmode,antonve/s4-project-mooc,MakeHer/edx-platform,mushtaqak/edx-platform,jonathan-beard/edx-platform,jzoldak/edx-platform,EduPepperPDTesting/pepper2013-testing,dsajkl/reqiop,jswope00/griffinx,TeachAtTUM/edx-platform,mbareta/edx-platform-ft,hamzehd/edx-platform,alu042/edx-platform,Edraak/circleci-edx-platform,SivilTaram/edx-platform,syjeon/new_edx,JioEducation/edx-platform,nagyistoce/edx-platform,procangroup/edx-platform,vismartltd/edx-platform,leansoft/edx-platform,gymnasium/edx-platform,praveen-pal/edx-platform,cyanna/edx-platform,mahendra-r/edx-platform,Livit/Livit.Learn.EdX,ZLLab-Mooc/edx-platform,ovnicraft/edx-platform,pomegranited/edx-platform,zhenzhai/edx-platform,ampax/edx-platform-backup,sameetb-cuelogic/edx-platform-test,ak2703/edx-platform,xuxiao19910803/edx,JCBarahona/edX,miptliot/edx-platform,analyseuc3m/ANALYSE-v1,appliedx/edx-platform,iivic/BoiseStateX,ubc/edx-platform,leansoft/edx-platform,rismalrv/edx-platform,AkA84/edx-platform,J861449197/edx-platform,IONISx/edx-platform,kursitet/edx-platform,kalebhartje/schoolboost,mjirayu/sit_academy,synergeticsedx/deployment-wipro,OmarIthawi/edx-platform,pelikanchik/edx-platform,bitifirefly/edx-platform,chauhanhardik/populo_2,arifsetiawan/edx-platform,chauhanhardik/populo_2,Semi-global/edx-platform,J861449197/edx-platform,IndonesiaX/edx-platform,adoosii/edx-platform,B-MOOC/edx-platform,UOMx/edx-platform,lduarte1991/edx-platform,shurihell/testasia,pepeportela/edx-platform,rhndg/openedx,vasyarv/edx-platform,IITBinterns13/edx-platform-dev,knehez/edx-platform,proversity-org/edx-platform,cecep-edu/edx-platform,pepeportela/edx-platform,kmoocdev/edx-platform,tanmaykm/edx-platform,dsajkl/reqiop,mitocw/edx-platform,fintech-circle/edx-platform,eduNEXT/edunext-platform,UXE/local-edx,torchingloom/edx-platform,edry/edx-platform,inares/edx-platform,Ayub-Khan/edx-platform,shashank971/edx-platform,unicri/edx-platform,MSOpenTe
ch/edx-platform,xuxiao19910803/edx-platform,martynovp/edx-platform,gymnasium/edx-platform,ubc/edx-platform,EduPepperPDTesting/pepper2013-testing,lduarte1991/edx-platform,jbzdak/edx-platform,4eek/edx-platform,morenopc/edx-platform,hastexo/edx-platform,vikas1885/test1,EduPepperPD/pepper2013,jzoldak/edx-platform,Edraak/circleci-edx-platform,pelikanchik/edx-platform,shashank971/edx-platform,bitifirefly/edx-platform,nttks/edx-platform,benpatterson/edx-platform,etzhou/edx-platform,CourseTalk/edx-platform,pelikanchik/edx-platform,antonve/s4-project-mooc,ovnicraft/edx-platform,IONISx/edx-platform,apigee/edx-platform,auferack08/edx-platform,defance/edx-platform,rue89-tech/edx-platform,romain-li/edx-platform,analyseuc3m/ANALYSE-v1,eemirtekin/edx-platform,wwj718/ANALYSE,jjmiranda/edx-platform,stvstnfrd/edx-platform,jswope00/GAI,martynovp/edx-platform,hkawasaki/kawasaki-aio8-1,Lektorium-LLC/edx-platform,simbs/edx-platform,fly19890211/edx-platform,Endika/edx-platform,kmoocdev2/edx-platform,abdoosh00/edraak,pomegranited/edx-platform,longmen21/edx-platform,rismalrv/edx-platform,rhndg/openedx,vismartltd/edx-platform,jbassen/edx-platform,vikas1885/test1,mjirayu/sit_academy,zadgroup/edx-platform,alu042/edx-platform,benpatterson/edx-platform,Ayub-Khan/edx-platform,jazztpt/edx-platform,alexthered/kienhoc-platform,arbrandes/edx-platform,mbareta/edx-platform-ft,UXE/local-edx,LearnEra/LearnEraPlaftform,msegado/edx-platform,EduPepperPD/pepper2013,naresh21/synergetics-edx-platform,Softmotions/edx-platform,zofuthan/edx-platform,prarthitm/edxplatform,Stanford-Online/edx-platform,jazkarta/edx-platform,analyseuc3m/ANALYSE-v1,stvstnfrd/edx-platform,jbassen/edx-platform,apigee/edx-platform,dcosentino/edx-platform,zerobatu/edx-platform,eduNEXT/edunext-platform,morpheby/levelup-by,carsongee/edx-platform,xinjiguaike/edx-platform,xuxiao19910803/edx-platform,zerobatu/edx-platform,atsolakid/edx-platform,halvertoluke/edx-platform,AkA84/edx-platform,dsajkl/reqiop,jazkarta/edx-platform,cognitiveclass/edx-platform,cognitiveclass/edx-platform,cognitiveclass/edx-platform,sameetb-cuelogic/edx-platform-test,ferabra/edx-platform,ferabra/edx-platform,DNFcode/edx-platform,polimediaupv/edx-platform,mcgachey/edx-platform,marcore/edx-platform,itsjeyd/edx-platform,xuxiao19910803/edx-platform,mjirayu/sit_academy,romain-li/edx-platform,CourseTalk/edx-platform,gsehub/edx-platform,gsehub/edx-platform,B-MOOC/edx-platform,shubhdev/edxOnBaadal,etzhou/edx-platform,louyihua/edx-platform,RPI-OPENEDX/edx-platform,MSOpenTech/edx-platform,sudheerchintala/LearnEraPlatForm,TeachAtTUM/edx-platform,nttks/jenkins-test,beni55/edx-platform,benpatterson/edx-platform,morpheby/levelup-by,shashank971/edx-platform,IONISx/edx-platform,antoviaque/edx-platform,EduPepperPDTesting/pepper2013-testing,kmoocdev2/edx-platform,olexiim/edx-platform,teltek/edx-platform,polimediaupv/edx-platform,beni55/edx-platform,mahendra-r/edx-platform,CredoReference/edx-platform,MSOpenTech/edx-platform,procangroup/edx-platform,ahmadiga/min_edx,eemirtekin/edx-platform,BehavioralInsightsTeam/edx-platform,doganov/edx-platform,solashirai/edx-platform,xingyepei/edx-platform,ZLLab-Mooc/edx-platform,peterm-itr/edx-platform,alexthered/kienhoc-platform,kamalx/edx-platform,rismalrv/edx-platform,eestay/edx-platform,tanmaykm/edx-platform,chudaol/edx-platform,LearnEra/LearnEraPlaftform,shubhdev/edxOnBaadal,appsembler/edx-platform,ferabra/edx-platform,valtech-mooc/edx-platform,xuxiao19910803/edx-platform,edry/edx-platform,adoosii/edx-platform,PepperPD/edx-pepper-platform,shubhdev/openedx,nanolearningllc/e
dx-platform-cypress-2,UXE/local-edx,yokose-ks/edx-platform,chand3040/cloud_that,lduarte1991/edx-platform,pdehaye/theming-edx-platform,longmen21/edx-platform,4eek/edx-platform,vasyarv/edx-platform,UXE/local-edx,philanthropy-u/edx-platform,alu042/edx-platform,pdehaye/theming-edx-platform,xinjiguaike/edx-platform,angelapper/edx-platform,simbs/edx-platform,ahmadiga/min_edx,pepeportela/edx-platform,EDUlib/edx-platform,nikolas/edx-platform,tanmaykm/edx-platform,JioEducation/edx-platform,iivic/BoiseStateX,mitocw/edx-platform,Unow/edx-platform,marcore/edx-platform,jelugbo/tundex,mjg2203/edx-platform-seas,jamiefolsom/edx-platform,pku9104038/edx-platform,nanolearningllc/edx-platform-cypress,ak2703/edx-platform,kmoocdev2/edx-platform,jswope00/griffinx,appliedx/edx-platform,mjg2203/edx-platform-seas,zofuthan/edx-platform,doganov/edx-platform,playm2mboy/edx-platform,jbzdak/edx-platform,ampax/edx-platform-backup,devs1991/test_edx_docmode,vismartltd/edx-platform,jruiperezv/ANALYSE,jswope00/GAI,zofuthan/edx-platform,SivilTaram/edx-platform,10clouds/edx-platform,10clouds/edx-platform,rationalAgent/edx-platform-custom,RPI-OPENEDX/edx-platform,CourseTalk/edx-platform,arifsetiawan/edx-platform,jamesblunt/edx-platform,openfun/edx-platform,shubhdev/edxOnBaadal,nttks/edx-platform,peterm-itr/edx-platform,rismalrv/edx-platform,mushtaqak/edx-platform,raccoongang/edx-platform,nttks/edx-platform,franosincic/edx-platform,eduNEXT/edx-platform,etzhou/edx-platform,DefyVentures/edx-platform,shubhdev/openedx,naresh21/synergetics-edx-platform,shubhdev/edxOnBaadal,ubc/edx-platform,amir-qayyum-khan/edx-platform,rhndg/openedx,openfun/edx-platform,Softmotions/edx-platform,gymnasium/edx-platform,rue89-tech/edx-platform,EDUlib/edx-platform,edx-solutions/edx-platform,sameetb-cuelogic/edx-platform-test,nanolearningllc/edx-platform-cypress-2,Shrhawk/edx-platform,atsolakid/edx-platform,AkA84/edx-platform,jazztpt/edx-platform,bdero/edx-platform,EduPepperPDTesting/pepper2013-testing,amir-qayyum-khan/edx-platform,rationalAgent/edx-platform-custom,simbs/edx-platform,MakeHer/edx-platform,Ayub-Khan/edx-platform,ahmadio/edx-platform,jolyonb/edx-platform,knehez/edx-platform,waheedahmed/edx-platform,WatanabeYasumasa/edx-platform,PepperPD/edx-pepper-platform,LearnEra/LearnEraPlaftform,Kalyzee/edx-platform,motion2015/a3,tiagochiavericosta/edx-platform,peterm-itr/edx-platform,nikolas/edx-platform,ESOedX/edx-platform,motion2015/a3,procangroup/edx-platform,praveen-pal/edx-platform,doismellburning/edx-platform,edx/edx-platform,gymnasium/edx-platform,MSOpenTech/edx-platform,arbrandes/edx-platform,mcgachey/edx-platform,wwj718/edx-platform,leansoft/edx-platform,TsinghuaX/edx-platform,hmcmooc/muddx-platform,romain-li/edx-platform,utecuy/edx-platform,auferack08/edx-platform,zubair-arbi/edx-platform,carsongee/edx-platform,praveen-pal/edx-platform,EDUlib/edx-platform,cselis86/edx-platform,y12uc231/edx-platform,msegado/edx-platform,jazkarta/edx-platform-for-isc,ahmedaljazzar/edx-platform,TsinghuaX/edx-platform,caesar2164/edx-platform,ubc/edx-platform,ferabra/edx-platform,TeachAtTUM/edx-platform,Endika/edx-platform,simbs/edx-platform,mushtaqak/edx-platform,mjg2203/edx-platform-seas,olexiim/edx-platform,jazztpt/edx-platform,DNFcode/edx-platform,J861449197/edx-platform,zadgroup/edx-platform,fly19890211/edx-platform,vikas1885/test1,msegado/edx-platform,tanmaykm/edx-platform,kamalx/edx-platform,hkawasaki/kawasaki-aio8-0,ahmadio/edx-platform,itsjeyd/edx-platform,hastexo/edx-platform,mjirayu/sit_academy,edx-solutions/edx-platform,bigdatauniversity/edx-platform,amp
ax/edx-platform,Edraak/edraak-platform,pabloborrego93/edx-platform,CredoReference/edx-platform,andyzsf/edx,andyzsf/edx,cselis86/edx-platform,eestay/edx-platform,chudaol/edx-platform,nanolearningllc/edx-platform-cypress-2,jamesblunt/edx-platform,Stanford-Online/edx-platform,jbassen/edx-platform,pabloborrego93/edx-platform,motion2015/edx-platform,jbzdak/edx-platform,edry/edx-platform,miptliot/edx-platform,Edraak/edraak-platform,franosincic/edx-platform,bdero/edx-platform,Livit/Livit.Learn.EdX,beni55/edx-platform,martynovp/edx-platform,hamzehd/edx-platform,naresh21/synergetics-edx-platform,jbassen/edx-platform,raccoongang/edx-platform,hmcmooc/muddx-platform,bdero/edx-platform,rismalrv/edx-platform,nikolas/edx-platform,deepsrijit1105/edx-platform,EduPepperPDTesting/pepper2013-testing,vikas1885/test1,jruiperezv/ANALYSE,louyihua/edx-platform,chand3040/cloud_that,DNFcode/edx-platform,auferack08/edx-platform,SravanthiSinha/edx-platform,DefyVentures/edx-platform,jamiefolsom/edx-platform,xingyepei/edx-platform,jolyonb/edx-platform,eestay/edx-platform,beacloudgenius/edx-platform,ampax/edx-platform-backup,Shrhawk/edx-platform,shabab12/edx-platform,shubhdev/edx-platform,jonathan-beard/edx-platform,Semi-global/edx-platform,deepsrijit1105/edx-platform,hkawasaki/kawasaki-aio8-0,ESOedX/edx-platform,utecuy/edx-platform,jelugbo/tundex,AkA84/edx-platform,OmarIthawi/edx-platform,don-github/edx-platform,adoosii/edx-platform,beacloudgenius/edx-platform,Lektorium-LLC/edx-platform,UOMx/edx-platform,olexiim/edx-platform,Semi-global/edx-platform,ahmadiga/min_edx,zhenzhai/edx-platform,atsolakid/edx-platform,IndonesiaX/edx-platform,jruiperezv/ANALYSE,JCBarahona/edX,morenopc/edx-platform,jzoldak/edx-platform,BehavioralInsightsTeam/edx-platform,WatanabeYasumasa/edx-platform,EduPepperPDTesting/pepper2013-testing,shubhdev/openedx,OmarIthawi/edx-platform,jelugbo/tundex,xingyepei/edx-platform,appsembler/edx-platform,angelapper/edx-platform,jazkarta/edx-platform,sameetb-cuelogic/edx-platform-test,lduarte1991/edx-platform,chand3040/cloud_that,ahmedaljazzar/edx-platform,solashirai/edx-platform,antoviaque/edx-platform,valtech-mooc/edx-platform,EDUlib/edx-platform,ampax/edx-platform-backup,nagyistoce/edx-platform,nagyistoce/edx-platform,shashank971/edx-platform,Unow/edx-platform,nagyistoce/edx-platform,miptliot/edx-platform,y12uc231/edx-platform,prarthitm/edxplatform,dcosentino/edx-platform,TeachAtTUM/edx-platform,longmen21/edx-platform,abdoosh00/edx-rtl-final,unicri/edx-platform,Softmotions/edx-platform,SivilTaram/edx-platform,shubhdev/edxOnBaadal,valtech-mooc/edx-platform,CourseTalk/edx-platform,arbrandes/edx-platform,Edraak/circleci-edx-platform,mahendra-r/edx-platform,synergeticsedx/deployment-wipro,jjmiranda/edx-platform,CredoReference/edx-platform,rue89-tech/edx-platform,nanolearning/edx-platform,mbareta/edx-platform-ft,edx/edx-platform,J861449197/edx-platform,chauhanhardik/populo_2,hkawasaki/kawasaki-aio8-0,etzhou/edx-platform,proversity-org/edx-platform,nanolearningllc/edx-platform-cypress,prarthitm/edxplatform,dsajkl/123,solashirai/edx-platform,valtech-mooc/edx-platform,unicri/edx-platform,romain-li/edx-platform,wwj718/edx-platform,cecep-edu/edx-platform,openfun/edx-platform,beni55/edx-platform,mcgachey/edx-platform,devs1991/test_edx_docmode,yokose-ks/edx-platform,franosincic/edx-platform,xingyepei/edx-platform,hkawasaki/kawasaki-aio8-2,Ayub-Khan/edx-platform,cselis86/edx-platform,nanolearning/edx-platform,synergeticsedx/deployment-wipro,alu042/edx-platform,jamesblunt/edx-platform,Livit/Livit.Learn.EdX,Lektorium-LLC/edx-
platform,B-MOOC/edx-platform,Unow/edx-platform,jelugbo/tundex,ubc/edx-platform,kmoocdev/edx-platform,arifsetiawan/edx-platform,mitocw/edx-platform,chrisndodge/edx-platform,benpatterson/edx-platform,cognitiveclass/edx-platform,nanolearningllc/edx-platform-cypress,wwj718/ANALYSE,Endika/edx-platform,MSOpenTech/edx-platform,kxliugang/edx-platform,iivic/BoiseStateX,motion2015/edx-platform,nikolas/edx-platform,vismartltd/edx-platform,bitifirefly/edx-platform,iivic/BoiseStateX,pdehaye/theming-edx-platform,atsolakid/edx-platform,doismellburning/edx-platform,a-parhom/edx-platform,itsjeyd/edx-platform,dkarakats/edx-platform,atsolakid/edx-platform,Edraak/edraak-platform,yokose-ks/edx-platform,LICEF/edx-platform,cpennington/edx-platform,vismartltd/edx-platform,doganov/edx-platform,SravanthiSinha/edx-platform,inares/edx-platform,10clouds/edx-platform,zerobatu/edx-platform,Kalyzee/edx-platform,BehavioralInsightsTeam/edx-platform,fly19890211/edx-platform,ahmadio/edx-platform,caesar2164/edx-platform,jonathan-beard/edx-platform,xinjiguaike/edx-platform,motion2015/a3,eduNEXT/edunext-platform,TsinghuaX/edx-platform,chudaol/edx-platform,teltek/edx-platform,zubair-arbi/edx-platform,jonathan-beard/edx-platform,tiagochiavericosta/edx-platform,kxliugang/edx-platform,tiagochiavericosta/edx-platform,mtlchun/edx,beacloudgenius/edx-platform,devs1991/test_edx_docmode,jazkarta/edx-platform-for-isc,raccoongang/edx-platform,abdoosh00/edx-rtl-final,xinjiguaike/edx-platform,abdoosh00/edx-rtl-final,itsjeyd/edx-platform,JioEducation/edx-platform,yokose-ks/edx-platform,edx/edx-platform,miptliot/edx-platform,cselis86/edx-platform,cyanna/edx-platform,jazkarta/edx-platform,philanthropy-u/edx-platform,teltek/edx-platform,ahmadiga/min_edx,kamalx/edx-platform,nttks/edx-platform,hkawasaki/kawasaki-aio8-1,msegado/edx-platform,syjeon/new_edx,zadgroup/edx-platform,valtech-mooc/edx-platform,SravanthiSinha/edx-platform,andyzsf/edx,cecep-edu/edx-platform,kamalx/edx-platform,doismellburning/edx-platform,alexthered/kienhoc-platform,zofuthan/edx-platform,hmcmooc/muddx-platform,motion2015/a3,BehavioralInsightsTeam/edx-platform,a-parhom/edx-platform,LICEF/edx-platform,cecep-edu/edx-platform,unicri/edx-platform,kxliugang/edx-platform,abdoosh00/edraak,EduPepperPD/pepper2013,alexthered/kienhoc-platform,pomegranited/edx-platform,edx/edx-platform,chand3040/cloud_that,nanolearningllc/edx-platform-cypress,waheedahmed/edx-platform,dcosentino/edx-platform,morpheby/levelup-by,nttks/jenkins-test,LICEF/edx-platform,eemirtekin/edx-platform,torchingloom/edx-platform,zhenzhai/edx-platform,IONISx/edx-platform,J861449197/edx-platform,Endika/edx-platform,mtlchun/edx,edx-solutions/edx-platform,Edraak/circleci-edx-platform,wwj718/ANALYSE,openfun/edx-platform,torchingloom/edx-platform,IITBinterns13/edx-platform-dev,mcgachey/edx-platform,cyanna/edx-platform,jswope00/griffinx,waheedahmed/edx-platform,bigdatauniversity/edx-platform,chauhanhardik/populo,jamesblunt/edx-platform,dsajkl/123,inares/edx-platform,Stanford-Online/edx-platform,nikolas/edx-platform,nttks/jenkins-test,jruiperezv/ANALYSE,halvertoluke/edx-platform,antoviaque/edx-platform,don-github/edx-platform,Stanford-Online/edx-platform,kamalx/edx-platform,jelugbo/tundex,jazkarta/edx-platform-for-isc,IONISx/edx-platform,marcore/edx-platform,IndonesiaX/edx-platform,WatanabeYasumasa/edx-platform,JCBarahona/edX,louyihua/edx-platform,dkarakats/edx-platform,fly19890211/edx-platform,shubhdev/edx-platform,ak2703/edx-platform,kmoocdev2/edx-platform,zhenzhai/edx-platform,Edraak/edraak-platform,solashirai/edx-platform,pl
aym2mboy/edx-platform,praveen-pal/edx-platform,ahmedaljazzar/edx-platform,chudaol/edx-platform,polimediaupv/edx-platform,ahmadio/edx-platform,devs1991/test_edx_docmode,pelikanchik/edx-platform,kalebhartje/schoolboost
|
Add start of test framework for capa
|
import json
from mock import Mock
import unittest
from xmodule.capa_module import CapaModule
from xmodule.modulestore import Location
from lxml import etree
from . import test_system
class CapaFactory(object):
"""
A helper class to create problem modules with various parameters for testing.
"""
sample_problem_xml = """<?xml version="1.0"?>
<problem>
<text>
<p>What is pi, to two decimal placs?</p>
</text>
<numericalresponse answer="3.14">
<textline math="1" size="30"/>
</numericalresponse>
</problem>
"""
num = 0
@staticmethod
def next_num():
CapaFactory.num += 1
return CapaFactory.num
@staticmethod
def create():
definition = {'data': CapaFactory.sample_problem_xml,}
location = Location(["i4x", "edX", "capa_test", "problem",
"SampleProblem{0}".format(CapaFactory.next_num())])
metadata = {}
descriptor = Mock(weight="1")
instance_state = None
module = CapaModule(test_system, location,
definition, descriptor,
instance_state, None, metadata=metadata)
return module
class CapaModuleTest(unittest.TestCase):
def test_import(self):
module = CapaFactory.create()
self.assertEqual(module.get_score()['score'], 0)
other_module = CapaFactory.create()
self.assertEqual(module.get_score()['score'], 0)
self.assertNotEqual(module.url_name, other_module.url_name,
"Factory should be creating unique names for each problem")
|
<commit_before><commit_msg>Add start of test framework for capa<commit_after>
|
import json
from mock import Mock
import unittest
from xmodule.capa_module import CapaModule
from xmodule.modulestore import Location
from lxml import etree
from . import test_system
class CapaFactory(object):
"""
A helper class to create problem modules with various parameters for testing.
"""
sample_problem_xml = """<?xml version="1.0"?>
<problem>
<text>
<p>What is pi, to two decimal placs?</p>
</text>
<numericalresponse answer="3.14">
<textline math="1" size="30"/>
</numericalresponse>
</problem>
"""
num = 0
@staticmethod
def next_num():
CapaFactory.num += 1
return CapaFactory.num
@staticmethod
def create():
definition = {'data': CapaFactory.sample_problem_xml,}
location = Location(["i4x", "edX", "capa_test", "problem",
"SampleProblem{0}".format(CapaFactory.next_num())])
metadata = {}
descriptor = Mock(weight="1")
instance_state = None
module = CapaModule(test_system, location,
definition, descriptor,
instance_state, None, metadata=metadata)
return module
class CapaModuleTest(unittest.TestCase):
def test_import(self):
module = CapaFactory.create()
self.assertEqual(module.get_score()['score'], 0)
other_module = CapaFactory.create()
self.assertEqual(module.get_score()['score'], 0)
self.assertNotEqual(module.url_name, other_module.url_name,
"Factory should be creating unique names for each problem")
|
Add start of test framework for capaimport json
from mock import Mock
import unittest
from xmodule.capa_module import CapaModule
from xmodule.modulestore import Location
from lxml import etree
from . import test_system
class CapaFactory(object):
"""
A helper class to create problem modules with various parameters for testing.
"""
sample_problem_xml = """<?xml version="1.0"?>
<problem>
<text>
<p>What is pi, to two decimal placs?</p>
</text>
<numericalresponse answer="3.14">
<textline math="1" size="30"/>
</numericalresponse>
</problem>
"""
num = 0
@staticmethod
def next_num():
CapaFactory.num += 1
return CapaFactory.num
@staticmethod
def create():
definition = {'data': CapaFactory.sample_problem_xml,}
location = Location(["i4x", "edX", "capa_test", "problem",
"SampleProblem{0}".format(CapaFactory.next_num())])
metadata = {}
descriptor = Mock(weight="1")
instance_state = None
module = CapaModule(test_system, location,
definition, descriptor,
instance_state, None, metadata=metadata)
return module
class CapaModuleTest(unittest.TestCase):
def test_import(self):
module = CapaFactory.create()
self.assertEqual(module.get_score()['score'], 0)
other_module = CapaFactory.create()
self.assertEqual(module.get_score()['score'], 0)
self.assertNotEqual(module.url_name, other_module.url_name,
"Factory should be creating unique names for each problem")
|
<commit_before><commit_msg>Add start of test framework for capa<commit_after>import json
from mock import Mock
import unittest
from xmodule.capa_module import CapaModule
from xmodule.modulestore import Location
from lxml import etree
from . import test_system
class CapaFactory(object):
"""
A helper class to create problem modules with various parameters for testing.
"""
sample_problem_xml = """<?xml version="1.0"?>
<problem>
<text>
<p>What is pi, to two decimal placs?</p>
</text>
<numericalresponse answer="3.14">
<textline math="1" size="30"/>
</numericalresponse>
</problem>
"""
num = 0
@staticmethod
def next_num():
CapaFactory.num += 1
return CapaFactory.num
@staticmethod
def create():
definition = {'data': CapaFactory.sample_problem_xml,}
location = Location(["i4x", "edX", "capa_test", "problem",
"SampleProblem{0}".format(CapaFactory.next_num())])
metadata = {}
descriptor = Mock(weight="1")
instance_state = None
module = CapaModule(test_system, location,
definition, descriptor,
instance_state, None, metadata=metadata)
return module
class CapaModuleTest(unittest.TestCase):
def test_import(self):
module = CapaFactory.create()
self.assertEqual(module.get_score()['score'], 0)
other_module = CapaFactory.create()
self.assertEqual(module.get_score()['score'], 0)
self.assertNotEqual(module.url_name, other_module.url_name,
"Factory should be creating unique names for each problem")
|
|
8b90c944678877ce9d68780590ad28573bf617a8
|
tests/formats_test/misc_test.py
|
tests/formats_test/misc_test.py
|
import unittest
from blivet.formats.fs import *
from blivet.size import Size
class FSOverheadTestCase(unittest.TestCase):
def test_required_size_FS(self):
# FS is abstract parent which doesn't have metadata
self.assertEqual(FS.get_required_size(Size("100 MiB")), Size("100 MiB"))
self.assertEqual(Ext2FS.get_required_size(Size("100 MiB")), Size(Decimal(Size("100 MiB")) / Decimal(0.93)))
def test_biggest_overhead_FS(self):
self.assertTrue(FS.biggest_overhead_FS() is BTRFS)
self.assertTrue(FS.biggest_overhead_FS([FATFS, Ext2FS, Ext3FS, Ext4FS]) is Ext4FS)
with self.assertRaises(ValueError):
FS.biggest_overhead_FS([])
# only classes with FS parent will be used
with self.assertRaises(ValueError):
class Dummy(object):
pass
FS.biggest_overhead_FS([Dummy])
|
Add tests for FS overhead methods
|
Add tests for FS overhead methods
(ported from commit 9accd0e360ed4bede667b5f8a608e8c519ebd831)
|
Python
|
lgpl-2.1
|
vojtechtrefny/blivet,jkonecny12/blivet,jkonecny12/blivet,rhinstaller/blivet,vpodzime/blivet,vojtechtrefny/blivet,AdamWill/blivet,rhinstaller/blivet,AdamWill/blivet,vpodzime/blivet,rvykydal/blivet,rvykydal/blivet
|
Add tests for FS overhead methods
(ported from commit 9accd0e360ed4bede667b5f8a608e8c519ebd831)
|
import unittest
from blivet.formats.fs import *
from blivet.size import Size
class FSOverheadTestCase(unittest.TestCase):
def test_required_size_FS(self):
# FS is abstract parent which doesn't have metadata
self.assertEqual(FS.get_required_size(Size("100 MiB")), Size("100 MiB"))
self.assertEqual(Ext2FS.get_required_size(Size("100 MiB")), Size(Decimal(Size("100 MiB")) / Decimal(0.93)))
def test_biggest_overhead_FS(self):
self.assertTrue(FS.biggest_overhead_FS() is BTRFS)
self.assertTrue(FS.biggest_overhead_FS([FATFS, Ext2FS, Ext3FS, Ext4FS]) is Ext4FS)
with self.assertRaises(ValueError):
FS.biggest_overhead_FS([])
# only classes with FS parent will be used
with self.assertRaises(ValueError):
class Dummy(object):
pass
FS.biggest_overhead_FS([Dummy])
|
<commit_before><commit_msg>Add tests for FS overhead methods
(ported from commit 9accd0e360ed4bede667b5f8a608e8c519ebd831)<commit_after>
|
import unittest
from blivet.formats.fs import *
from blivet.size import Size
class FSOverheadTestCase(unittest.TestCase):
def test_required_size_FS(self):
# FS is abstract parent which doesn't have metadata
self.assertEqual(FS.get_required_size(Size("100 MiB")), Size("100 MiB"))
self.assertEqual(Ext2FS.get_required_size(Size("100 MiB")), Size(Decimal(Size("100 MiB")) / Decimal(0.93)))
def test_biggest_overhead_FS(self):
self.assertTrue(FS.biggest_overhead_FS() is BTRFS)
self.assertTrue(FS.biggest_overhead_FS([FATFS, Ext2FS, Ext3FS, Ext4FS]) is Ext4FS)
with self.assertRaises(ValueError):
FS.biggest_overhead_FS([])
# only classes with FS parent will be used
with self.assertRaises(ValueError):
class Dummy(object):
pass
FS.biggest_overhead_FS([Dummy])
|
Add tests for FS overhead methods
(ported from commit 9accd0e360ed4bede667b5f8a608e8c519ebd831)import unittest
from blivet.formats.fs import *
from blivet.size import Size
class FSOverheadTestCase(unittest.TestCase):
def test_required_size_FS(self):
# FS is abstract parent which doesn't have metadata
self.assertEqual(FS.get_required_size(Size("100 MiB")), Size("100 MiB"))
self.assertEqual(Ext2FS.get_required_size(Size("100 MiB")), Size(Decimal(Size("100 MiB")) / Decimal(0.93)))
def test_biggest_overhead_FS(self):
self.assertTrue(FS.biggest_overhead_FS() is BTRFS)
self.assertTrue(FS.biggest_overhead_FS([FATFS, Ext2FS, Ext3FS, Ext4FS]) is Ext4FS)
with self.assertRaises(ValueError):
FS.biggest_overhead_FS([])
# only classes with FS parent will be used
with self.assertRaises(ValueError):
class Dummy(object):
pass
FS.biggest_overhead_FS([Dummy])
|
<commit_before><commit_msg>Add tests for FS overhead methods
(ported from commit 9accd0e360ed4bede667b5f8a608e8c519ebd831)<commit_after>import unittest
from blivet.formats.fs import *
from blivet.size import Size
class FSOverheadTestCase(unittest.TestCase):
def test_required_size_FS(self):
# FS is abstract parent which doesn't have metadata
self.assertEqual(FS.get_required_size(Size("100 MiB")), Size("100 MiB"))
self.assertEqual(Ext2FS.get_required_size(Size("100 MiB")), Size(Decimal(Size("100 MiB")) / Decimal(0.93)))
def test_biggest_overhead_FS(self):
self.assertTrue(FS.biggest_overhead_FS() is BTRFS)
self.assertTrue(FS.biggest_overhead_FS([FATFS, Ext2FS, Ext3FS, Ext4FS]) is Ext4FS)
with self.assertRaises(ValueError):
FS.biggest_overhead_FS([])
# only classes with FS parent will be used
with self.assertRaises(ValueError):
class Dummy(object):
pass
FS.biggest_overhead_FS([Dummy])
|
|
a8497dcbeaa9d39a98402c6e46c86cd10c7d4de3
|
tests/unit/client_regression.py
|
tests/unit/client_regression.py
|
"""Unit tests for the client library.
TODO: we have plans to move ./client.py to integration/, since those
are really integration tests. Once that's done we should move this to
./client.py; it's here now to avoid name collisions/conflicts.
"""
import flask
import pytest
from schema import Schema
from hil import config, rest
from hil.client.base import FailedAPICallException
from hil.client.client import Client
from hil.test_common import HybridHTTPClient, fail_on_log_warnings, \
fresh_database, server_init, config_testsuite
fail_on_log_warnings = pytest.fixture(fail_on_log_warnings)
fresh_database = pytest.fixture(fresh_database)
server_init = pytest.fixture(server_init)
@pytest.fixture()
def configure():
"""Fixture to load the HIL config."""
config_testsuite()
config.load_extensions()
pytestmark = pytest.mark.usefixtures('fail_on_log_warnings',
'configure',
'fresh_database',
'server_init')
def test_non_json_response():
"""The client library should raise an error when the response body is
unexpectedly not JSON.
"""
# Endpoint is arbitrary:
endpoint = 'http:/127.0.0.1:9933'
client = Client(endpoint, HybridHTTPClient(endpoint))
# Override one of the API calls with a different implementation:
# pylint: disable=unused-variable
@rest.rest_call('GET', '/nodes/free', Schema({}))
def list_free_nodes():
"""Mock API call for testing; always raises an error."""
flask.abort(500)
try:
client.node.list('free')
assert False, 'Client library did not report an error!'
except FailedAPICallException as e:
# Make sure it's the right error:
assert e.error_type == 'unknown', 'Wrong error type.'
|
Add a regression test for the error-silencing issue.
|
Add a regression test for the error-silencing issue.
|
Python
|
apache-2.0
|
CCI-MOC/haas
|
Add a regression test for the error-silencing issue.
|
"""Unit tests for the client library.
TODO: we have plans to move ./client.py to integration/, since those
are really integration tests. Once that's done we should move this to
./client.py; it's here now to avoid name collisions/conflicts.
"""
import flask
import pytest
from schema import Schema
from hil import config, rest
from hil.client.base import FailedAPICallException
from hil.client.client import Client
from hil.test_common import HybridHTTPClient, fail_on_log_warnings, \
fresh_database, server_init, config_testsuite
fail_on_log_warnings = pytest.fixture(fail_on_log_warnings)
fresh_database = pytest.fixture(fresh_database)
server_init = pytest.fixture(server_init)
@pytest.fixture()
def configure():
"""Fixture to load the HIL config."""
config_testsuite()
config.load_extensions()
pytestmark = pytest.mark.usefixtures('fail_on_log_warnings',
'configure',
'fresh_database',
'server_init')
def test_non_json_response():
"""The client library should raise an error when the response body is
unexpectedly not JSON.
"""
# Endpoint is arbitrary:
endpoint = 'http:/127.0.0.1:9933'
client = Client(endpoint, HybridHTTPClient(endpoint))
# Override one of the API calls with a different implementation:
# pylint: disable=unused-variable
@rest.rest_call('GET', '/nodes/free', Schema({}))
def list_free_nodes():
"""Mock API call for testing; always raises an error."""
flask.abort(500)
try:
client.node.list('free')
assert False, 'Client library did not report an error!'
except FailedAPICallException as e:
# Make sure it's the right error:
assert e.error_type == 'unknown', 'Wrong error type.'
|
<commit_before><commit_msg>Add a regression test for the error-silencing issue.<commit_after>
|
"""Unit tests for the client library.
TODO: we have plans to move ./client.py to integration/, since those
are really integration tests. Once that's done we should move this to
./client.py; it's here now to avoid name collisions/conflicts.
"""
import flask
import pytest
from schema import Schema
from hil import config, rest
from hil.client.base import FailedAPICallException
from hil.client.client import Client
from hil.test_common import HybridHTTPClient, fail_on_log_warnings, \
fresh_database, server_init, config_testsuite
fail_on_log_warnings = pytest.fixture(fail_on_log_warnings)
fresh_database = pytest.fixture(fresh_database)
server_init = pytest.fixture(server_init)
@pytest.fixture()
def configure():
"""Fixture to load the HIL config."""
config_testsuite()
config.load_extensions()
pytestmark = pytest.mark.usefixtures('fail_on_log_warnings',
'configure',
'fresh_database',
'server_init')
def test_non_json_response():
"""The client library should raise an error when the response body is
unexpectedly not JSON.
"""
# Endpoint is arbitrary:
endpoint = 'http:/127.0.0.1:9933'
client = Client(endpoint, HybridHTTPClient(endpoint))
# Override one of the API calls with a different implementation:
# pylint: disable=unused-variable
@rest.rest_call('GET', '/nodes/free', Schema({}))
def list_free_nodes():
"""Mock API call for testing; always raises an error."""
flask.abort(500)
try:
client.node.list('free')
assert False, 'Client library did not report an error!'
except FailedAPICallException as e:
# Make sure it's the right error:
assert e.error_type == 'unknown', 'Wrong error type.'
|
Add a regression test for the error-silencing issue."""Unit tests for the client library.
TODO: we have plans to move ./client.py to integration/, since those
are really integration tests. Once that's done we should move this to
./client.py; it's here now to avoid name collisions/conflicts.
"""
import flask
import pytest
from schema import Schema
from hil import config, rest
from hil.client.base import FailedAPICallException
from hil.client.client import Client
from hil.test_common import HybridHTTPClient, fail_on_log_warnings, \
fresh_database, server_init, config_testsuite
fail_on_log_warnings = pytest.fixture(fail_on_log_warnings)
fresh_database = pytest.fixture(fresh_database)
server_init = pytest.fixture(server_init)
@pytest.fixture()
def configure():
"""Fixture to load the HIL config."""
config_testsuite()
config.load_extensions()
pytestmark = pytest.mark.usefixtures('fail_on_log_warnings',
'configure',
'fresh_database',
'server_init')
def test_non_json_response():
"""The client library should raise an error when the response body is
unexpectedly not JSON.
"""
# Endpoint is arbitrary:
endpoint = 'http:/127.0.0.1:9933'
client = Client(endpoint, HybridHTTPClient(endpoint))
# Override one of the API calls with a different implementation:
# pylint: disable=unused-variable
@rest.rest_call('GET', '/nodes/free', Schema({}))
def list_free_nodes():
"""Mock API call for testing; always raises an error."""
flask.abort(500)
try:
client.node.list('free')
assert False, 'Client library did not report an error!'
except FailedAPICallException as e:
# Make sure it's the right error:
assert e.error_type == 'unknown', 'Wrong error type.'
|
<commit_before><commit_msg>Add a regression test for the error-silencing issue.<commit_after>"""Unit tests for the client library.
TODO: we have plans to move ./client.py to integration/, since those
are really integration tests. Once that's done we should move this to
./client.py; it's here now to avoid name collisions/conflicts.
"""
import flask
import pytest
from schema import Schema
from hil import config, rest
from hil.client.base import FailedAPICallException
from hil.client.client import Client
from hil.test_common import HybridHTTPClient, fail_on_log_warnings, \
fresh_database, server_init, config_testsuite
fail_on_log_warnings = pytest.fixture(fail_on_log_warnings)
fresh_database = pytest.fixture(fresh_database)
server_init = pytest.fixture(server_init)
@pytest.fixture()
def configure():
"""Fixture to load the HIL config."""
config_testsuite()
config.load_extensions()
pytestmark = pytest.mark.usefixtures('fail_on_log_warnings',
'configure',
'fresh_database',
'server_init')
def test_non_json_response():
"""The client library should raise an error when the response body is
unexpectedly not JSON.
"""
# Endpoint is arbitrary:
endpoint = 'http:/127.0.0.1:9933'
client = Client(endpoint, HybridHTTPClient(endpoint))
# Override one of the API calls with a different implementation:
# pylint: disable=unused-variable
@rest.rest_call('GET', '/nodes/free', Schema({}))
def list_free_nodes():
"""Mock API call for testing; always raises an error."""
flask.abort(500)
try:
client.node.list('free')
assert False, 'Client library did not report an error!'
except FailedAPICallException as e:
# Make sure it's the right error:
assert e.error_type == 'unknown', 'Wrong error type.'
|
|
a96046b4b7372cb942509b5e9778d54124319617
|
bin/rofi_menu.py
|
bin/rofi_menu.py
|
from typing import Dict, Callable
from rofi import Rofi
def menu(r: Rofi, prompt: str, options: Dict[str, Callable], *args, **kwargs):
"""
Create a menu using rofi to execute on of some options, all args not documented
are passed directly into Rofi.select
:param options: A dict of strings to show on the menu the action they execute
:param rofi: the rofi instance to use
:returns: The name of the option selected and the value returned by the
function executed, or, if none is selected, None
"""
index, key = r.select(prompt, options.keys(), *args, **kwargs)
if key == -1: return None
name, action = list(options.items())[index]
result = action()
return name, result
|
Add a common helper for selection menus
|
[rofi] Add a common helper for selection menus
|
Python
|
mit
|
mpardalos/dotfiles,mpardalos/dotfiles
|
[rofi] Add a common helper for selection menus
|
from typing import Dict, Callable
from rofi import Rofi
def menu(r: Rofi, prompt: str, options: Dict[str, Callable], *args, **kwargs):
"""
Create a menu using rofi to execute on of some options, all args not documented
are passed directly into Rofi.select
:param options: A dict of strings to show on the menu the action they execute
:param rofi: the rofi instance to use
:returns: The name of the option selected and the value returned by the
function executed, or, if none is selected, None
"""
index, key = r.select(prompt, options.keys(), *args, **kwargs)
if key == -1: return None
name, action = list(options.items())[index]
result = action()
return name, result
|
<commit_before><commit_msg>[rofi] Add a common helper for selection menus<commit_after>
|
from typing import Dict, Callable
from rofi import Rofi
def menu(r: Rofi, prompt: str, options: Dict[str, Callable], *args, **kwargs):
"""
Create a menu using rofi to execute on of some options, all args not documented
are passed directly into Rofi.select
:param options: A dict of strings to show on the menu the action they execute
:param rofi: the rofi instance to use
:returns: The name of the option selected and the value returned by the
function executed, or, if none is selected, None
"""
index, key = r.select(prompt, options.keys(), *args, **kwargs)
if key == -1: return None
name, action = list(options.items())[index]
result = action()
return name, result
|
[rofi] Add a common helper for selection menusfrom typing import Dict, Callable
from rofi import Rofi
def menu(r: Rofi, prompt: str, options: Dict[str, Callable], *args, **kwargs):
"""
Create a menu using rofi to execute on of some options, all args not documented
are passed directly into Rofi.select
:param options: A dict of strings to show on the menu the action they execute
:param rofi: the rofi instance to use
:returns: The name of the option selected and the value returned by the
function executed, or, if none is selected, None
"""
index, key = r.select(prompt, options.keys(), *args, **kwargs)
if key == -1: return None
name, action = list(options.items())[index]
result = action()
return name, result
|
<commit_before><commit_msg>[rofi] Add a common helper for selection menus<commit_after>from typing import Dict, Callable
from rofi import Rofi
def menu(r: Rofi, prompt: str, options: Dict[str, Callable], *args, **kwargs):
"""
Create a menu using rofi to execute on of some options, all args not documented
are passed directly into Rofi.select
:param options: A dict of strings to show on the menu the action they execute
:param rofi: the rofi instance to use
:returns: The name of the option selected and the value returned by the
function executed, or, if none is selected, None
"""
index, key = r.select(prompt, options.keys(), *args, **kwargs)
if key == -1: return None
name, action = list(options.items())[index]
result = action()
return name, result
|
|
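A minimal usage sketch for the menu() helper above; it is not part of the original commit, and the module name, prompt, and example actions are assumptions made only for illustration:
import subprocess
from rofi import Rofi
from rofi_menu import menu  # assumes bin/rofi_menu.py is importable under this name

r = Rofi()
selection = menu(r, "Power", {
    "Lock": lambda: subprocess.call(["loginctl", "lock-session"]),
    "Suspend": lambda: subprocess.call(["systemctl", "suspend"]),
})
if selection is not None:
    name, result = selection   # menu() returns the chosen label and the action's return value
    print("Ran option:", name)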
096424ea7809e5512a932c79eff6676695c1d27e
|
telethon/network/connection/connection.py
|
telethon/network/connection/connection.py
|
import abc
import asyncio
class Connection(abc.ABC):
"""
The `Connection` class is a wrapper around ``asyncio.open_connection``.
Subclasses are meant to communicate with this class through a queue.
This class provides a reliable interface that will stay connected
under any conditions for as long as the user doesn't disconnect or
the input parameters to auto-reconnect dictate otherwise.
"""
# TODO Support proxy. Support timeout?
def __init__(self, ip, port, *, loop):
self._ip = ip
self._port = port
self._loop = loop
self._reader = None
self._writer = None
self._disconnected = asyncio.Event(loop=loop)
self._disconnected.set()
self._send_task = None
self._recv_task = None
self._send_queue = asyncio.Queue(1)
self._recv_queue = asyncio.Queue(1)
async def connect(self):
"""
Establishes a connection with the server.
"""
self._reader, self._writer = await asyncio.open_connection(
self._ip, self._port, loop=self._loop)
self._disconnected.clear()
self._send_task = self._loop.create_task(self._send_loop())
self._recv_task = self._loop.create_task(self._recv_loop())
def disconnect(self):
"""
Disconnects from the server.
"""
self._disconnected.set()
self._writer.close()
def send(self, data):
"""
Sends a packet of data through this connection mode.
This method returns a coroutine.
"""
return self._send_queue.put(data)
def recv(self):
"""
Receives a packet of data through this connection mode.
This method returns a coroutine.
"""
return self._recv_queue.get()
# TODO Get/put to the queue with cancellation
async def _send_loop(self):
"""
This loop is constantly popping items off the queue to send them.
"""
while not self._disconnected.is_set():
self._send(await self._send_queue.get())
await self._writer.drain()
async def _recv_loop(self):
"""
This loop is constantly putting items on the queue as they're read.
"""
while not self._disconnected.is_set():
data = await self._recv()
await self._recv_queue.put(data)
@abc.abstractmethod
def _send(self, data):
"""
This method should be implemented differently under each
connection mode and serialize the data into the packet
the way it should be sent through `self._writer`.
"""
raise NotImplementedError
@abc.abstractmethod
async def _recv(self):
"""
This method should be implemented differently under each
connection mode and deserialize the data from the packet
the way it should be read from `self._reader`.
"""
raise NotImplementedError
|
Create a new Connection class to work through queues
|
Create a new Connection class to work through queues
|
Python
|
mit
|
LonamiWebs/Telethon,expectocode/Telethon,LonamiWebs/Telethon,LonamiWebs/Telethon,LonamiWebs/Telethon
|
Create a new Connection class to work through queues
|
import abc
import asyncio
class Connection(abc.ABC):
"""
The `Connection` class is a wrapper around ``asyncio.open_connection``.
Subclasses are meant to communicate with this class through a queue.
This class provides a reliable interface that will stay connected
under any conditions for as long as the user doesn't disconnect or
the input parameters to auto-reconnect dictate otherwise.
"""
# TODO Support proxy. Support timeout?
def __init__(self, ip, port, *, loop):
self._ip = ip
self._port = port
self._loop = loop
self._reader = None
self._writer = None
self._disconnected = asyncio.Event(loop=loop)
self._disconnected.set()
self._send_task = None
self._recv_task = None
self._send_queue = asyncio.Queue(1)
self._recv_queue = asyncio.Queue(1)
async def connect(self):
"""
Establishes a connection with the server.
"""
self._reader, self._writer = await asyncio.open_connection(
self._ip, self._port, loop=self._loop)
self._disconnected.clear()
self._send_task = self._loop.create_task(self._send_loop())
self._recv_task = self._loop.create_task(self._recv_loop())
def disconnect(self):
"""
Disconnects from the server.
"""
self._disconnected.set()
self._writer.close()
def send(self, data):
"""
Sends a packet of data through this connection mode.
This method returns a coroutine.
"""
return self._send_queue.put(data)
def recv(self):
"""
Receives a packet of data through this connection mode.
This method returns a coroutine.
"""
return self._recv_queue.get()
# TODO Get/put to the queue with cancellation
async def _send_loop(self):
"""
This loop is constantly popping items off the queue to send them.
"""
while not self._disconnected.is_set():
self._send(await self._send_queue.get())
await self._writer.drain()
async def _recv_loop(self):
"""
This loop is constantly putting items on the queue as they're read.
"""
while not self._disconnected.is_set():
data = await self._recv()
await self._recv_queue.put(data)
@abc.abstractmethod
def _send(self, data):
"""
This method should be implemented differently under each
connection mode and serialize the data into the packet
the way it should be sent through `self._writer`.
"""
raise NotImplementedError
@abc.abstractmethod
async def _recv(self):
"""
This method should be implemented differently under each
connection mode and deserialize the data from the packet
the way it should be read from `self._reader`.
"""
raise NotImplementedError
|
<commit_before><commit_msg>Create a new Connection class to work through queues<commit_after>
|
import abc
import asyncio
class Connection(abc.ABC):
"""
The `Connection` class is a wrapper around ``asyncio.open_connection``.
Subclasses are meant to communicate with this class through a queue.
This class provides a reliable interface that will stay connected
under any conditions for as long as the user doesn't disconnect or
the input parameters to auto-reconnect dictate otherwise.
"""
# TODO Support proxy. Support timeout?
def __init__(self, ip, port, *, loop):
self._ip = ip
self._port = port
self._loop = loop
self._reader = None
self._writer = None
self._disconnected = asyncio.Event(loop=loop)
self._disconnected.set()
self._send_task = None
self._recv_task = None
self._send_queue = asyncio.Queue(1)
self._recv_queue = asyncio.Queue(1)
async def connect(self):
"""
Establishes a connection with the server.
"""
self._reader, self._writer = await asyncio.open_connection(
self._ip, self._port, loop=self._loop)
self._disconnected.clear()
self._send_task = self._loop.create_task(self._send_loop())
self._recv_task = self._loop.create_task(self._recv_loop())
def disconnect(self):
"""
Disconnects from the server.
"""
self._disconnected.set()
self._writer.close()
def send(self, data):
"""
Sends a packet of data through this connection mode.
This method returns a coroutine.
"""
return self._send_queue.put(data)
def recv(self):
"""
Receives a packet of data through this connection mode.
This method returns a coroutine.
"""
return self._recv_queue.get()
# TODO Get/put to the queue with cancellation
async def _send_loop(self):
"""
This loop is constantly popping items off the queue to send them.
"""
while not self._disconnected.is_set():
self._send(await self._send_queue.get())
await self._writer.drain()
async def _recv_loop(self):
"""
This loop is constantly putting items on the queue as they're read.
"""
while not self._disconnected.is_set():
data = await self._recv()
await self._recv_queue.put(data)
@abc.abstractmethod
def _send(self, data):
"""
This method should be implemented differently under each
connection mode and serialize the data into the packet
the way it should be sent through `self._writer`.
"""
raise NotImplementedError
@abc.abstractmethod
async def _recv(self):
"""
This method should be implemented differently under each
connection mode and deserialize the data from the packet
the way it should be read from `self._reader`.
"""
raise NotImplementedError
|
Create a new Connection class to work through queuesimport abc
import asyncio
class Connection(abc.ABC):
"""
The `Connection` class is a wrapper around ``asyncio.open_connection``.
Subclasses are meant to communicate with this class through a queue.
This class provides a reliable interface that will stay connected
under any conditions for as long as the user doesn't disconnect or
the input parameters to auto-reconnect dictate otherwise.
"""
# TODO Support proxy. Support timeout?
def __init__(self, ip, port, *, loop):
self._ip = ip
self._port = port
self._loop = loop
self._reader = None
self._writer = None
self._disconnected = asyncio.Event(loop=loop)
self._disconnected.set()
self._send_task = None
self._recv_task = None
self._send_queue = asyncio.Queue(1)
self._recv_queue = asyncio.Queue(1)
async def connect(self):
"""
Establishes a connection with the server.
"""
self._reader, self._writer = await asyncio.open_connection(
self._ip, self._port, loop=self._loop)
self._disconnected.clear()
self._send_task = self._loop.create_task(self._send_loop())
self._recv_task = self._loop.create_task(self._recv_loop())
def disconnect(self):
"""
Disconnects from the server.
"""
self._disconnected.set()
self._writer.close()
def send(self, data):
"""
Sends a packet of data through this connection mode.
This method returns a coroutine.
"""
return self._send_queue.put(data)
def recv(self):
"""
Receives a packet of data through this connection mode.
This method returns a coroutine.
"""
return self._recv_queue.get()
# TODO Get/put to the queue with cancellation
async def _send_loop(self):
"""
This loop is constantly popping items off the queue to send them.
"""
while not self._disconnected.is_set():
self._send(await self._send_queue.get())
await self._writer.drain()
async def _recv_loop(self):
"""
This loop is constantly putting items on the queue as they're read.
"""
while not self._disconnected.is_set():
data = await self._recv()
await self._recv_queue.put(data)
@abc.abstractmethod
def _send(self, data):
"""
This method should be implemented differently under each
connection mode and serialize the data into the packet
the way it should be sent through `self._writer`.
"""
raise NotImplementedError
@abc.abstractmethod
async def _recv(self):
"""
This method should be implemented differently under each
connection mode and deserialize the data from the packet
the way it should be read from `self._reader`.
"""
raise NotImplementedError
|
<commit_before><commit_msg>Create a new Connection class to work through queues<commit_after>import abc
import asyncio
class Connection(abc.ABC):
"""
The `Connection` class is a wrapper around ``asyncio.open_connection``.
Subclasses are meant to communicate with this class through a queue.
This class provides a reliable interface that will stay connected
under any conditions for as long as the user doesn't disconnect or
the input parameters to auto-reconnect dictate otherwise.
"""
# TODO Support proxy. Support timeout?
def __init__(self, ip, port, *, loop):
self._ip = ip
self._port = port
self._loop = loop
self._reader = None
self._writer = None
self._disconnected = asyncio.Event(loop=loop)
self._disconnected.set()
self._send_task = None
self._recv_task = None
self._send_queue = asyncio.Queue(1)
self._recv_queue = asyncio.Queue(1)
async def connect(self):
"""
Establishes a connection with the server.
"""
self._reader, self._writer = await asyncio.open_connection(
self._ip, self._port, loop=self._loop)
self._disconnected.clear()
self._send_task = self._loop.create_task(self._send_loop())
self._recv_task = self._loop.create_task(self._recv_loop())
def disconnect(self):
"""
Disconnects from the server.
"""
self._disconnected.set()
self._writer.close()
def send(self, data):
"""
Sends a packet of data through this connection mode.
This method returns a coroutine.
"""
return self._send_queue.put(data)
def recv(self):
"""
Receives a packet of data through this connection mode.
This method returns a coroutine.
"""
return self._recv_queue.get()
# TODO Get/put to the queue with cancellation
async def _send_loop(self):
"""
This loop is constantly popping items off the queue to send them.
"""
while not self._disconnected.is_set():
self._send(await self._send_queue.get())
await self._writer.drain()
async def _recv_loop(self):
"""
This loop is constantly putting items on the queue as they're read.
"""
while not self._disconnected.is_set():
data = await self._recv()
await self._recv_queue.put(data)
@abc.abstractmethod
def _send(self, data):
"""
This method should be implemented differently under each
connection mode and serialize the data into the packet
the way it should be sent through `self._writer`.
"""
raise NotImplementedError
@abc.abstractmethod
async def _recv(self):
"""
This method should be implemented differently under each
connection mode and deserialize the data from the packet
the way it should be read from `self._reader`.
"""
raise NotImplementedError
|
|
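To illustrate how the abstract Connection class above is meant to be filled in, here is a hypothetical subclass that frames packets with a 4-byte little-endian length prefix. This is only a sketch: the framing scheme is an assumption for the example, not something taken from the original commit.
import struct

class LengthPrefixedConnection(Connection):
    def _send(self, data):
        # Frame each outgoing packet as <4-byte little-endian length><payload>.
        self._writer.write(struct.pack('<i', len(data)) + data)

    async def _recv(self):
        # Read the 4-byte length header, then exactly that many payload bytes.
        length = struct.unpack('<i', await self._reader.readexactly(4))[0]
        return await self._reader.readexactly(length)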
35942980d9cd25be3ca6b28ab1eec92f55556f5f
|
snippet/example/python/ratelimit.py
|
snippet/example/python/ratelimit.py
|
import time
from threading import Thread
from queue import Queue
class RateLimiter:
def __init__(self, limit, delay=0.01):
num = int(limit * delay)
if num < 1:
raise ValueError("limit * delay < 1")
self._limit_num = limit
self._delay = delay
self._num_per_delay = num
self._queue = Queue(limit)
self._thread = Thread(target=self._start)
self._thread.daemon = True
self._thread.start()
def _start(self):
total = int(self._limit_num * self._delay)
while True:
diff = total - self._queue.qsize()
while diff > 0:
self._queue.put(None)
diff -= 1
time.sleep(self._delay)
def get_token(self):
self._queue.get()
self._queue.task_done()
if __name__ == "__main__":
num = 100
r = RateLimiter(10, 0.1)
while num:
r.get_token()
print(num)
num -= 1
|
Add the python rate limiting example
|
Add the python rate limiting example
|
Python
|
mit
|
xgfone/snippet,xgfone/snippet,xgfone/snippet,xgfone/snippet,xgfone/snippet,xgfone/snippet,xgfone/snippet
|
Add the python rate limiting example
|
import time
from threading import Thread
from queue import Queue
class RateLimiter:
def __init__(self, limit, delay=0.01):
num = int(limit * delay)
if num < 1:
raise ValueError("limit * delay < 1")
self._limit_num = limit
self._delay = delay
self._num_per_delay = num
self._queue = Queue(limit)
self._thread = Thread(target=self._start)
self._thread.daemon = True
self._thread.start()
def _start(self):
total = int(self._limit_num * self._delay)
while True:
diff = total - self._queue.qsize()
while diff > 0:
self._queue.put(None)
diff -= 1
time.sleep(self._delay)
def get_token(self):
self._queue.get()
self._queue.task_done()
if __name__ == "__main__":
num = 100
r = RateLimiter(10, 0.1)
while num:
r.get_token()
print(num)
num -= 1
|
<commit_before><commit_msg>Add the python rate limiting example<commit_after>
|
import time
from threading import Thread
from queue import Queue
class RateLimiter:
def __init__(self, limit, delay=0.01):
num = int(limit * delay)
if num < 1:
raise ValueError("limit * delay < 1")
self._limit_num = limit
self._delay = delay
self._num_per_delay = num
self._queue = Queue(limit)
self._thread = Thread(target=self._start)
self._thread.daemon = True
self._thread.start()
def _start(self):
total = int(self._limit_num * self._delay)
while True:
diff = total - self._queue.qsize()
while diff > 0:
self._queue.put(None)
diff -= 1
time.sleep(self._delay)
def get_token(self):
self._queue.get()
self._queue.task_done()
if __name__ == "__main__":
num = 100
r = RateLimiter(10, 0.1)
while num:
r.get_token()
print(num)
num -= 1
|
Add the python rate limiting exampleimport time
from threading import Thread
from queue import Queue
class RateLimiter:
def __init__(self, limit, delay=0.01):
num = int(limit * delay)
if num < 1:
raise ValueError("limit * delay < 1")
self._limit_num = limit
self._delay = delay
self._num_per_delay = num
self._queue = Queue(limit)
self._thread = Thread(target=self._start)
self._thread.daemon = True
self._thread.start()
def _start(self):
total = int(self._limit_num * self._delay)
while True:
diff = total - self._queue.qsize()
while diff > 0:
self._queue.put(None)
diff -= 1
time.sleep(self._delay)
def get_token(self):
self._queue.get()
self._queue.task_done()
if __name__ == "__main__":
num = 100
r = RateLimiter(10, 0.1)
while num:
r.get_token()
print(num)
num -= 1
|
<commit_before><commit_msg>Add the python rate limiting example<commit_after>import time
from threading import Thread
from queue import Queue
class RateLimiter:
def __init__(self, limit, delay=0.01):
num = int(limit * delay)
if num < 1:
raise ValueError("limit * delay < 1")
self._limit_num = limit
self._delay = delay
self._num_per_delay = num
self._queue = Queue(limit)
self._thread = Thread(target=self._start)
self._thread.daemon = True
self._thread.start()
def _start(self):
total = int(self._limit_num * self._delay)
while True:
diff = total - self._queue.qsize()
while diff > 0:
self._queue.put(None)
diff -= 1
time.sleep(self._delay)
def get_token(self):
self._queue.get()
self._queue.task_done()
if __name__ == "__main__":
num = 100
r = RateLimiter(10, 0.1)
while num:
r.get_token()
print(num)
num -= 1
|
|
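The __main__ block above only exercises the limiter from a single thread; a sketch of sharing one limiter across several worker threads (the worker count, rate, and work loop here are arbitrary) could look like this:
import threading

limiter = RateLimiter(20, 0.1)   # roughly 20 tokens per second, refilled every 100 ms

def worker(worker_id):
    for _ in range(10):
        limiter.get_token()      # blocks until a token is available
        print("worker", worker_id, "did one unit of work")

threads = [threading.Thread(target=worker, args=(i,)) for i in range(4)]
for t in threads:
    t.start()
for t in threads:
    t.join()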
35ea2314cb8620020726627810b7aade27fba14b
|
sdoutil.py
|
sdoutil.py
|
import logging
logging.basicConfig(level=logging.INFO) # dev_appserver.py --log_level debug .
log = logging.getLogger(__name__)
import io
import threading
from google.appengine.api import app_identity
from google.appengine.api import mail
def sdo_send_mail(to=None,subject=None,msg=None):
if not to:
log.error("No mail recipient!")
return
if not subject:
subject = "Infomation from " + app_identity.get_application_id()
if not msg:
msg = "Empty message"
sender = 'manager@{}.appspotmail.com'.format(app_identity.get_application_id())
log.info("sdo_send_mail To: %s From: %s Subject: %s Msg: %s" % (to,sender,subject,msg))
mail.send_mail(sender=sender,to=to,subject=subject,body=msg)
# Wrap io.StringIO to ensure that all things written are of type unicode
class sdoStringIO(io.StringIO):
def write(self,s):
if isinstance(s,str):
s = s.decode('utf-8')
if not isinstance(s,unicode):
s = unicode(s)
ret = super(sdoStringIO,self).write(s)
return ret
# On thread variable storage
ThreadVars = threading.local()
def getAppVar(var):
ret = getattr(ThreadVars, var, None)
#log.debug("got var %s as %s" % (var,ret))
return ret
def setAppVar(var,val):
#log.debug("Setting var %s to %s" % (var,val))
setattr(ThreadVars,var,val)
CLOUDSTAT = "CloudStat"
CLOUDEXTRAMETA = "CloudExtraMeta"
|
Move some shared functionality into util file
|
Move some shared functionality into util file
|
Python
|
apache-2.0
|
vholland/schemaorg,vholland/schemaorg,vholland/schemaorg,schemaorg/schemaorg,vholland/schemaorg,schemaorg/schemaorg,schemaorg/schemaorg,schemaorg/schemaorg,schemaorg/schemaorg
|
Move some shared functionality into util file
|
import logging
logging.basicConfig(level=logging.INFO) # dev_appserver.py --log_level debug .
log = logging.getLogger(__name__)
import io
import threading
from google.appengine.api import app_identity
from google.appengine.api import mail
def sdo_send_mail(to=None,subject=None,msg=None):
if not to:
log.error("No mail recipient!")
return
if not subject:
subject = "Infomation from " + app_identity.get_application_id()
if not msg:
msg = "Empty message"
sender = 'manager@{}.appspotmail.com'.format(app_identity.get_application_id())
log.info("sdo_send_mail To: %s From: %s Subject: %s Msg: %s" % (to,sender,subject,msg))
mail.send_mail(sender=sender,to=to,subject=subject,body=msg)
# Wrap io.StringIO to ensure that all things written are of type unicode
class sdoStringIO(io.StringIO):
def write(self,s):
if isinstance(s,str):
s = s.decode('utf-8')
if not isinstance(s,unicode):
s = unicode(s)
ret = super(sdoStringIO,self).write(s)
return ret
# On thread variable storage
ThreadVars = threading.local()
def getAppVar(var):
ret = getattr(ThreadVars, var, None)
#log.debug("got var %s as %s" % (var,ret))
return ret
def setAppVar(var,val):
#log.debug("Setting var %s to %s" % (var,val))
setattr(ThreadVars,var,val)
CLOUDSTAT = "CloudStat"
CLOUDEXTRAMETA = "CloudExtraMeta"
|
<commit_before><commit_msg>Move some shared functionality into util file<commit_after>
|
import logging
logging.basicConfig(level=logging.INFO) # dev_appserver.py --log_level debug .
log = logging.getLogger(__name__)
import io
import threading
from google.appengine.api import app_identity
from google.appengine.api import mail
def sdo_send_mail(to=None,subject=None,msg=None):
if not to:
log.error("No mail recipient!")
return
if not subject:
subject = "Infomation from " + app_identity.get_application_id()
if not msg:
msg = "Empty message"
sender = 'manager@{}.appspotmail.com'.format(app_identity.get_application_id())
log.info("sdo_send_mail To: %s From: %s Subject: %s Msg: %s" % (to,sender,subject,msg))
mail.send_mail(sender=sender,to=to,subject=subject,body=msg)
# Wrap io.StringIO to ensure that all things written are of type unicode
class sdoStringIO(io.StringIO):
def write(self,s):
if isinstance(s,str):
s = s.decode('utf-8')
if not isinstance(s,unicode):
s = unicode(s)
ret = super(sdoStringIO,self).write(s)
return ret
# On thread variable storage
ThreadVars = threading.local()
def getAppVar(var):
ret = getattr(ThreadVars, var, None)
#log.debug("got var %s as %s" % (var,ret))
return ret
def setAppVar(var,val):
#log.debug("Setting var %s to %s" % (var,val))
setattr(ThreadVars,var,val)
CLOUDSTAT = "CloudStat"
CLOUDEXTRAMETA = "CloudExtraMeta"
|
Move some shared functionality into util fileimport logging
logging.basicConfig(level=logging.INFO) # dev_appserver.py --log_level debug .
log = logging.getLogger(__name__)
import io
import threading
from google.appengine.api import app_identity
from google.appengine.api import mail
def sdo_send_mail(to=None,subject=None,msg=None):
if not to:
log.error("No mail recipient!")
return
if not subject:
subject = "Infomation from " + app_identity.get_application_id()
if not msg:
msg = "Empty message"
sender = 'manager@{}.appspotmail.com'.format(app_identity.get_application_id())
log.info("sdo_send_mail To: %s From: %s Subject: %s Msg: %s" % (to,sender,subject,msg))
mail.send_mail(sender=sender,to=to,subject=subject,body=msg)
# Wrap io.StringIO to ensure that all things written are of type unicode
class sdoStringIO(io.StringIO):
def write(self,s):
if isinstance(s,str):
s = s.decode('utf-8')
if not isinstance(s,unicode):
s = unicode(s)
ret = super(sdoStringIO,self).write(s)
return ret
# On thread variable storage
ThreadVars = threading.local()
def getAppVar(var):
ret = getattr(ThreadVars, var, None)
#log.debug("got var %s as %s" % (var,ret))
return ret
def setAppVar(var,val):
#log.debug("Setting var %s to %s" % (var,val))
setattr(ThreadVars,var,val)
CLOUDSTAT = "CloudStat"
CLOUDEXTRAMETA = "CloudExtraMeta"
|
<commit_before><commit_msg>Move some shared functionality into util file<commit_after>import logging
logging.basicConfig(level=logging.INFO) # dev_appserver.py --log_level debug .
log = logging.getLogger(__name__)
import io
import threading
from google.appengine.api import app_identity
from google.appengine.api import mail
def sdo_send_mail(to=None,subject=None,msg=None):
if not to:
log.error("No mail recipient!")
return
if not subject:
subject = "Infomation from " + app_identity.get_application_id()
if not msg:
msg = "Empty message"
sender = 'manager@{}.appspotmail.com'.format(app_identity.get_application_id())
log.info("sdo_send_mail To: %s From: %s Subject: %s Msg: %s" % (to,sender,subject,msg))
mail.send_mail(sender=sender,to=to,subject=subject,body=msg)
# Wrap io.StringIO to ensure that all things written are of type unicode
class sdoStringIO(io.StringIO):
def write(self,s):
if isinstance(s,str):
s = s.decode('utf-8')
if not isinstance(s,unicode):
s = unicode(s)
ret = super(sdoStringIO,self).write(s)
return ret
# On thread variable storage
ThreadVars = threading.local()
def getAppVar(var):
ret = getattr(ThreadVars, var, None)
#log.debug("got var %s as %s" % (var,ret))
return ret
def setAppVar(var,val):
#log.debug("Setting var %s to %s" % (var,val))
setattr(ThreadVars,var,val)
CLOUDSTAT = "CloudStat"
CLOUDEXTRAMETA = "CloudExtraMeta"
|
|
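A short sketch of how the thread-local helpers above might be used from request-handling code; the recipient address and status values are made up for illustration and are not part of the original commit:
setAppVar(CLOUDSTAT, 500)          # stash a per-thread status code
status = getAppVar(CLOUDSTAT)      # read it back later on the same thread
if status and status >= 400:
    sdo_send_mail(to="ops@example.org",
                  subject="Problem detected",
                  msg="Request finished with status %s" % status)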
2894e0b8c99e57f5318987309cf2d5861a82d560
|
custom/enikshay/management/commands/bad_private_episodes.py
|
custom/enikshay/management/commands/bad_private_episodes.py
|
from __future__ import absolute_import, print_function
import csv
import datetime
import six
from django.core.management.base import BaseCommand
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
from corehq.util.log import with_progress_bar
from custom.enikshay.case_utils import (
get_all_episode_ids,
get_all_occurrence_cases_from_person,
iter_all_active_person_episode_cases,
)
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('domain')
def handle(self, domain, **options):
ca = CaseAccessors(domain)
case_ids = get_all_episode_ids(domain)
cases = iter_all_active_person_episode_cases(domain, case_ids, sector='private')
bad_episodes = {}
for person, _ in with_progress_bar(cases, length=len(case_ids)):
occurrence_cases = get_all_occurrence_cases_from_person(domain, person.case_id)
for occurrence_case in occurrence_cases:
episode_cases = ca.get_reverse_indexed_cases([occurrence_case.case_id])
open_episode_cases = [case for case in episode_cases
if not case.closed and case.type == 'episode' and
case.dynamic_case_properties().get('episode_type') == "confirmed_tb"]
if len(open_episode_cases) > 1:
bad_episodes[occurrence_case] = [c.case_id for c in open_episode_cases]
print(len(bad_episodes), " bad episodes")
filename = 'bad_episodes-{}.csv'.format(datetime.datetime.now().strftime('%Y-%m-%d_%H.%M.%S'))
with open(filename, 'w') as f:
writer = csv.writer(f)
writer.writerow(['occurrence_id', 'episode_ids'])
for occurrence_id, bad_cases in six.iteritems(bad_episodes):
bad_cases.insert(0, occurrence_id)
writer.writerow(bad_cases)
print("Output saved in: {}".format(filename))
|
Add management command to find bad case structures
|
Add management command to find bad case structures
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
Add management command to find bad case structures
|
from __future__ import absolute_import, print_function
import csv
import datetime
import six
from django.core.management.base import BaseCommand
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
from corehq.util.log import with_progress_bar
from custom.enikshay.case_utils import (
get_all_episode_ids,
get_all_occurrence_cases_from_person,
iter_all_active_person_episode_cases,
)
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('domain')
def handle(self, domain, **options):
ca = CaseAccessors(domain)
case_ids = get_all_episode_ids(domain)
cases = iter_all_active_person_episode_cases(domain, case_ids, sector='private')
bad_episodes = {}
for person, _ in with_progress_bar(cases, length=len(case_ids)):
occurrence_cases = get_all_occurrence_cases_from_person(domain, person.case_id)
for occurrence_case in occurrence_cases:
episode_cases = ca.get_reverse_indexed_cases([occurrence_case.case_id])
open_episode_cases = [case for case in episode_cases
if not case.closed and case.type == 'episode' and
case.dynamic_case_properties().get('episode_type') == "confirmed_tb"]
if len(open_episode_cases) > 1:
bad_episodes[occurrence_case] = [c.case_id for c in open_episode_cases]
print(len(bad_episodes), " bad episodes")
filename = 'bad_episodes-{}.csv'.format(datetime.datetime.now().strftime('%Y-%m-%d_%H.%M.%S'))
with open(filename, 'w') as f:
writer = csv.writer(f)
writer.writerow(['occurrence_id', 'episode_ids'])
for occurrence_id, bad_cases in six.iteritems(bad_episodes):
bad_cases.insert(0, occurrence_id)
writer.writerow(bad_cases)
print("Output saved in: {}".format(filename))
|
<commit_before><commit_msg>Add management command to find bad case structures<commit_after>
|
from __future__ import absolute_import, print_function
import csv
import datetime
import six
from django.core.management.base import BaseCommand
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
from corehq.util.log import with_progress_bar
from custom.enikshay.case_utils import (
get_all_episode_ids,
get_all_occurrence_cases_from_person,
iter_all_active_person_episode_cases,
)
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('domain')
def handle(self, domain, **options):
ca = CaseAccessors(domain)
case_ids = get_all_episode_ids(domain)
cases = iter_all_active_person_episode_cases(domain, case_ids, sector='private')
bad_episodes = {}
for person, _ in with_progress_bar(cases, length=len(case_ids)):
occurrence_cases = get_all_occurrence_cases_from_person(domain, person.case_id)
for occurrence_case in occurrence_cases:
episode_cases = ca.get_reverse_indexed_cases([occurrence_case.case_id])
open_episode_cases = [case for case in episode_cases
if not case.closed and case.type == 'episode' and
case.dynamic_case_properties().get('episode_type') == "confirmed_tb"]
if len(open_episode_cases) > 1:
bad_episodes[occurrence_case] = [c.case_id for c in open_episode_cases]
print(len(bad_episodes), " bad episodes")
filename = 'bad_episodes-{}.csv'.format(datetime.datetime.now().strftime('%Y-%m-%d_%H.%M.%S'))
with open(filename, 'w') as f:
writer = csv.writer(f)
writer.writerow(['occurrence_id', 'episode_ids'])
for occurrence_id, bad_cases in six.iteritems(bad_episodes):
bad_cases.insert(0, occurrence_id)
writer.writerow(bad_cases)
print("Output saved in: {}".format(filename))
|
Add management command to find bad case structuresfrom __future__ import absolute_import, print_function
import csv
import datetime
import six
from django.core.management.base import BaseCommand
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
from corehq.util.log import with_progress_bar
from custom.enikshay.case_utils import (
get_all_episode_ids,
get_all_occurrence_cases_from_person,
iter_all_active_person_episode_cases,
)
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('domain')
def handle(self, domain, **options):
ca = CaseAccessors(domain)
case_ids = get_all_episode_ids(domain)
cases = iter_all_active_person_episode_cases(domain, case_ids, sector='private')
bad_episodes = {}
for person, _ in with_progress_bar(cases, length=len(case_ids)):
occurrence_cases = get_all_occurrence_cases_from_person(domain, person.case_id)
for occurrence_case in occurrence_cases:
episode_cases = ca.get_reverse_indexed_cases([occurrence_case.case_id])
open_episode_cases = [case for case in episode_cases
if not case.closed and case.type == 'episode' and
case.dynamic_case_properties().get('episode_type') == "confirmed_tb"]
if len(open_episode_cases) > 1:
bad_episodes[occurrence_case] = [c.case_id for c in open_episode_cases]
print(len(bad_episodes), " bad episodes")
filename = 'bad_episodes-{}.csv'.format(datetime.datetime.now().strftime('%Y-%m-%d_%H.%M.%S'))
with open(filename, 'w') as f:
writer = csv.writer(f)
writer.writerow(['occurrence_id', 'episode_ids'])
for occurrence_id, bad_cases in six.iteritems(bad_episodes):
bad_cases.insert(0, occurrence_id)
writer.writerow(bad_cases)
print("Output saved in: {}".format(filename))
|
<commit_before><commit_msg>Add management command to find bad case structures<commit_after>from __future__ import absolute_import, print_function
import csv
import datetime
import six
from django.core.management.base import BaseCommand
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
from corehq.util.log import with_progress_bar
from custom.enikshay.case_utils import (
get_all_episode_ids,
get_all_occurrence_cases_from_person,
iter_all_active_person_episode_cases,
)
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('domain')
def handle(self, domain, **options):
ca = CaseAccessors(domain)
case_ids = get_all_episode_ids(domain)
cases = iter_all_active_person_episode_cases(domain, case_ids, sector='private')
bad_episodes = {}
for person, _ in with_progress_bar(cases, length=len(case_ids)):
occurrence_cases = get_all_occurrence_cases_from_person(domain, person.case_id)
for occurrence_case in occurrence_cases:
episode_cases = ca.get_reverse_indexed_cases([occurrence_case.case_id])
open_episode_cases = [case for case in episode_cases
if not case.closed and case.type == 'episode' and
case.dynamic_case_properties().get('episode_type') == "confirmed_tb"]
if len(open_episode_cases) > 1:
bad_episodes[occurrence_case] = [c.case_id for c in open_episode_cases]
print(len(bad_episodes), " bad episodes")
filename = 'bad_episodes-{}.csv'.format(datetime.datetime.now().strftime('%Y-%m-%d_%H.%M.%S'))
with open(filename, 'w') as f:
writer = csv.writer(f)
writer.writerow(['occurrence_id', 'episode_ids'])
for occurrence_id, bad_cases in six.iteritems(bad_episodes):
bad_cases.insert(0, occurrence_id)
writer.writerow(bad_cases)
print("Output saved in: {}".format(filename))
|
|
de3c03af76ebb4f092f798a390a463fe78cc27f0
|
examples/device/audio_test/src/plot_audio_samples.py
|
examples/device/audio_test/src/plot_audio_samples.py
|
import sounddevice as sd
import matplotlib.pyplot as plt
import numpy as np
if __name__ == '__main__':
# devList = sd.query_devices()
# print(devList)
fs = 48000 # Sample rate
duration = 100e-3 # Duration of recording
device = 'Microphone (MicNode) MME' # MME is needed since there is more than one MicNode device API (at least in Windows)
myrecording = sd.rec(int(duration * fs), samplerate=fs, channels=1, dtype='int16', device=device)
print('Waiting...')
sd.wait() # Wait until recording is finished
print('Done!')
time = np.arange(0, duration, 1 / fs) # time vector
plt.plot(time, myrecording)
plt.xlabel('Time [s]')
plt.ylabel('Amplitude')
plt.title('MicNode')
plt.show()
|
Add python script to plot audio sample data.
|
Add python script to plot audio sample data.
|
Python
|
mit
|
hathach/tinyusb,hathach/tinyusb,hathach/tinyusb
|
Add python script to plot audio sample data.
|
import sounddevice as sd
import matplotlib.pyplot as plt
import numpy as np
if __name__ == '__main__':
# devList = sd.query_devices()
# print(devList)
fs = 48000 # Sample rate
duration = 100e-3 # Duration of recording
device = 'Microphone (MicNode) MME' # MME is needed since there is more than one MicNode device API (at least in Windows)
myrecording = sd.rec(int(duration * fs), samplerate=fs, channels=1, dtype='int16', device=device)
print('Waiting...')
sd.wait() # Wait until recording is finished
print('Done!')
time = np.arange(0, duration, 1 / fs) # time vector
plt.plot(time, myrecording)
plt.xlabel('Time [s]')
plt.ylabel('Amplitude')
plt.title('MicNode')
plt.show()
|
<commit_before><commit_msg>Add python script to plot audio sample data.<commit_after>
|
import sounddevice as sd
import matplotlib.pyplot as plt
import numpy as np
if __name__ == '__main__':
# devList = sd.query_devices()
# print(devList)
fs = 48000 # Sample rate
duration = 100e-3 # Duration of recording
device = 'Microphone (MicNode) MME' # MME is needed since there is more than one MicNode device API (at least in Windows)
myrecording = sd.rec(int(duration * fs), samplerate=fs, channels=1, dtype='int16', device=device)
print('Waiting...')
sd.wait() # Wait until recording is finished
print('Done!')
time = np.arange(0, duration, 1 / fs) # time vector
plt.plot(time, myrecording)
plt.xlabel('Time [s]')
plt.ylabel('Amplitude')
plt.title('MicNode')
plt.show()
|
Add python script to plot audio sample data.import sounddevice as sd
import matplotlib.pyplot as plt
import numpy as np
if __name__ == '__main__':
# devList = sd.query_devices()
# print(devList)
fs = 48000 # Sample rate
duration = 100e-3 # Duration of recording
device = 'Microphone (MicNode) MME' # MME is needed since there is more than one MicNode device API (at least in Windows)
myrecording = sd.rec(int(duration * fs), samplerate=fs, channels=1, dtype='int16', device=device)
print('Waiting...')
sd.wait() # Wait until recording is finished
print('Done!')
time = np.arange(0, duration, 1 / fs) # time vector
plt.plot(time, myrecording)
plt.xlabel('Time [s]')
plt.ylabel('Amplitude')
plt.title('MicNode')
plt.show()
|
<commit_before><commit_msg>Add python script to plot audio sample data.<commit_after>import sounddevice as sd
import matplotlib.pyplot as plt
import numpy as np
if __name__ == '__main__':
# devList = sd.query_devices()
# print(devList)
fs = 48000 # Sample rate
duration = 100e-3 # Duration of recording
device = 'Microphone (MicNode) MME' # MME is needed since there is more than one MicNode device API (at least in Windows)
myrecording = sd.rec(int(duration * fs), samplerate=fs, channels=1, dtype='int16', device=device)
print('Waiting...')
sd.wait() # Wait until recording is finished
print('Done!')
time = np.arange(0, duration, 1 / fs) # time vector
plt.plot(time, myrecording)
plt.xlabel('Time [s]')
plt.ylabel('Amplitude')
plt.title('MicNode')
plt.show()
|
|
0c3f5008dd66b0bb8dfd2a4993def7d0c7a5bf84
|
greyjay/articles/migrations/0091_articlepage_reading_time.py
|
greyjay/articles/migrations/0091_articlepage_reading_time.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-05-03 18:13
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('articles', '0090_auto_20170502_1621'),
]
operations = [
migrations.AddField(
model_name='articlepage',
name='reading_time',
field=models.PositiveIntegerField(default=0),
),
]
|
Add reading_time to article pages.
|
Add reading_time to article pages.
|
Python
|
mit
|
CIGIHub/greyjay,CIGIHub/greyjay,CIGIHub/greyjay
|
Add reading_time to article pages.
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-05-03 18:13
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('articles', '0090_auto_20170502_1621'),
]
operations = [
migrations.AddField(
model_name='articlepage',
name='reading_time',
field=models.PositiveIntegerField(default=0),
),
]
|
<commit_before><commit_msg>Add reading_time to article pages.<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-05-03 18:13
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('articles', '0090_auto_20170502_1621'),
]
operations = [
migrations.AddField(
model_name='articlepage',
name='reading_time',
field=models.PositiveIntegerField(default=0),
),
]
|
Add reading_time to article pages.# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-05-03 18:13
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('articles', '0090_auto_20170502_1621'),
]
operations = [
migrations.AddField(
model_name='articlepage',
name='reading_time',
field=models.PositiveIntegerField(default=0),
),
]
|
<commit_before><commit_msg>Add reading_time to article pages.<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-05-03 18:13
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('articles', '0090_auto_20170502_1621'),
]
operations = [
migrations.AddField(
model_name='articlepage',
name='reading_time',
field=models.PositiveIntegerField(default=0),
),
]
|
|
bef2796fc1df98d15c7198ee26b2526f42150b59
|
infrastructure/migrations/0016_auto_20210907_0131.py
|
infrastructure/migrations/0016_auto_20210907_0131.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2021-09-06 23:31
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('infrastructure', '0015_financialyear_active'),
]
operations = [
migrations.CreateModel(
name='AnnualSpendFile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('document', models.FileField(upload_to='annual/')),
('status', models.IntegerField(default=3)),
('financial_year', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='infrastructure.FinancialYear')),
],
),
migrations.AlterField(
model_name='budgetphase',
name='code',
field=models.CharField(blank=True, max_length=10),
),
]
|
Add migration for annual spend changes
|
Add migration for annual spend changes
|
Python
|
mit
|
Code4SA/municipal-data,Code4SA/municipal-data,Code4SA/municipal-data,Code4SA/municipal-data
|
Add migration for annual spend changes
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2021-09-06 23:31
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('infrastructure', '0015_financialyear_active'),
]
operations = [
migrations.CreateModel(
name='AnnualSpendFile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('document', models.FileField(upload_to='annual/')),
('status', models.IntegerField(default=3)),
('financial_year', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='infrastructure.FinancialYear')),
],
),
migrations.AlterField(
model_name='budgetphase',
name='code',
field=models.CharField(blank=True, max_length=10),
),
]
|
<commit_before><commit_msg>Add migration for annual spend changes<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2021-09-06 23:31
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('infrastructure', '0015_financialyear_active'),
]
operations = [
migrations.CreateModel(
name='AnnualSpendFile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('document', models.FileField(upload_to='annual/')),
('status', models.IntegerField(default=3)),
('financial_year', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='infrastructure.FinancialYear')),
],
),
migrations.AlterField(
model_name='budgetphase',
name='code',
field=models.CharField(blank=True, max_length=10),
),
]
|
Add migration for annual spend changes# -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2021-09-06 23:31
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('infrastructure', '0015_financialyear_active'),
]
operations = [
migrations.CreateModel(
name='AnnualSpendFile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('document', models.FileField(upload_to='annual/')),
('status', models.IntegerField(default=3)),
('financial_year', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='infrastructure.FinancialYear')),
],
),
migrations.AlterField(
model_name='budgetphase',
name='code',
field=models.CharField(blank=True, max_length=10),
),
]
|
<commit_before><commit_msg>Add migration for annual spend changes<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2021-09-06 23:31
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('infrastructure', '0015_financialyear_active'),
]
operations = [
migrations.CreateModel(
name='AnnualSpendFile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('document', models.FileField(upload_to='annual/')),
('status', models.IntegerField(default=3)),
('financial_year', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='infrastructure.FinancialYear')),
],
),
migrations.AlterField(
model_name='budgetphase',
name='code',
field=models.CharField(blank=True, max_length=10),
),
]
|
|
35eff67c8be55abb452181d035d081739fecbf8f
|
test/ut/test_plugin_text.py
|
test/ut/test_plugin_text.py
|
#!/usr/bin/env python3
#-*- encoding: utf-8 -*-
import os, sys, tempfile, unittest
import lxml.etree as etree
ECMDS_INSTALL_DIR = os.path.normpath(os.path.join(
os.path.dirname(os.path.realpath(sys.argv[0])),
"..", ".."
))
sys.path.insert(1, ECMDS_INSTALL_DIR + os.sep + 'lib')
from net.ecromedos.error import ECMDSPluginError
import net.ecromedos.plugins.text as text
class UTTestPluginText(unittest.TestCase):
def test_escapeLatexSpecialChars(self):
content = "<root>Here comes text: []{}#&_%$^\\~-:;!?\"`'=\n</root>"
root = etree.fromstring(content)
plugin = text.getInstance({})
root.text = plugin.process(root.text, "latex")
plugin.flush()
tree = etree.ElementTree(element=root)
result = etree.tostring(tree)
expected_result = b'<root>Here comes text{}{\\string:}{} {[}{]}\\{{}\\}{}\\#{}\\&{}\\_{}\\%{}\\${}\\^{}\\textbackslash{}\\textasciitilde{}{}{\\string-}{}{}{\\string:}{}{}{\\string;}{}{}{\\string!}{}{}{\\string?}{}{}{\\string"}{}{}{\\string`}{}{}{\\string\'}{}{}{\\string=}{}\n</root>'
self.assertEqual(result, expected_result)
#end function
#end class
if __name__ == "__main__":
unittest.main()
|
Add test for text plugin
|
Add test for text plugin
|
Python
|
mit
|
tobijk/ecromedos,tobijk/ecromedos
|
Add test for text plugin
|
#!/usr/bin/env python3
#-*- encoding: utf-8 -*-
import os, sys, tempfile, unittest
import lxml.etree as etree
ECMDS_INSTALL_DIR = os.path.normpath(os.path.join(
os.path.dirname(os.path.realpath(sys.argv[0])),
"..", ".."
))
sys.path.insert(1, ECMDS_INSTALL_DIR + os.sep + 'lib')
from net.ecromedos.error import ECMDSPluginError
import net.ecromedos.plugins.text as text
class UTTestPluginText(unittest.TestCase):
def test_escapeLatexSpecialChars(self):
content = "<root>Here comes text: []{}#&_%$^\\~-:;!?\"`'=\n</root>"
root = etree.fromstring(content)
plugin = text.getInstance({})
root.text = plugin.process(root.text, "latex")
plugin.flush()
tree = etree.ElementTree(element=root)
result = etree.tostring(tree)
expected_result = b'<root>Here comes text{}{\\string:}{} {[}{]}\\{{}\\}{}\\#{}\\&{}\\_{}\\%{}\\${}\\^{}\\textbackslash{}\\textasciitilde{}{}{\\string-}{}{}{\\string:}{}{}{\\string;}{}{}{\\string!}{}{}{\\string?}{}{}{\\string"}{}{}{\\string`}{}{}{\\string\'}{}{}{\\string=}{}\n</root>'
self.assertEqual(result, expected_result)
#end function
#end class
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add test for text plugin<commit_after>
|
#!/usr/bin/env python3
#-*- encoding: utf-8 -*-
import os, sys, tempfile, unittest
import lxml.etree as etree
ECMDS_INSTALL_DIR = os.path.normpath(os.path.join(
os.path.dirname(os.path.realpath(sys.argv[0])),
"..", ".."
))
sys.path.insert(1, ECMDS_INSTALL_DIR + os.sep + 'lib')
from net.ecromedos.error import ECMDSPluginError
import net.ecromedos.plugins.text as text
class UTTestPluginText(unittest.TestCase):
def test_escapeLatexSpecialChars(self):
content = "<root>Here comes text: []{}#&_%$^\\~-:;!?\"`'=\n</root>"
root = etree.fromstring(content)
plugin = text.getInstance({})
root.text = plugin.process(root.text, "latex")
plugin.flush()
tree = etree.ElementTree(element=root)
result = etree.tostring(tree)
expected_result = b'<root>Here comes text{}{\\string:}{} {[}{]}\\{{}\\}{}\\#{}\\&{}\\_{}\\%{}\\${}\\^{}\\textbackslash{}\\textasciitilde{}{}{\\string-}{}{}{\\string:}{}{}{\\string;}{}{}{\\string!}{}{}{\\string?}{}{}{\\string"}{}{}{\\string`}{}{}{\\string\'}{}{}{\\string=}{}\n</root>'
self.assertEqual(result, expected_result)
#end function
#end class
if __name__ == "__main__":
unittest.main()
|
Add test for text plugin#!/usr/bin/env python3
#-*- encoding: utf-8 -*-
import os, sys, tempfile, unittest
import lxml.etree as etree
ECMDS_INSTALL_DIR = os.path.normpath(os.path.join(
os.path.dirname(os.path.realpath(sys.argv[0])),
"..", ".."
))
sys.path.insert(1, ECMDS_INSTALL_DIR + os.sep + 'lib')
from net.ecromedos.error import ECMDSPluginError
import net.ecromedos.plugins.text as text
class UTTestPluginText(unittest.TestCase):
def test_escapeLatexSpecialChars(self):
content = "<root>Here comes text: []{}#&_%$^\\~-:;!?\"`'=\n</root>"
root = etree.fromstring(content)
plugin = text.getInstance({})
root.text = plugin.process(root.text, "latex")
plugin.flush()
tree = etree.ElementTree(element=root)
result = etree.tostring(tree)
expected_result = b'<root>Here comes text{}{\\string:}{} {[}{]}\\{{}\\}{}\\#{}\\&{}\\_{}\\%{}\\${}\\^{}\\textbackslash{}\\textasciitilde{}{}{\\string-}{}{}{\\string:}{}{}{\\string;}{}{}{\\string!}{}{}{\\string?}{}{}{\\string"}{}{}{\\string`}{}{}{\\string\'}{}{}{\\string=}{}\n</root>'
self.assertEqual(result, expected_result)
#end function
#end class
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add test for text plugin<commit_after>#!/usr/bin/env python3
#-*- encoding: utf-8 -*-
import os, sys, tempfile, unittest
import lxml.etree as etree
ECMDS_INSTALL_DIR = os.path.normpath(os.path.join(
os.path.dirname(os.path.realpath(sys.argv[0])),
"..", ".."
))
sys.path.insert(1, ECMDS_INSTALL_DIR + os.sep + 'lib')
from net.ecromedos.error import ECMDSPluginError
import net.ecromedos.plugins.text as text
class UTTestPluginText(unittest.TestCase):
def test_escapeLatexSpecialChars(self):
content = "<root>Here comes text: []{}#&_%$^\\~-:;!?\"`'=\n</root>"
root = etree.fromstring(content)
plugin = text.getInstance({})
root.text = plugin.process(root.text, "latex")
plugin.flush()
tree = etree.ElementTree(element=root)
result = etree.tostring(tree)
expected_result = b'<root>Here comes text{}{\\string:}{} {[}{]}\\{{}\\}{}\\#{}\\&{}\\_{}\\%{}\\${}\\^{}\\textbackslash{}\\textasciitilde{}{}{\\string-}{}{}{\\string:}{}{}{\\string;}{}{}{\\string!}{}{}{\\string?}{}{}{\\string"}{}{}{\\string`}{}{}{\\string\'}{}{}{\\string=}{}\n</root>'
self.assertEqual(result, expected_result)
#end function
#end class
if __name__ == "__main__":
unittest.main()
|
|
c465ecc0db52132c5c1692eb57f6cba8d70ce800
|
Artifactorial/migrations/0006_directory_on_delete.py
|
Artifactorial/migrations/0006_directory_on_delete.py
|
# Generated by Django 2.2.3 on 2019-08-06 09:42
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [("Artifactorial", "0005_share_user")]
operations = [
migrations.AlterField(
model_name="directory",
name="group",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="auth.Group",
),
),
migrations.AlterField(
model_name="directory",
name="user",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to=settings.AUTH_USER_MODEL,
),
),
]
|
Add missing migration after Django upgrade
|
Add missing migration after Django upgrade
|
Python
|
mit
|
ivoire/Artifactorial,ivoire/Artifactorial,ivoire/Artifactorial
|
Add missing migration after Django upgrade
|
# Generated by Django 2.2.3 on 2019-08-06 09:42
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [("Artifactorial", "0005_share_user")]
operations = [
migrations.AlterField(
model_name="directory",
name="group",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="auth.Group",
),
),
migrations.AlterField(
model_name="directory",
name="user",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to=settings.AUTH_USER_MODEL,
),
),
]
|
<commit_before><commit_msg>Add missing migration after Django upgrade<commit_after>
|
# Generated by Django 2.2.3 on 2019-08-06 09:42
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [("Artifactorial", "0005_share_user")]
operations = [
migrations.AlterField(
model_name="directory",
name="group",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="auth.Group",
),
),
migrations.AlterField(
model_name="directory",
name="user",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to=settings.AUTH_USER_MODEL,
),
),
]
|
Add missing migration after Django upgrade# Generated by Django 2.2.3 on 2019-08-06 09:42
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [("Artifactorial", "0005_share_user")]
operations = [
migrations.AlterField(
model_name="directory",
name="group",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="auth.Group",
),
),
migrations.AlterField(
model_name="directory",
name="user",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to=settings.AUTH_USER_MODEL,
),
),
]
|
<commit_before><commit_msg>Add missing migration after Django upgrade<commit_after># Generated by Django 2.2.3 on 2019-08-06 09:42
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [("Artifactorial", "0005_share_user")]
operations = [
migrations.AlterField(
model_name="directory",
name="group",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="auth.Group",
),
),
migrations.AlterField(
model_name="directory",
name="user",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to=settings.AUTH_USER_MODEL,
),
),
]
|
|
46ceb59f9e2c7cd20f3c20a6d79c360243eed06e
|
src/scripts/create_conll.py
|
src/scripts/create_conll.py
|
#!/usr/bin/env python3
""""Create a CoNLL corpus from FrameNet fulltext data tokens
This CoNLL corpus will then be lemmatized using WordNet, and parsed using
TurboParser.
"""
from pathlib import Path
from xml.etree import ElementTree as ET
import os
from nltk.corpus import wordnet
from paths import FRAMENET_FULLTEXT
xmlns = 'http://framenet.icsi.berkeley.edu'
conll_dir = Path(FRAMENET_FULLTEXT).parents[1] / 'framenet_turpobarsed'
os.makedirs(str(conll_dir), exist_ok=True)
for fulltext_filename in Path(FRAMENET_FULLTEXT).glob('*.xml'):
fulltext_xml = ET.ElementTree(file=str(fulltext_filename))
conll_file = open(str(conll_dir / (fulltext_filename.stem + '.conll')), 'w')
for sentence in fulltext_xml.findall('{{{}}}sentence'.format(xmlns)):
word_id = 1
sentence_text = sentence.find('{{{}}}text'.format(xmlns)).text
for word_label in sentence.findall('{{{0}}}annotationSet/{{{0}}}layer[@name="PENN"]/{{{0}}}label'.format(xmlns)):
start = int(word_label.get('start'))
end = int(word_label.get('end'))
word = sentence_text[start:end+1]
morphy_lemma = wordnet.morphy(word.lower())
lemma = morphy_lemma if morphy_lemma is not None else word
print('\t'.join([str(word_id), word, lemma] + ['_'] * 7), file=conll_file)
word_id += 1
print(file=conll_file)
print('Wrote files in {}'.format(str(conll_dir)))
|
Add script to create CoNLL corpus from FN fulltext
|
Add script to create CoNLL corpus from FN fulltext
|
Python
|
agpl-3.0
|
aymara/knowledgesrl,aymara/knowledgesrl
|
Add script to create CoNLL corpus from FN fulltext
|
#!/usr/bin/env python3
""""Create a CoNLL corpus from FrameNet fulltext data tokens
This CoNLL corpus will then be lemmatized using WordNet, and parsed using
TurboParser.
"""
from pathlib import Path
from xml.etree import ElementTree as ET
import os
from nltk.corpus import wordnet
from paths import FRAMENET_FULLTEXT
xmlns = 'http://framenet.icsi.berkeley.edu'
conll_dir = Path(FRAMENET_FULLTEXT).parents[1] / 'framenet_turpobarsed'
os.makedirs(str(conll_dir), exist_ok=True)
for fulltext_filename in Path(FRAMENET_FULLTEXT).glob('*.xml'):
fulltext_xml = ET.ElementTree(file=str(fulltext_filename))
conll_file = open(str(conll_dir / (fulltext_filename.stem + '.conll')), 'w')
for sentence in fulltext_xml.findall('{{{}}}sentence'.format(xmlns)):
word_id = 1
sentence_text = sentence.find('{{{}}}text'.format(xmlns)).text
for word_label in sentence.findall('{{{0}}}annotationSet/{{{0}}}layer[@name="PENN"]/{{{0}}}label'.format(xmlns)):
start = int(word_label.get('start'))
end = int(word_label.get('end'))
word = sentence_text[start:end+1]
morphy_lemma = wordnet.morphy(word.lower())
lemma = morphy_lemma if morphy_lemma is not None else word
print('\t'.join([str(word_id), word, lemma] + ['_'] * 7), file=conll_file)
word_id += 1
print(file=conll_file)
print('Wrote files in {}'.format(str(conll_dir)))
|
<commit_before><commit_msg>Add script to create CoNLL corpus from FN fulltext<commit_after>
|
#!/usr/bin/env python3
""""Create a CoNLL corpus from FrameNet fulltext data tokens
This CoNLL corpus will then be lemmatized using WordNet, and parsed using
TurboParser.
"""
from pathlib import Path
from xml.etree import ElementTree as ET
import os
from nltk.corpus import wordnet
from paths import FRAMENET_FULLTEXT
xmlns = 'http://framenet.icsi.berkeley.edu'
conll_dir = Path(FRAMENET_FULLTEXT).parents[1] / 'framenet_turpobarsed'
os.makedirs(str(conll_dir), exist_ok=True)
for fulltext_filename in Path(FRAMENET_FULLTEXT).glob('*.xml'):
fulltext_xml = ET.ElementTree(file=str(fulltext_filename))
conll_file = open(str(conll_dir / (fulltext_filename.stem + '.conll')), 'w')
for sentence in fulltext_xml.findall('{{{}}}sentence'.format(xmlns)):
word_id = 1
sentence_text = sentence.find('{{{}}}text'.format(xmlns)).text
for word_label in sentence.findall('{{{0}}}annotationSet/{{{0}}}layer[@name="PENN"]/{{{0}}}label'.format(xmlns)):
start = int(word_label.get('start'))
end = int(word_label.get('end'))
word = sentence_text[start:end+1]
morphy_lemma = wordnet.morphy(word.lower())
lemma = morphy_lemma if morphy_lemma is not None else word
print('\t'.join([str(word_id), word, lemma] + ['_'] * 7), file=conll_file)
word_id += 1
print(file=conll_file)
print('Wrote files in {}'.format(str(conll_dir)))
|
Add script to create CoNLL corpus from FN fulltext#!/usr/bin/env python3
""""Create a CoNLL corpus from FrameNet fulltext data tokens
This CoNLL corpus will then be lemmatized using WordNet, and parsed using
TurboParser.
"""
from pathlib import Path
from xml.etree import ElementTree as ET
import os
from nltk.corpus import wordnet
from paths import FRAMENET_FULLTEXT
xmlns = 'http://framenet.icsi.berkeley.edu'
conll_dir = Path(FRAMENET_FULLTEXT).parents[1] / 'framenet_turpobarsed'
os.makedirs(str(conll_dir), exist_ok=True)
for fulltext_filename in Path(FRAMENET_FULLTEXT).glob('*.xml'):
fulltext_xml = ET.ElementTree(file=str(fulltext_filename))
conll_file = open(str(conll_dir / (fulltext_filename.stem + '.conll')), 'w')
for sentence in fulltext_xml.findall('{{{}}}sentence'.format(xmlns)):
word_id = 1
sentence_text = sentence.find('{{{}}}text'.format(xmlns)).text
for word_label in sentence.findall('{{{0}}}annotationSet/{{{0}}}layer[@name="PENN"]/{{{0}}}label'.format(xmlns)):
start = int(word_label.get('start'))
end = int(word_label.get('end'))
word = sentence_text[start:end+1]
morphy_lemma = wordnet.morphy(word.lower())
lemma = morphy_lemma if morphy_lemma is not None else word
print('\t'.join([str(word_id), word, lemma] + ['_'] * 7), file=conll_file)
word_id += 1
print(file=conll_file)
print('Wrote files in {}'.format(str(conll_dir)))
|
<commit_before><commit_msg>Add script to create CoNLL corpus from FN fulltext<commit_after>#!/usr/bin/env python3
""""Create a CoNLL corpus from FrameNet fulltext data tokens
This CoNLL corpus will then be lemmatized using WordNet, and parsed using
TurboParser.
"""
from pathlib import Path
from xml.etree import ElementTree as ET
import os
from nltk.corpus import wordnet
from paths import FRAMENET_FULLTEXT
xmlns = 'http://framenet.icsi.berkeley.edu'
conll_dir = Path(FRAMENET_FULLTEXT).parents[1] / 'framenet_turpobarsed'
os.makedirs(str(conll_dir), exist_ok=True)
for fulltext_filename in Path(FRAMENET_FULLTEXT).glob('*.xml'):
fulltext_xml = ET.ElementTree(file=str(fulltext_filename))
conll_file = open(str(conll_dir / (fulltext_filename.stem + '.conll')), 'w')
for sentence in fulltext_xml.findall('{{{}}}sentence'.format(xmlns)):
word_id = 1
sentence_text = sentence.find('{{{}}}text'.format(xmlns)).text
for word_label in sentence.findall('{{{0}}}annotationSet/{{{0}}}layer[@name="PENN"]/{{{0}}}label'.format(xmlns)):
start = int(word_label.get('start'))
end = int(word_label.get('end'))
word = sentence_text[start:end+1]
morphy_lemma = wordnet.morphy(word.lower())
lemma = morphy_lemma if morphy_lemma is not None else word
print('\t'.join([str(word_id), word, lemma] + ['_'] * 7), file=conll_file)
word_id += 1
print(file=conll_file)
print('Wrote files in {}'.format(str(conll_dir)))
|
|
1b10e46754d2bc7783ff14632f145203db5bb67e
|
features/contributions/migrations/0002_auto_20170309_1515.py
|
features/contributions/migrations/0002_auto_20170309_1515.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-09 14:15
from __future__ import unicode_literals
from django.db import migrations
def copy_texts(apps, schema_editor):
ContentType = apps.get_model('contenttypes.ContentType')
Contribution = apps.get_model('contributions.Contribution')
ContributionText = apps.get_model('contributions.Text')
ReplyKey = apps.get_model('contributions.ReplyKey')
Text = apps.get_model('texts.Text')
id_map = {}
for text in Text.objects.all():
ct = ContributionText.objects.create(text=text.text)
contribution = Contribution.objects.create(
container_type=text.container_type,
container_id=text.container_id,
contribution_id=ct.id,
contribution_type=ContentType.objects.get_for_model(ct),
author=text.author)
contribution.time_created = text.time_created
contribution.save()
id_map[text.id] = contribution.id
for rk in text.replykey_set.all():
ReplyKey.objects.create(contribution=contribution, gestalt=rk.gestalt, key=rk.key)
for text in Text.objects.filter(in_reply_to__isnull=False):
contribution = Contribution.objects.get(id=id_map[text.id])
contribution.in_reply_to_id = id_map[text.in_reply_to_id]
contribution.save()
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
('contributions', '0001_initial'),
('texts', '0008_text_in_reply_to'),
]
operations = [
migrations.RunPython(copy_texts),
]
|
Add migration to copy texts to contributions
|
Add migration to copy texts to contributions
|
Python
|
agpl-3.0
|
stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten
|
Add migration to copy texts to contributions
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-09 14:15
from __future__ import unicode_literals
from django.db import migrations
def copy_texts(apps, schema_editor):
ContentType = apps.get_model('contenttypes.ContentType')
Contribution = apps.get_model('contributions.Contribution')
ContributionText = apps.get_model('contributions.Text')
ReplyKey = apps.get_model('contributions.ReplyKey')
Text = apps.get_model('texts.Text')
id_map = {}
for text in Text.objects.all():
ct = ContributionText.objects.create(text=text.text)
contribution = Contribution.objects.create(
container_type=text.container_type,
container_id=text.container_id,
contribution_id=ct.id,
contribution_type=ContentType.objects.get_for_model(ct),
author=text.author)
contribution.time_created = text.time_created
contribution.save()
id_map[text.id] = contribution.id
for rk in text.replykey_set.all():
ReplyKey.objects.create(contribution=contribution, gestalt=rk.gestalt, key=rk.key)
for text in Text.objects.filter(in_reply_to__isnull=False):
contribution = Contribution.objects.get(id=id_map[text.id])
contribution.in_reply_to_id = id_map[text.in_reply_to_id]
contribution.save()
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
('contributions', '0001_initial'),
('texts', '0008_text_in_reply_to'),
]
operations = [
migrations.RunPython(copy_texts),
]
|
<commit_before><commit_msg>Add migration to copy texts to contributions<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-09 14:15
from __future__ import unicode_literals
from django.db import migrations
def copy_texts(apps, schema_editor):
ContentType = apps.get_model('contenttypes.ContentType')
Contribution = apps.get_model('contributions.Contribution')
ContributionText = apps.get_model('contributions.Text')
ReplyKey = apps.get_model('contributions.ReplyKey')
Text = apps.get_model('texts.Text')
id_map = {}
for text in Text.objects.all():
ct = ContributionText.objects.create(text=text.text)
contribution = Contribution.objects.create(
container_type=text.container_type,
container_id=text.container_id,
contribution_id=ct.id,
contribution_type=ContentType.objects.get_for_model(ct),
author=text.author)
contribution.time_created = text.time_created
contribution.save()
id_map[text.id] = contribution.id
for rk in text.replykey_set.all():
ReplyKey.objects.create(contribution=contribution, gestalt=rk.gestalt, key=rk.key)
for text in Text.objects.filter(in_reply_to__isnull=False):
contribution = Contribution.objects.get(id=id_map[text.id])
contribution.in_reply_to_id = id_map[text.in_reply_to_id]
contribution.save()
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
('contributions', '0001_initial'),
('texts', '0008_text_in_reply_to'),
]
operations = [
migrations.RunPython(copy_texts),
]
|
Add migration to copy texts to contributions# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-09 14:15
from __future__ import unicode_literals
from django.db import migrations
def copy_texts(apps, schema_editor):
ContentType = apps.get_model('contenttypes.ContentType')
Contribution = apps.get_model('contributions.Contribution')
ContributionText = apps.get_model('contributions.Text')
ReplyKey = apps.get_model('contributions.ReplyKey')
Text = apps.get_model('texts.Text')
id_map = {}
for text in Text.objects.all():
ct = ContributionText.objects.create(text=text.text)
contribution = Contribution.objects.create(
container_type=text.container_type,
container_id=text.container_id,
contribution_id=ct.id,
contribution_type=ContentType.objects.get_for_model(ct),
author=text.author)
contribution.time_created = text.time_created
contribution.save()
id_map[text.id] = contribution.id
for rk in text.replykey_set.all():
ReplyKey.objects.create(contribution=contribution, gestalt=rk.gestalt, key=rk.key)
for text in Text.objects.filter(in_reply_to__isnull=False):
contribution = Contribution.objects.get(id=id_map[text.id])
contribution.in_reply_to_id = id_map[text.in_reply_to_id]
contribution.save()
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
('contributions', '0001_initial'),
('texts', '0008_text_in_reply_to'),
]
operations = [
migrations.RunPython(copy_texts),
]
|
<commit_before><commit_msg>Add migration to copy texts to contributions<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-09 14:15
from __future__ import unicode_literals
from django.db import migrations
def copy_texts(apps, schema_editor):
ContentType = apps.get_model('contenttypes.ContentType')
Contribution = apps.get_model('contributions.Contribution')
ContributionText = apps.get_model('contributions.Text')
ReplyKey = apps.get_model('contributions.ReplyKey')
Text = apps.get_model('texts.Text')
id_map = {}
for text in Text.objects.all():
ct = ContributionText.objects.create(text=text.text)
contribution = Contribution.objects.create(
container_type=text.container_type,
container_id=text.container_id,
contribution_id=ct.id,
contribution_type=ContentType.objects.get_for_model(ct),
author=text.author)
contribution.time_created = text.time_created
contribution.save()
id_map[text.id] = contribution.id
for rk in text.replykey_set.all():
ReplyKey.objects.create(contribution=contribution, gestalt=rk.gestalt, key=rk.key)
for text in Text.objects.filter(in_reply_to__isnull=False):
contribution = Contribution.objects.get(id=id_map[text.id])
contribution.in_reply_to_id = id_map[text.in_reply_to_id]
contribution.save()
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
('contributions', '0001_initial'),
('texts', '0008_text_in_reply_to'),
]
operations = [
migrations.RunPython(copy_texts),
]
|
|
8370a3419144b63461f4add0003292d7d5d9f03e
|
glitter/blocks/banner/migrations/0004_delete_empty_blocks.py
|
glitter/blocks/banner/migrations/0004_delete_empty_blocks.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def remove_empty_blocks(apps, schema_editor):
BannerBlock = apps.get_model('glitter_banner', 'BannerBlock')
ContentBlock = apps.get_model('glitter', 'ContentBlock')
empty_blocks = BannerBlock.objects.filter(bannerinline=None)
content_block_ids = empty_blocks.values_list('content_block_id', flat=True)
ContentBlock.objects.filter(id__in=content_block_ids).delete()
empty_blocks.delete()
class Migration(migrations.Migration):
dependencies = [
('glitter_banner', '0003_make_banner_link_optional'),
('glitter', '0001_initial'),
]
operations = [
migrations.RunPython(remove_empty_blocks, reverse_code=migrations.RunPython.noop),
]
|
Clean up banner blocks without inlines
|
Clean up banner blocks without inlines
|
Python
|
bsd-3-clause
|
developersociety/django-glitter,developersociety/django-glitter,blancltd/django-glitter,blancltd/django-glitter,blancltd/django-glitter,developersociety/django-glitter
|
Clean up banner blocks without inlines
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def remove_empty_blocks(apps, schema_editor):
BannerBlock = apps.get_model('glitter_banner', 'BannerBlock')
ContentBlock = apps.get_model('glitter', 'ContentBlock')
empty_blocks = BannerBlock.objects.filter(bannerinline=None)
content_block_ids = empty_blocks.values_list('content_block_id', flat=True)
ContentBlock.objects.filter(id__in=content_block_ids).delete()
empty_blocks.delete()
class Migration(migrations.Migration):
dependencies = [
('glitter_banner', '0003_make_banner_link_optional'),
('glitter', '0001_initial'),
]
operations = [
migrations.RunPython(remove_empty_blocks, reverse_code=migrations.RunPython.noop),
]
|
<commit_before><commit_msg>Clean up banner blocks without inlines<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def remove_empty_blocks(apps, schema_editor):
BannerBlock = apps.get_model('glitter_banner', 'BannerBlock')
ContentBlock = apps.get_model('glitter', 'ContentBlock')
empty_blocks = BannerBlock.objects.filter(bannerinline=None)
content_block_ids = empty_blocks.values_list('content_block_id', flat=True)
ContentBlock.objects.filter(id__in=content_block_ids).delete()
empty_blocks.delete()
class Migration(migrations.Migration):
dependencies = [
('glitter_banner', '0003_make_banner_link_optional'),
('glitter', '0001_initial'),
]
operations = [
migrations.RunPython(remove_empty_blocks, reverse_code=migrations.RunPython.noop),
]
|
Clean up banner blocks without inlines# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def remove_empty_blocks(apps, schema_editor):
BannerBlock = apps.get_model('glitter_banner', 'BannerBlock')
ContentBlock = apps.get_model('glitter', 'ContentBlock')
empty_blocks = BannerBlock.objects.filter(bannerinline=None)
content_block_ids = empty_blocks.values_list('content_block_id', flat=True)
ContentBlock.objects.filter(id__in=content_block_ids).delete()
empty_blocks.delete()
class Migration(migrations.Migration):
dependencies = [
('glitter_banner', '0003_make_banner_link_optional'),
('glitter', '0001_initial'),
]
operations = [
migrations.RunPython(remove_empty_blocks, reverse_code=migrations.RunPython.noop),
]
|
<commit_before><commit_msg>Clean up banner blocks without inlines<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def remove_empty_blocks(apps, schema_editor):
BannerBlock = apps.get_model('glitter_banner', 'BannerBlock')
ContentBlock = apps.get_model('glitter', 'ContentBlock')
empty_blocks = BannerBlock.objects.filter(bannerinline=None)
content_block_ids = empty_blocks.values_list('content_block_id', flat=True)
ContentBlock.objects.filter(id__in=content_block_ids).delete()
empty_blocks.delete()
class Migration(migrations.Migration):
dependencies = [
('glitter_banner', '0003_make_banner_link_optional'),
('glitter', '0001_initial'),
]
operations = [
migrations.RunPython(remove_empty_blocks, reverse_code=migrations.RunPython.noop),
]
|
|
c5b4b6b8cea87e382d4b411139d822fbcabe5248
|
thinc/tests/unit/test_hash_embed.py
|
thinc/tests/unit/test_hash_embed.py
|
import pytest
import numpy
from ...neural._classes.hash_embed import HashEmbed
def test_init():
model = HashEmbed(64, 1000)
assert model.nV == 1000
assert model.nO == 64
assert model.vectors.shape == (1000, 64)
#assert model.word_weights.shape == (1000,)
def test_seed_changes_bucket():
model1 = HashEmbed(64, 1000, seed=2)
model2 = HashEmbed(64, 1000, seed=1)
arr = numpy.ones((1,), dtype='uint64')
vector1 = model1(arr)
vector2 = model2(arr)
assert vector1.sum() != vector2.sum()
|
Add HashEmbed class and tests
|
Add HashEmbed class and tests
|
Python
|
mit
|
spacy-io/thinc,explosion/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc,spacy-io/thinc
|
Add HashEmbed class and tests
|
import pytest
import numpy
from ...neural._classes.hash_embed import HashEmbed
def test_init():
model = HashEmbed(64, 1000)
assert model.nV == 1000
assert model.nO == 64
assert model.vectors.shape == (1000, 64)
#assert model.word_weights.shape == (1000,)
def test_seed_changes_bucket():
model1 = HashEmbed(64, 1000, seed=2)
model2 = HashEmbed(64, 1000, seed=1)
arr = numpy.ones((1,), dtype='uint64')
vector1 = model1(arr)
vector2 = model2(arr)
assert vector1.sum() != vector2.sum()
|
<commit_before><commit_msg>Add HashEmbed class and tests<commit_after>
|
import pytest
import numpy
from ...neural._classes.hash_embed import HashEmbed
def test_init():
model = HashEmbed(64, 1000)
assert model.nV == 1000
assert model.nO == 64
assert model.vectors.shape == (1000, 64)
#assert model.word_weights.shape == (1000,)
def test_seed_changes_bucket():
model1 = HashEmbed(64, 1000, seed=2)
model2 = HashEmbed(64, 1000, seed=1)
arr = numpy.ones((1,), dtype='uint64')
vector1 = model1(arr)
vector2 = model2(arr)
assert vector1.sum() != vector2.sum()
|
Add HashEmbed class and testsimport pytest
import numpy
from ...neural._classes.hash_embed import HashEmbed
def test_init():
model = HashEmbed(64, 1000)
assert model.nV == 1000
assert model.nO == 64
assert model.vectors.shape == (1000, 64)
#assert model.word_weights.shape == (1000,)
def test_seed_changes_bucket():
model1 = HashEmbed(64, 1000, seed=2)
model2 = HashEmbed(64, 1000, seed=1)
arr = numpy.ones((1,), dtype='uint64')
vector1 = model1(arr)
vector2 = model2(arr)
assert vector1.sum() != vector2.sum()
|
<commit_before><commit_msg>Add HashEmbed class and tests<commit_after>import pytest
import numpy
from ...neural._classes.hash_embed import HashEmbed
def test_init():
model = HashEmbed(64, 1000)
assert model.nV == 1000
assert model.nO == 64
assert model.vectors.shape == (1000, 64)
#assert model.word_weights.shape == (1000,)
def test_seed_changes_bucket():
model1 = HashEmbed(64, 1000, seed=2)
model2 = HashEmbed(64, 1000, seed=1)
arr = numpy.ones((1,), dtype='uint64')
vector1 = model1(arr)
vector2 = model2(arr)
assert vector1.sum() != vector2.sum()
|
|
107976a8f6fab4e46eeaab0659237380624ee091
|
results/migrations/0018_fix_2015_resultevent_winners.py
|
results/migrations/0018_fix_2015_resultevent_winners.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def fix_2015_winners(apps, schema_editor):
ResultEvent = apps.get_model('results', 'ResultEvent')
Person = apps.get_model('popolo', 'Person')
for re in ResultEvent.objects.filter(election__slug=2015):
        # Through some mistake or other, all the winners of the 2015
        # election were set to the person with ID 1. Find the right person
# instead:
if re.winner.id == 1:
re.winner = Person.objects.get(
memberships__extra__elected=True,
memberships__extra__election=re.election,
memberships__post=re.post_new,
memberships__on_behalf_of=re.winner_party)
re.save()
class Migration(migrations.Migration):
dependencies = [
('results', '0017_rename_post_name_to_old_post_name'),
]
operations = [
migrations.RunPython(fix_2015_winners),
]
|
Fix the 2015 ResultEvent winners
|
Fix the 2015 ResultEvent winners
Due to a bad data migration in the past, all ResultEvent objects for the
2015 general election were associated with the Person with ID 1. This
data migration fixes the winner field on the results.
|
Python
|
agpl-3.0
|
DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative
|
Fix the 2015 ResultEvent winners
Due to a bad data migration in the past, all ResultEvent objects for the
2015 general election were associated with the Person with ID 1. This
data migration fixes the winner field on the results.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def fix_2015_winners(apps, schema_editor):
ResultEvent = apps.get_model('results', 'ResultEvent')
Person = apps.get_model('popolo', 'Person')
for re in ResultEvent.objects.filter(election__slug=2015):
        # Through some mistake or other, all the winners of the 2015
        # election were set to the person with ID 1. Find the right person
# instead:
if re.winner.id == 1:
re.winner = Person.objects.get(
memberships__extra__elected=True,
memberships__extra__election=re.election,
memberships__post=re.post_new,
memberships__on_behalf_of=re.winner_party)
re.save()
class Migration(migrations.Migration):
dependencies = [
('results', '0017_rename_post_name_to_old_post_name'),
]
operations = [
migrations.RunPython(fix_2015_winners),
]
|
<commit_before><commit_msg>Fix the 2015 ResultEvent winners
Due to a bad data migration in the past, all ResultEvent objects for the
2015 general election were associated with the Person with ID 1. This
data migration fixes the winner field on the results.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def fix_2015_winners(apps, schema_editor):
ResultEvent = apps.get_model('results', 'ResultEvent')
Person = apps.get_model('popolo', 'Person')
for re in ResultEvent.objects.filter(election__slug=2015):
        # Through some mistake or other, all the winners of the 2015
        # election were set to the person with ID 1. Find the right person
# instead:
if re.winner.id == 1:
re.winner = Person.objects.get(
memberships__extra__elected=True,
memberships__extra__election=re.election,
memberships__post=re.post_new,
memberships__on_behalf_of=re.winner_party)
re.save()
class Migration(migrations.Migration):
dependencies = [
('results', '0017_rename_post_name_to_old_post_name'),
]
operations = [
migrations.RunPython(fix_2015_winners),
]
|
Fix the 2015 ResultEvent winners
Due to a bad data migration in the past, all ResultEvent objects for the
2015 general election were associated with the Person with ID 1. This
data migration fixes the winner field on the results.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def fix_2015_winners(apps, schema_editor):
ResultEvent = apps.get_model('results', 'ResultEvent')
Person = apps.get_model('popolo', 'Person')
for re in ResultEvent.objects.filter(election__slug=2015):
        # Through some mistake or other, all the winners of the 2015
        # election were set to the person with ID 1. Find the right person
# instead:
if re.winner.id == 1:
re.winner = Person.objects.get(
memberships__extra__elected=True,
memberships__extra__election=re.election,
memberships__post=re.post_new,
memberships__on_behalf_of=re.winner_party)
re.save()
class Migration(migrations.Migration):
dependencies = [
('results', '0017_rename_post_name_to_old_post_name'),
]
operations = [
migrations.RunPython(fix_2015_winners),
]
|
<commit_before><commit_msg>Fix the 2015 ResultEvent winners
Due to a bad data migration in the past, all ResultEvent objects for the
2015 general election were associated with the Person with ID 1. This
data migration fixes the winner field on the results.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def fix_2015_winners(apps, schema_editor):
ResultEvent = apps.get_model('results', 'ResultEvent')
Person = apps.get_model('popolo', 'Person')
for re in ResultEvent.objects.filter(election__slug=2015):
        # Through some mistake or other, all the winners of the 2015
        # election were set to the person with ID 1. Find the right person
# instead:
if re.winner.id == 1:
re.winner = Person.objects.get(
memberships__extra__elected=True,
memberships__extra__election=re.election,
memberships__post=re.post_new,
memberships__on_behalf_of=re.winner_party)
re.save()
class Migration(migrations.Migration):
dependencies = [
('results', '0017_rename_post_name_to_old_post_name'),
]
operations = [
migrations.RunPython(fix_2015_winners),
]
|
|
8299d4bf1705571ad789459c07abbd32733c8a18
|
tests/test_geant4_compat.py
|
tests/test_geant4_compat.py
|
import sys
import os
sys.path = [os.path.join(os.path.dirname(__file__), "..")] + sys.path
from physt import h1, h2, histogramdd
from physt.compat import geant4
import numpy as np
import pytest
class TestGeant4Compat(object):
def test_read_h1(self):
path = os.path.join(os.path.dirname(__file__), "data/geant-h1.csv")
h = geant4.load_csv(path)
assert h.shape == (100,)
assert h.ndim == 1
assert h.name == "Edep in absorber"
assert h.total == 10000
def test_read_h2(self):
path = os.path.join(os.path.dirname(__file__), "data/geant-h2.csv")
h = geant4.load_csv(path)
assert h.ndim == 2
assert h.shape == (50,50)
assert h.name == "Drift Chamber 1 X vs Y"
assert h.total == 292
|
Test for Geant4 (better ones for 2D would be nice)
|
Test for Geant4 (better ones for 2D would be nice)
|
Python
|
mit
|
janpipek/physt
|
Test for Geant4 (better ones for 2D would be nice)
|
import sys
import os
sys.path = [os.path.join(os.path.dirname(__file__), "..")] + sys.path
from physt import h1, h2, histogramdd
from physt.compat import geant4
import numpy as np
import pytest
class TestGeant4Compat(object):
def test_read_h1(self):
path = os.path.join(os.path.dirname(__file__), "data/geant-h1.csv")
h = geant4.load_csv(path)
assert h.shape == (100,)
assert h.ndim == 1
assert h.name == "Edep in absorber"
assert h.total == 10000
def test_read_h2(self):
path = os.path.join(os.path.dirname(__file__), "data/geant-h2.csv")
h = geant4.load_csv(path)
assert h.ndim == 2
assert h.shape == (50,50)
assert h.name == "Drift Chamber 1 X vs Y"
assert h.total == 292
|
<commit_before><commit_msg>Test for Geant4 (better ones for 2D would be nice)<commit_after>
|
import sys
import os
sys.path = [os.path.join(os.path.dirname(__file__), "..")] + sys.path
from physt import h1, h2, histogramdd
from physt.compat import geant4
import numpy as np
import pytest
class TestGeant4Compat(object):
def test_read_h1(self):
path = os.path.join(os.path.dirname(__file__), "data/geant-h1.csv")
h = geant4.load_csv(path)
assert h.shape == (100,)
assert h.ndim == 1
assert h.name == "Edep in absorber"
assert h.total == 10000
def test_read_h2(self):
path = os.path.join(os.path.dirname(__file__), "data/geant-h2.csv")
h = geant4.load_csv(path)
assert h.ndim == 2
assert h.shape == (50,50)
assert h.name == "Drift Chamber 1 X vs Y"
assert h.total == 292
|
Test for Geant4 (better ones for 2D would be nice)import sys
import os
sys.path = [os.path.join(os.path.dirname(__file__), "..")] + sys.path
from physt import h1, h2, histogramdd
from physt.compat import geant4
import numpy as np
import pytest
class TestGeant4Compat(object):
def test_read_h1(self):
path = os.path.join(os.path.dirname(__file__), "data/geant-h1.csv")
h = geant4.load_csv(path)
assert h.shape == (100,)
assert h.ndim == 1
assert h.name == "Edep in absorber"
assert h.total == 10000
def test_read_h2(self):
path = os.path.join(os.path.dirname(__file__), "data/geant-h2.csv")
h = geant4.load_csv(path)
assert h.ndim == 2
assert h.shape == (50,50)
assert h.name == "Drift Chamber 1 X vs Y"
assert h.total == 292
|
<commit_before><commit_msg>Test for Geant4 (better ones for 2D would be nice)<commit_after>import sys
import os
sys.path = [os.path.join(os.path.dirname(__file__), "..")] + sys.path
from physt import h1, h2, histogramdd
from physt.compat import geant4
import numpy as np
import pytest
class TestGeant4Compat(object):
def test_read_h1(self):
path = os.path.join(os.path.dirname(__file__), "data/geant-h1.csv")
h = geant4.load_csv(path)
assert h.shape == (100,)
assert h.ndim == 1
assert h.name == "Edep in absorber"
assert h.total == 10000
def test_read_h2(self):
path = os.path.join(os.path.dirname(__file__), "data/geant-h2.csv")
h = geant4.load_csv(path)
assert h.ndim == 2
assert h.shape == (50,50)
assert h.name == "Drift Chamber 1 X vs Y"
assert h.total == 292
|
|
39615b2da86743565f80ce99fa92de16b9500e3e
|
taiga/projects/migrations/0044_auto_20160531_1150.py
|
taiga/projects/migrations/0044_auto_20160531_1150.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-05-31 11:50
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('projects', '0043_auto_20160530_1004'),
]
operations = [
migrations.AlterField(
model_name='project',
name='blocked_code',
field=models.CharField(blank=True, choices=[('blocked-by-nonpayment', 'This project is blocked due to payment failure'), ('blocked-by-staff', 'This project is blocked by admin staff'), ('blocked-by-owner-leaving', 'This project is blocked because the owner left'), ('blocked-by-deleting', "This project is blocked while it's deleted")], default=None, max_length=255, null=True, verbose_name='blocked code'),
),
]
|
Fix migrations between master and stable
|
[Backport] Fix migrations between master and stable
|
Python
|
agpl-3.0
|
taigaio/taiga-back,xdevelsistemas/taiga-back-community,xdevelsistemas/taiga-back-community,dayatz/taiga-back,taigaio/taiga-back,dayatz/taiga-back,xdevelsistemas/taiga-back-community,dayatz/taiga-back,taigaio/taiga-back
|
[Backport] Fix migrations between master and stable
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-05-31 11:50
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('projects', '0043_auto_20160530_1004'),
]
operations = [
migrations.AlterField(
model_name='project',
name='blocked_code',
field=models.CharField(blank=True, choices=[('blocked-by-nonpayment', 'This project is blocked due to payment failure'), ('blocked-by-staff', 'This project is blocked by admin staff'), ('blocked-by-owner-leaving', 'This project is blocked because the owner left'), ('blocked-by-deleting', "This project is blocked while it's deleted")], default=None, max_length=255, null=True, verbose_name='blocked code'),
),
]
|
<commit_before><commit_msg>[Backport] Fix migrations between master and stable<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-05-31 11:50
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('projects', '0043_auto_20160530_1004'),
]
operations = [
migrations.AlterField(
model_name='project',
name='blocked_code',
field=models.CharField(blank=True, choices=[('blocked-by-nonpayment', 'This project is blocked due to payment failure'), ('blocked-by-staff', 'This project is blocked by admin staff'), ('blocked-by-owner-leaving', 'This project is blocked because the owner left'), ('blocked-by-deleting', "This project is blocked while it's deleted")], default=None, max_length=255, null=True, verbose_name='blocked code'),
),
]
|
[Backport] Fix migrations between master and stable# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-05-31 11:50
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('projects', '0043_auto_20160530_1004'),
]
operations = [
migrations.AlterField(
model_name='project',
name='blocked_code',
field=models.CharField(blank=True, choices=[('blocked-by-nonpayment', 'This project is blocked due to payment failure'), ('blocked-by-staff', 'This project is blocked by admin staff'), ('blocked-by-owner-leaving', 'This project is blocked because the owner left'), ('blocked-by-deleting', "This project is blocked while it's deleted")], default=None, max_length=255, null=True, verbose_name='blocked code'),
),
]
|
<commit_before><commit_msg>[Backport] Fix migrations between master and stable<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-05-31 11:50
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('projects', '0043_auto_20160530_1004'),
]
operations = [
migrations.AlterField(
model_name='project',
name='blocked_code',
field=models.CharField(blank=True, choices=[('blocked-by-nonpayment', 'This project is blocked due to payment failure'), ('blocked-by-staff', 'This project is blocked by admin staff'), ('blocked-by-owner-leaving', 'This project is blocked because the owner left'), ('blocked-by-deleting', "This project is blocked while it's deleted")], default=None, max_length=255, null=True, verbose_name='blocked code'),
),
]
|
|
aceba92f510b367ffe17c720e602e767e0bb4457
|
Infinite_while_example.py
|
Infinite_while_example.py
|
#!/usr/bin/env python3
start = int(input("Enter the starting index: "))
end = int(input("Enter the last index: "))
if start > end:
print("Invalid starting and last index !")
else:
print("The numbers from ",start,"till ",end,"are:")
#Infinite While loop case
while(True):
if start > end:
break
if start <= end:
print(start)
start = start + 1
|
Add solution to Infinite While loop
|
Add solution to Infinite While loop
|
Python
|
mit
|
khusi-anu/dgplug-python
|
Add solution to Infinite While loop
|
#!/usr/bin/env python3
start = int(input("Enter the starting index: "))
end = int(input("Enter the last index: "))
if start > end:
print("Invalid starting and last index !")
else:
print("The numbers from ",start,"till ",end,"are:")
#Infinite While loop case
while(True):
if start > end:
break
if start <= end:
print(start)
start = start + 1
|
<commit_before><commit_msg>Add solution to Infinite While loop<commit_after>
|
#!/usr/bin/env python3
start = int(input("Enter the starting index: "))
end = int(input("Enter the last index: "))
if start > end:
print("Invalid starting and last index !")
else:
print("The numbers from ",start,"till ",end,"are:")
#Infinite While loop case
while(True):
if start > end:
break
if start <= end:
print(start)
start = start + 1
|
Add solution to Infinite While loop#!/usr/bin/env python3
start = int(input("Enter the starting index: "))
end = int(input("Enter the last index: "))
if start > end:
print("Invalid starting and last index !")
else:
print("The numbers from ",start,"till ",end,"are:")
#Infinite While loop case
while(True):
if start > end:
break
if start <= end:
print(start)
start = start + 1
|
<commit_before><commit_msg>Add solution to Infinite While loop<commit_after>#!/usr/bin/env python3
start = int(input("Enter the starting index: "))
end = int(input("Enter the last index: "))
if start > end:
print("Invalid starting and last index !")
else:
print("The numbers from ",start,"till ",end,"are:")
#Infinite While loop case
while(True):
if start > end:
break
if start <= end:
print(start)
start = start + 1
|
|
2cda4506be6b39442607eb24e0ec2ee7f11fcdc0
|
teami18n/teami18n/migrations/0002_populate_countries.py
|
teami18n/teami18n/migrations/0002_populate_countries.py
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from django_countries import countries
from ..models import Country
class Migration(DataMigration):
def forwards(self, orm):
for code, __ in countries:
country = Country(code=code)
country.save()
def backwards(self, orm):
pass
models = {
u'teami18n.country': {
'Meta': {'object_name': 'Country'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['teami18n']
symmetrical = True
|
Add migration to create all country objects
|
Add migration to create all country objects
|
Python
|
mit
|
team-i18n/hackaway,team-i18n/hackaway,team-i18n/hackaway
|
Add migration to create all country objects
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from django_countries import countries
from ..models import Country
class Migration(DataMigration):
def forwards(self, orm):
for code, __ in countries:
country = Country(code=code)
country.save()
def backwards(self, orm):
pass
models = {
u'teami18n.country': {
'Meta': {'object_name': 'Country'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['teami18n']
symmetrical = True
|
<commit_before><commit_msg>Add migration to create all country objects<commit_after>
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from django_countries import countries
from ..models import Country
class Migration(DataMigration):
def forwards(self, orm):
for code, __ in countries:
country = Country(code=code)
country.save()
def backwards(self, orm):
pass
models = {
u'teami18n.country': {
'Meta': {'object_name': 'Country'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['teami18n']
symmetrical = True
|
Add migration to create all country objects# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from django_countries import countries
from ..models import Country
class Migration(DataMigration):
def forwards(self, orm):
for code, __ in countries:
country = Country(code=code)
country.save()
def backwards(self, orm):
pass
models = {
u'teami18n.country': {
'Meta': {'object_name': 'Country'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['teami18n']
symmetrical = True
|
<commit_before><commit_msg>Add migration to create all country objects<commit_after># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from django_countries import countries
from ..models import Country
class Migration(DataMigration):
def forwards(self, orm):
for code, __ in countries:
country = Country(code=code)
country.save()
def backwards(self, orm):
pass
models = {
u'teami18n.country': {
'Meta': {'object_name': 'Country'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['teami18n']
symmetrical = True
|
|
2ad125802c3508be830f860154ad48a2153fe898
|
warehouse/database/utils.py
|
warehouse/database/utils.py
|
def table_args(args):
def wrapper(cls):
targs = args
bases = cls.__mro__[1:]
for base in bases:
if hasattr(base, "__table_args__"):
targs = targs + base.__table_args__
return targs
return wrapper
|
Add utility function for grabbing __table_args__
|
Add utility function for grabbing __table_args__
|
Python
|
bsd-2-clause
|
davidfischer/warehouse
|
Add utility function for grabbing __table_args__
|
def table_args(args):
def wrapper(cls):
targs = args
bases = cls.__mro__[1:]
for base in bases:
if hasattr(base, "__table_args__"):
targs = targs + base.__table_args__
return targs
return wrapper
|
<commit_before><commit_msg>Add utility function for grabbing __table_args__<commit_after>
|
def table_args(args):
def wrapper(cls):
targs = args
bases = cls.__mro__[1:]
for base in bases:
if hasattr(base, "__table_args__"):
targs = targs + base.__table_args__
return targs
return wrapper
|
Add utility function for grabbing __table_args__def table_args(args):
def wrapper(cls):
targs = args
bases = cls.__mro__[1:]
for base in bases:
if hasattr(base, "__table_args__"):
targs = targs + base.__table_args__
return targs
return wrapper
|
<commit_before><commit_msg>Add utility function for grabbing __table_args__<commit_after>def table_args(args):
def wrapper(cls):
targs = args
bases = cls.__mro__[1:]
for base in bases:
if hasattr(base, "__table_args__"):
targs = targs + base.__table_args__
return targs
return wrapper
|
|
52e650a9181ce1e8bd8a2c0b1281f81bf6747874
|
calvin/actorstore/systemactors/std/ClassicDelay.py
|
calvin/actorstore/systemactors/std/ClassicDelay.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from calvin.actor.actor import Actor, ActionResult, manage, condition, guard
class ClassicDelay(Actor):
"""
After first token, pass on token once every 'delay' seconds.
Input :
token: anything
Outputs:
token: anything
"""
@manage(['delay'])
def init(self, delay=0.1):
self.delay = delay
self.use('calvinsys.events.timer', shorthand='timer')
self.timer = None
def setup(self):
self.timer = self['timer'].repeat(self.delay)
def will_migrate(self):
self.timer.cancel()
def did_migrate(self):
self.setup()
@condition(['token'], ['token'])
@guard(lambda self, _: not self.timer)
def start_timer(self, token):
self.setup()
return ActionResult(production=(token, ))
@condition(['token'], ['token'])
@guard(lambda self, _: self.timer and self.timer.triggered)
def passthrough(self, token):
self.timer.ack()
return ActionResult(production=(token, ))
action_priority = (start_timer, passthrough)
requires = ['calvinsys.events.timer']
|
Add actor with behavior similar to old-style Delay
|
Add actor with behavior similar to old-style Delay
|
Python
|
apache-2.0
|
EricssonResearch/calvin-base,les69/calvin-base,les69/calvin-base,EricssonResearch/calvin-base,EricssonResearch/calvin-base,les69/calvin-base,EricssonResearch/calvin-base,les69/calvin-base
|
Add actor with behavior similar to old-style Delay
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from calvin.actor.actor import Actor, ActionResult, manage, condition, guard
class ClassicDelay(Actor):
"""
After first token, pass on token once every 'delay' seconds.
Input :
token: anything
Outputs:
token: anything
"""
@manage(['delay'])
def init(self, delay=0.1):
self.delay = delay
self.use('calvinsys.events.timer', shorthand='timer')
self.timer = None
def setup(self):
self.timer = self['timer'].repeat(self.delay)
def will_migrate(self):
self.timer.cancel()
def did_migrate(self):
self.setup()
@condition(['token'], ['token'])
@guard(lambda self, _: not self.timer)
def start_timer(self, token):
self.setup()
return ActionResult(production=(token, ))
@condition(['token'], ['token'])
@guard(lambda self, _: self.timer and self.timer.triggered)
def passthrough(self, token):
self.timer.ack()
return ActionResult(production=(token, ))
action_priority = (start_timer, passthrough)
requires = ['calvinsys.events.timer']
|
<commit_before><commit_msg>Add actor with behavior similar to old-style Delay<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from calvin.actor.actor import Actor, ActionResult, manage, condition, guard
class ClassicDelay(Actor):
"""
After first token, pass on token once every 'delay' seconds.
Input :
token: anything
Outputs:
token: anything
"""
@manage(['delay'])
def init(self, delay=0.1):
self.delay = delay
self.use('calvinsys.events.timer', shorthand='timer')
self.timer = None
def setup(self):
self.timer = self['timer'].repeat(self.delay)
def will_migrate(self):
self.timer.cancel()
def did_migrate(self):
self.setup()
@condition(['token'], ['token'])
@guard(lambda self, _: not self.timer)
def start_timer(self, token):
self.setup()
return ActionResult(production=(token, ))
@condition(['token'], ['token'])
@guard(lambda self, _: self.timer and self.timer.triggered)
def passthrough(self, token):
self.timer.ack()
return ActionResult(production=(token, ))
action_priority = (start_timer, passthrough)
requires = ['calvinsys.events.timer']
|
Add actor with behavior similar to old-style Delay# -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from calvin.actor.actor import Actor, ActionResult, manage, condition, guard
class ClassicDelay(Actor):
"""
After first token, pass on token once every 'delay' seconds.
Input :
token: anything
Outputs:
token: anything
"""
@manage(['delay'])
def init(self, delay=0.1):
self.delay = delay
self.use('calvinsys.events.timer', shorthand='timer')
self.timer = None
def setup(self):
self.timer = self['timer'].repeat(self.delay)
def will_migrate(self):
self.timer.cancel()
def did_migrate(self):
self.setup()
@condition(['token'], ['token'])
@guard(lambda self, _: not self.timer)
def start_timer(self, token):
self.setup()
return ActionResult(production=(token, ))
@condition(['token'], ['token'])
@guard(lambda self, _: self.timer and self.timer.triggered)
def passthrough(self, token):
self.timer.ack()
return ActionResult(production=(token, ))
action_priority = (start_timer, passthrough)
requires = ['calvinsys.events.timer']
|
<commit_before><commit_msg>Add actor with behavior similar to old-style Delay<commit_after># -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from calvin.actor.actor import Actor, ActionResult, manage, condition, guard
class ClassicDelay(Actor):
"""
After first token, pass on token once every 'delay' seconds.
Input :
token: anything
Outputs:
token: anything
"""
@manage(['delay'])
def init(self, delay=0.1):
self.delay = delay
self.use('calvinsys.events.timer', shorthand='timer')
self.timer = None
def setup(self):
self.timer = self['timer'].repeat(self.delay)
def will_migrate(self):
self.timer.cancel()
def did_migrate(self):
self.setup()
@condition(['token'], ['token'])
@guard(lambda self, _: not self.timer)
def start_timer(self, token):
self.setup()
return ActionResult(production=(token, ))
@condition(['token'], ['token'])
@guard(lambda self, _: self.timer and self.timer.triggered)
def passthrough(self, token):
self.timer.ack()
return ActionResult(production=(token, ))
action_priority = (start_timer, passthrough)
requires = ['calvinsys.events.timer']
|
|
8206cded6a14dca1b7353add337b5a88e5e05422
|
Python/cali_first_guess.py
|
Python/cali_first_guess.py
|
#!/usr/bin/env python
# Usage: cali_first_guess [shift] [slope] [sigma] [multiple] [offset]
# Units are: [Ang.] [???] [km/s?] [None] [Norm.]
# Reasonable defaults: 0.001 -0.002 3.0 1.37 0.002
import sys
import json
#print 'Number of arguments:', len(sys.argv), 'arguments.'
#print 'Argument List:', str(sys.argv)
first_guesses = {}
first_guesses.update({'shift':sys.argv[1],
'fix_shift':False,
'limit_shift':(-1.5, 1.5),
'error_shift':0.03})
first_guesses.update({'slope':sys.argv[2],
'fix_slope':False,
'limit_slope':(-2.0, 2.0),
'error_slope':0.04})
first_guesses.update({'sigma':sys.argv[3],
'fix_sigma':False,
'limit_sigma':(1.0, 10.0),
'error_sigma':0.2})
first_guesses.update({'multiple':sys.argv[4],
'fix_multiple':False,
'limit_multiple':(0.1, 20.0),
'error_multiple':0.03})
first_guesses.update({'offset':sys.argv[5],
'fix_offset':False,
'limit_offset':(-2.0, 2.0),
'error_offset':0.03})
first_guesses.update({'minuit':0, 'fix_minuit':True})
with open("first_guesses.json", 'w') as file_handle:
json.dump(first_guesses, file_handle, indent=2, sort_keys=True)
|
Add python script for creating first guesses.
|
Add python script for creating first guesses.
|
Python
|
mit
|
jbwhit/CaliCompari
|
Add python script for creating first guesses.
|
#!/usr/bin/env python
# Usage: cali_first_guess [shift] [slope] [sigma] [multiple] [offset]
# Units are: [Ang.] [???] [km/s?] [None] [Norm.]
# Reasonable defaults: 0.001 -0.002 3.0 1.37 0.002
import sys
import json
#print 'Number of arguments:', len(sys.argv), 'arguments.'
#print 'Argument List:', str(sys.argv)
first_guesses = {}
first_guesses.update({'shift':sys.argv[1],
'fix_shift':False,
'limit_shift':(-1.5, 1.5),
'error_shift':0.03})
first_guesses.update({'slope':sys.argv[2],
'fix_slope':False,
'limit_slope':(-2.0, 2.0),
'error_slope':0.04})
first_guesses.update({'sigma':sys.argv[3],
'fix_sigma':False,
'limit_sigma':(1.0, 10.0),
'error_sigma':0.2})
first_guesses.update({'multiple':sys.argv[4],
'fix_multiple':False,
'limit_multiple':(0.1, 20.0),
'error_multiple':0.03})
first_guesses.update({'offset':sys.argv[5],
'fix_offset':False,
'limit_offset':(-2.0, 2.0),
'error_offset':0.03})
first_guesses.update({'minuit':0, 'fix_minuit':True})
with open("first_guesses.json", 'w') as file_handle:
json.dump(first_guesses, file_handle, indent=2, sort_keys=True)
|
<commit_before><commit_msg>Add python script for creating first guesses.<commit_after>
|
#!/usr/bin/env python
# Usage: cali_first_guess [shift] [slope] [sigma] [multiple] [offset]
# Units are: [Ang.] [???] [km/s?] [None] [Norm.]
# Reasonable defaults: 0.001 -0.002 3.0 1.37 0.002
import sys
import json
#print 'Number of arguments:', len(sys.argv), 'arguments.'
#print 'Argument List:', str(sys.argv)
first_guesses = {}
first_guesses.update({'shift':sys.argv[1],
'fix_shift':False,
'limit_shift':(-1.5, 1.5),
'error_shift':0.03})
first_guesses.update({'slope':sys.argv[2],
'fix_slope':False,
'limit_slope':(-2.0, 2.0),
'error_slope':0.04})
first_guesses.update({'sigma':sys.argv[3],
'fix_sigma':False,
'limit_sigma':(1.0, 10.0),
'error_sigma':0.2})
first_guesses.update({'multiple':sys.argv[4],
'fix_multiple':False,
'limit_multiple':(0.1, 20.0),
'error_multiple':0.03})
first_guesses.update({'offset':sys.argv[5],
'fix_offset':False,
'limit_offset':(-2.0, 2.0),
'error_offset':0.03})
first_guesses.update({'minuit':0, 'fix_minuit':True})
with open("first_guesses.json", 'w') as file_handle:
json.dump(first_guesses, file_handle, indent=2, sort_keys=True)
|
Add python script for creating first guesses.#!/usr/bin/env python
# Usage: cali_first_guess [shift] [slope] [sigma] [multiple] [offset]
# Units are: [Ang.] [???] [km/s?] [None] [Norm.]
# Reasonable defaults: 0.001 -0.002 3.0 1.37 0.002
import sys
import json
#print 'Number of arguments:', len(sys.argv), 'arguments.'
#print 'Argument List:', str(sys.argv)
first_guesses = {}
first_guesses.update({'shift':sys.argv[1],
'fix_shift':False,
'limit_shift':(-1.5, 1.5),
'error_shift':0.03})
first_guesses.update({'slope':sys.argv[2],
'fix_slope':False,
'limit_slope':(-2.0, 2.0),
'error_slope':0.04})
first_guesses.update({'sigma':sys.argv[3],
'fix_sigma':False,
'limit_sigma':(1.0, 10.0),
'error_sigma':0.2})
first_guesses.update({'multiple':sys.argv[4],
'fix_multiple':False,
'limit_multiple':(0.1, 20.0),
'error_multiple':0.03})
first_guesses.update({'offset':sys.argv[5],
'fix_offset':False,
'limit_offset':(-2.0, 2.0),
'error_offset':0.03})
first_guesses.update({'minuit':0, 'fix_minuit':True})
with open("first_guesses.json", 'w') as file_handle:
json.dump(first_guesses, file_handle, indent=2, sort_keys=True)
|
<commit_before><commit_msg>Add python script for creating first guesses.<commit_after>#!/usr/bin/env python
# Usage: cali_first_guess [shift] [slope] [sigma] [multiple] [offset]
# Units are: [Ang.] [???] [km/s?] [None] [Norm.]
# Reasonable defaults: 0.001 -0.002 3.0 1.37 0.002
import sys
import json
#print 'Number of arguments:', len(sys.argv), 'arguments.'
#print 'Argument List:', str(sys.argv)
first_guesses = {}
first_guesses.update({'shift':sys.argv[1],
'fix_shift':False,
'limit_shift':(-1.5, 1.5),
'error_shift':0.03})
first_guesses.update({'slope':sys.argv[2],
'fix_slope':False,
'limit_slope':(-2.0, 2.0),
'error_slope':0.04})
first_guesses.update({'sigma':sys.argv[3],
'fix_sigma':False,
'limit_sigma':(1.0, 10.0),
'error_sigma':0.2})
first_guesses.update({'multiple':sys.argv[4],
'fix_multiple':False,
'limit_multiple':(0.1, 20.0),
'error_multiple':0.03})
first_guesses.update({'offset':sys.argv[5],
'fix_offset':False,
'limit_offset':(-2.0, 2.0),
'error_offset':0.03})
first_guesses.update({'minuit':0, 'fix_minuit':True})
with open("first_guesses.json", 'w') as file_handle:
json.dump(first_guesses, file_handle, indent=2, sort_keys=True)
|
|
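A small sketch of reading the generated file back; the key names match those written above, and JSON returns the (low, high) limit tuples as two-element lists:

import json

with open("first_guesses.json") as handle:
    guesses = json.load(handle)
print(guesses["shift"], guesses["limit_shift"])  # e.g. 0.001 [-1.5, 1.5]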
51fa226d7b1d7d70d42127f1ff1f80848a9a6366
|
py/desimodel/test/test_seeing.py
|
py/desimodel/test/test_seeing.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
"""Test desimodel.seeing.
"""
from __future__ import print_function, division
import unittest
import numpy as np
from ..seeing import relative_probability, sample
class TestSeeing(unittest.TestCase):
"""Test desimodel.seeing.
"""
def test_pdf_norm(self):
"""Test that PDF is normalized and has expected mean.
"""
fwhm = np.linspace(0., 10., 200)
pdf = relative_probability(fwhm)
self.assertAlmostEqual(pdf.sum(), 1.)
# Calculate mean seeing.
mean = (fwhm * pdf).sum() / pdf.sum()
self.assertAlmostEqual(mean, 1.726, places=3)
def test_samples(self):
"""Test that samples have expected PDF.
"""
# Histogram 1M generated samples.
samples = sample(1000000, seed=123)
bin_edges = np.linspace(0., 10., 51)
bin_prob, _ = np.histogram(samples, bin_edges, density=True)
bin_prob *= bin_edges[1]
# Calculate the expected number of samples in each bin.
fwhm = np.linspace(0., 10., 10 * len(bin_prob) + 1)
fwhm_midpt = 0.5 * (fwhm[:-1] + fwhm[1:])
pdf = relative_probability(fwhm_midpt)
expected = pdf.reshape(len(bin_prob), -1).sum(axis=1)
# Check for expected bin counts.
self.assertTrue(np.allclose(bin_prob, expected, rtol=1e-2, atol=1e-2))
def test_suite():
"""Allows testing of only this module with the command::
python setup.py test -m <modulename>
"""
return unittest.defaultTestLoader.loadTestsFromName(__name__)
|
Add unit test for seeing module
|
Add unit test for seeing module
|
Python
|
bsd-3-clause
|
desihub/desimodel,desihub/desimodel
|
Add unit test for seeing module
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
"""Test desimodel.seeing.
"""
from __future__ import print_function, division
import unittest
import numpy as np
from ..seeing import relative_probability, sample
class TestSeeing(unittest.TestCase):
"""Test desimodel.seeing.
"""
def test_pdf_norm(self):
"""Test that PDF is normalized and has expected mean.
"""
fwhm = np.linspace(0., 10., 200)
pdf = relative_probability(fwhm)
self.assertAlmostEqual(pdf.sum(), 1.)
# Calculate mean seeing.
mean = (fwhm * pdf).sum() / pdf.sum()
self.assertAlmostEqual(mean, 1.726, places=3)
def test_samples(self):
"""Test that samples have expected PDF.
"""
# Histogram 1M generated samples.
samples = sample(1000000, seed=123)
bin_edges = np.linspace(0., 10., 51)
bin_prob, _ = np.histogram(samples, bin_edges, density=True)
bin_prob *= bin_edges[1]
# Calculate the expected number of samples in each bin.
fwhm = np.linspace(0., 10., 10 * len(bin_prob) + 1)
fwhm_midpt = 0.5 * (fwhm[:-1] + fwhm[1:])
pdf = relative_probability(fwhm_midpt)
expected = pdf.reshape(len(bin_prob), -1).sum(axis=1)
# Check for expected bin counts.
self.assertTrue(np.allclose(bin_prob, expected, rtol=1e-2, atol=1e-2))
def test_suite():
"""Allows testing of only this module with the command::
python setup.py test -m <modulename>
"""
return unittest.defaultTestLoader.loadTestsFromName(__name__)
|
<commit_before><commit_msg>Add unit test for seeing module<commit_after>
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
"""Test desimodel.seeing.
"""
from __future__ import print_function, division
import unittest
import numpy as np
from ..seeing import relative_probability, sample
class TestSeeing(unittest.TestCase):
"""Test desimodel.seeing.
"""
def test_pdf_norm(self):
"""Test that PDF is normalized and has expected mean.
"""
fwhm = np.linspace(0., 10., 200)
pdf = relative_probability(fwhm)
self.assertAlmostEqual(pdf.sum(), 1.)
# Calculate mean seeing.
mean = (fwhm * pdf).sum() / pdf.sum()
self.assertAlmostEqual(mean, 1.726, places=3)
def test_samples(self):
"""Test that samples have expected PDF.
"""
# Histogram 1M generated samples.
samples = sample(1000000, seed=123)
bin_edges = np.linspace(0., 10., 51)
bin_prob, _ = np.histogram(samples, bin_edges, density=True)
bin_prob *= bin_edges[1]
# Calculate the expected number of samples in each bin.
fwhm = np.linspace(0., 10., 10 * len(bin_prob) + 1)
fwhm_midpt = 0.5 * (fwhm[:-1] + fwhm[1:])
pdf = relative_probability(fwhm_midpt)
expected = pdf.reshape(len(bin_prob), -1).sum(axis=1)
# Check for expected bin counts.
self.assertTrue(np.allclose(bin_prob, expected, rtol=1e-2, atol=1e-2))
def test_suite():
"""Allows testing of only this module with the command::
python setup.py test -m <modulename>
"""
return unittest.defaultTestLoader.loadTestsFromName(__name__)
|
Add unit test for seeing module# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
"""Test desimodel.seeing.
"""
from __future__ import print_function, division
import unittest
import numpy as np
from ..seeing import relative_probability, sample
class TestSeeing(unittest.TestCase):
"""Test desimodel.seeing.
"""
def test_pdf_norm(self):
"""Test that PDF is normalized and has expected mean.
"""
fwhm = np.linspace(0., 10., 200)
pdf = relative_probability(fwhm)
self.assertAlmostEqual(pdf.sum(), 1.)
# Calculate mean seeing.
mean = (fwhm * pdf).sum() / pdf.sum()
self.assertAlmostEqual(mean, 1.726, places=3)
def test_samples(self):
"""Test that samples have expected PDF.
"""
# Histogram 1M generated samples.
samples = sample(1000000, seed=123)
bin_edges = np.linspace(0., 10., 51)
bin_prob, _ = np.histogram(samples, bin_edges, density=True)
bin_prob *= bin_edges[1]
# Calculate the expected number of samples in each bin.
fwhm = np.linspace(0., 10., 10 * len(bin_prob) + 1)
fwhm_midpt = 0.5 * (fwhm[:-1] + fwhm[1:])
pdf = relative_probability(fwhm_midpt)
expected = pdf.reshape(len(bin_prob), -1).sum(axis=1)
# Check for expected bin counts.
self.assertTrue(np.allclose(bin_prob, expected, rtol=1e-2, atol=1e-2))
def test_suite():
"""Allows testing of only this module with the command::
python setup.py test -m <modulename>
"""
return unittest.defaultTestLoader.loadTestsFromName(__name__)
|
<commit_before><commit_msg>Add unit test for seeing module<commit_after># Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
"""Test desimodel.seeing.
"""
from __future__ import print_function, division
import unittest
import numpy as np
from ..seeing import relative_probability, sample
class TestSeeing(unittest.TestCase):
"""Test desimodel.seeing.
"""
def test_pdf_norm(self):
"""Test that PDF is normalized and has expected mean.
"""
fwhm = np.linspace(0., 10., 200)
pdf = relative_probability(fwhm)
self.assertAlmostEqual(pdf.sum(), 1.)
# Calculate mean seeing.
mean = (fwhm * pdf).sum() / pdf.sum()
self.assertAlmostEqual(mean, 1.726, places=3)
def test_samples(self):
"""Test that samples have expected PDF.
"""
# Histogram 1M generated samples.
samples = sample(1000000, seed=123)
bin_edges = np.linspace(0., 10., 51)
bin_prob, _ = np.histogram(samples, bin_edges, density=True)
bin_prob *= bin_edges[1]
# Calculate the expected number of samples in each bin.
fwhm = np.linspace(0., 10., 10 * len(bin_prob) + 1)
fwhm_midpt = 0.5 * (fwhm[:-1] + fwhm[1:])
pdf = relative_probability(fwhm_midpt)
expected = pdf.reshape(len(bin_prob), -1).sum(axis=1)
# Check for expected bin counts.
self.assertTrue(np.allclose(bin_prob, expected, rtol=1e-2, atol=1e-2))
def test_suite():
"""Allows testing of only this module with the command::
python setup.py test -m <modulename>
"""
return unittest.defaultTestLoader.loadTestsFromName(__name__)
|
|
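The histogram-versus-PDF check used in test_samples can be illustrated with plain numpy, independent of desimodel; the normal distribution below is only a stand-in for the seeing model:

import numpy as np

rng = np.random.default_rng(123)
samples = rng.normal(1.7, 0.4, size=100000)   # stand-in for seeing.sample()
edges = np.linspace(0.0, 10.0, 51)
bin_prob, _ = np.histogram(samples, edges, density=True)
bin_prob *= edges[1] - edges[0]                # density -> per-bin probability
print(bin_prob.sum())                          # close to 1.0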
91736e7a7cc2510bb2c9a7a6c7930ea30d9be388
|
py/intersection-of-two-arrays.py
|
py/intersection-of-two-arrays.py
|
class Solution(object):
def intersection(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2: List[int]
:rtype: List[int]
"""
return list(set(nums1) & set(nums2))
|
Add py solution for 349. Intersection of Two Arrays
|
Add py solution for 349. Intersection of Two Arrays
349. Intersection of Two Arrays: https://leetcode.com/problems/intersection-of-two-arrays/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 349. Intersection of Two Arrays
349. Intersection of Two Arrays: https://leetcode.com/problems/intersection-of-two-arrays/
|
class Solution(object):
def intersection(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2: List[int]
:rtype: List[int]
"""
return list(set(nums1) & set(nums2))
|
<commit_before><commit_msg>Add py solution for 349. Intersection of Two Arrays
349. Intersection of Two Arrays: https://leetcode.com/problems/intersection-of-two-arrays/<commit_after>
|
class Solution(object):
def intersection(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2: List[int]
:rtype: List[int]
"""
return list(set(nums1) & set(nums2))
|
Add py solution for 349. Intersection of Two Arrays
349. Intersection of Two Arrays: https://leetcode.com/problems/intersection-of-two-arrays/class Solution(object):
def intersection(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2: List[int]
:rtype: List[int]
"""
return list(set(nums1) & set(nums2))
|
<commit_before><commit_msg>Add py solution for 349. Intersection of Two Arrays
349. Intersection of Two Arrays: https://leetcode.com/problems/intersection-of-two-arrays/<commit_after>class Solution(object):
def intersection(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2: List[int]
:rtype: List[int]
"""
return list(set(nums1) & set(nums2))
|
|
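Quick usage check for the solution above; set intersection drops duplicates and does not guarantee order, hence the sort:

print(sorted(Solution().intersection([1, 2, 2, 1], [2, 2])))        # [2]
print(sorted(Solution().intersection([4, 9, 5], [9, 4, 9, 8, 4])))  # [4, 9]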
ed8bf4ce4c8901af093e494cb6811a1ccf4660ba
|
website/tests/test_blog.py
|
website/tests/test_blog.py
|
##
# Copyright (C) 2014 Jessica Tallon & Matt Molyneaux
#
# This file is part of Inboxen.
#
# Inboxen is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Inboxen is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Inboxen. If not, see <http://www.gnu.org/licenses/>.
##
from django import test
from django.contrib.auth import get_user_model
from django.core import urlresolvers
from inboxen import models
BODY = """
Hey there!
==========
This is a test post:
* A list item
* And another
Bye!
"""
class BlogTestCase(test.TestCase):
fixtures = ['inboxen_testdata.json']
def test_blog_index(self):
url = urlresolvers.reverse("blog")
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_blog_post(self):
user = get_user_model().objects.get(username="isdabizda")
user.is_staff = True
user.save()
login = self.client.login(username=user.username, password="123456")
if not login:
raise Exception("Could not log in")
params = {"title": "A Test Post", "body": BODY}
response = self.client.post(urlresolvers.reverse("blog-post-add"), params)
self.assertRedirects(response, urlresolvers.reverse("blog"))
post = models.BlogPost.objects.latest("date")
self.assertEqual(post.subject, "A Test Post")
self.assertEqual(post.body, BODY)
|
Test blog rendering and posting
|
Test blog rendering and posting
|
Python
|
agpl-3.0
|
Inboxen/Inboxen,Inboxen/Inboxen,Inboxen/Inboxen,Inboxen/Inboxen
|
Test blog rendering and posting
|
##
# Copyright (C) 2014 Jessica Tallon & Matt Molyneaux
#
# This file is part of Inboxen.
#
# Inboxen is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Inboxen is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Inboxen. If not, see <http://www.gnu.org/licenses/>.
##
from django import test
from django.contrib.auth import get_user_model
from django.core import urlresolvers
from inboxen import models
BODY = """
Hey there!
==========
This is a test post:
* A list item
* And another
Bye!
"""
class BlogTestCase(test.TestCase):
fixtures = ['inboxen_testdata.json']
def test_blog_index(self):
url = urlresolvers.reverse("blog")
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_blog_post(self):
user = get_user_model().objects.get(username="isdabizda")
user.is_staff = True
user.save()
login = self.client.login(username=user.username, password="123456")
if not login:
raise Exception("Could not log in")
params = {"title": "A Test Post", "body": BODY}
response = self.client.post(urlresolvers.reverse("blog-post-add"), params)
self.assertRedirects(response, urlresolvers.reverse("blog"))
post = models.BlogPost.objects.latest("date")
self.assertEqual(post.subject, "A Test Post")
self.assertEqual(post.body, BODY)
|
<commit_before><commit_msg>Test blog rendering and posting<commit_after>
|
##
# Copyright (C) 2014 Jessica Tallon & Matt Molyneaux
#
# This file is part of Inboxen.
#
# Inboxen is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Inboxen is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Inboxen. If not, see <http://www.gnu.org/licenses/>.
##
from django import test
from django.contrib.auth import get_user_model
from django.core import urlresolvers
from inboxen import models
BODY = """
Hey there!
==========
This is a test post:
* A list item
* And another
Bye!
"""
class BlogTestCase(test.TestCase):
fixtures = ['inboxen_testdata.json']
def test_blog_index(self):
url = urlresolvers.reverse("blog")
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_blog_post(self):
user = get_user_model().objects.get(username="isdabizda")
user.is_staff = True
user.save()
login = self.client.login(username=user.username, password="123456")
if not login:
raise Exception("Could not log in")
params = {"title": "A Test Post", "body": BODY}
response = self.client.post(urlresolvers.reverse("blog-post-add"), params)
self.assertRedirects(response, urlresolvers.reverse("blog"))
post = models.BlogPost.objects.latest("date")
self.assertEqual(post.subject, "A Test Post")
self.assertEqual(post.body, BODY)
|
Test blog rendering and posting##
# Copyright (C) 2014 Jessica Tallon & Matt Molyneaux
#
# This file is part of Inboxen.
#
# Inboxen is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Inboxen is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Inboxen. If not, see <http://www.gnu.org/licenses/>.
##
from django import test
from django.contrib.auth import get_user_model
from django.core import urlresolvers
from inboxen import models
BODY = """
Hey there!
==========
This is a test post:
* A list item
* And another
Bye!
"""
class BlogTestCase(test.TestCase):
fixtures = ['inboxen_testdata.json']
def test_blog_index(self):
url = urlresolvers.reverse("blog")
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_blog_post(self):
user = get_user_model().objects.get(username="isdabizda")
user.is_staff = True
user.save()
login = self.client.login(username=user.username, password="123456")
if not login:
raise Exception("Could not log in")
params = {"title": "A Test Post", "body": BODY}
response = self.client.post(urlresolvers.reverse("blog-post-add"), params)
self.assertRedirects(response, urlresolvers.reverse("blog"))
post = models.BlogPost.objects.latest("date")
self.assertEqual(post.subject, "A Test Post")
self.assertEqual(post.body, BODY)
|
<commit_before><commit_msg>Test blog rendering and posting<commit_after>##
# Copyright (C) 2014 Jessica Tallon & Matt Molyneaux
#
# This file is part of Inboxen.
#
# Inboxen is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Inboxen is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Inboxen. If not, see <http://www.gnu.org/licenses/>.
##
from django import test
from django.contrib.auth import get_user_model
from django.core import urlresolvers
from inboxen import models
BODY = """
Hey there!
==========
This is a test post:
* A list item
* And another
Bye!
"""
class BlogTestCase(test.TestCase):
fixtures = ['inboxen_testdata.json']
def test_blog_index(self):
url = urlresolvers.reverse("blog")
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_blog_post(self):
user = get_user_model().objects.get(username="isdabizda")
user.is_staff = True
user.save()
login = self.client.login(username=user.username, password="123456")
if not login:
raise Exception("Could not log in")
params = {"title": "A Test Post", "body": BODY}
response = self.client.post(urlresolvers.reverse("blog-post-add"), params)
self.assertRedirects(response, urlresolvers.reverse("blog"))
post = models.BlogPost.objects.latest("date")
self.assertEqual(post.subject, "A Test Post")
self.assertEqual(post.body, BODY)
|
|
8e4c58eabe2160b79044f6cc8d0cc04844b4c59f
|
Graphs/breadthFirstSearch.py
|
Graphs/breadthFirstSearch.py
|
#!/usr/local/bin/python
# edX Intro to Computational Thinking and Data Science
# Graphs - Breadth First Search to find shortest path lecture code
import graphs
def printPath(path):
"""Assumes path is a list of nodes"""
result = ''
for i in range(len(path)):
result += str(path[i])
if i != len(path) - 1:
result += '->'
return result
def BFS(graph, start, end, toPrint=False):
initPath = [start]
pathQueue = [initPath]
if toPrint:
print('Current BFS path: {}'.format(printPath(pathQueue)))
while len(pathQueue) != 0:
# Get and remove oldest element in pathQueue
tmpPath = pathQueue.pop(0)
print('Current BFS path: {}'.format(printPath(tmpPath)))
lastNode = tmpPath[-1]
if lastNode == end:
return tmpPath
for nextNode in graph.childrenOf(lastNode):
if nextNode not in tmpPath:
newPath = tmpPath + [nextNode]
pathQueue.append(newPath)
return None
def shortestPath(graph, start, end, toPrint=False):
return BFS(graph, start, end, toPrint)
def testSP(source, destination):
g = graphs.buildCityGraph(graphs.Digraph)
sp = shortestPath(g, g.getNode(source), g.getNode(destination),
toPrint=True)
if sp is not None:
print('Shortest path from {} to {} is {}'
.format(source, destination, printPath(sp)))
else:
print('There is no path from {} to {}'.format(source, destination))
def main():
testSP('Chicago', 'Boston')
testSP('Boston', 'Phoenix')
if __name__ == '__main__':
main()
|
Add breadth first search for graphs
|
Add breadth first search for graphs
|
Python
|
mit
|
HKuz/Test_Code
|
Add breadth first search for graphs
|
#!/usr/local/bin/python
# edX Intro to Computational Thinking and Data Science
# Graphs - Breadth First Search to find shortest path lecture code
import graphs
def printPath(path):
"""Assumes path is a list of nodes"""
result = ''
for i in range(len(path)):
result += str(path[i])
if i != len(path) - 1:
result += '->'
return result
def BFS(graph, start, end, toPrint=False):
initPath = [start]
pathQueue = [initPath]
if toPrint:
print('Current BFS path: {}'.format(printPath(pathQueue)))
while len(pathQueue) != 0:
# Get and remove oldest element in pathQueue
tmpPath = pathQueue.pop(0)
print('Current BFS path: {}'.format(printPath(tmpPath)))
lastNode = tmpPath[-1]
if lastNode == end:
return tmpPath
for nextNode in graph.childrenOf(lastNode):
if nextNode not in tmpPath:
newPath = tmpPath + [nextNode]
pathQueue.append(newPath)
return None
def shortestPath(graph, start, end, toPrint=False):
return BFS(graph, start, end, toPrint)
def testSP(source, destination):
g = graphs.buildCityGraph(graphs.Digraph)
sp = shortestPath(g, g.getNode(source), g.getNode(destination),
toPrint=True)
if sp is not None:
print('Shortest path from {} to {} is {}'
.format(source, destination, printPath(sp)))
else:
print('There is no path from {} to {}'.format(source, destination))
def main():
testSP('Chicago', 'Boston')
testSP('Boston', 'Phoenix')
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add breadth first search for graphs<commit_after>
|
#!/usr/local/bin/python
# edX Intro to Computational Thinking and Data Science
# Graphs - Breadth First Search to find shortest path lecture code
import graphs
def printPath(path):
"""Assumes path is a list of nodes"""
result = ''
for i in range(len(path)):
result += str(path[i])
if i != len(path) - 1:
result += '->'
return result
def BFS(graph, start, end, toPrint=False):
initPath = [start]
pathQueue = [initPath]
if toPrint:
print('Current BFS path: {}'.format(printPath(pathQueue)))
while len(pathQueue) != 0:
# Get and remove oldest element in pathQueue
tmpPath = pathQueue.pop(0)
print('Current BFS path: {}'.format(printPath(tmpPath)))
lastNode = tmpPath[-1]
if lastNode == end:
return tmpPath
for nextNode in graph.childrenOf(lastNode):
if nextNode not in tmpPath:
newPath = tmpPath + [nextNode]
pathQueue.append(newPath)
return None
def shortestPath(graph, start, end, toPrint=False):
return BFS(graph, start, end, toPrint)
def testSP(source, destination):
g = graphs.buildCityGraph(graphs.Digraph)
sp = shortestPath(g, g.getNode(source), g.getNode(destination),
toPrint=True)
if sp is not None:
print('Shortest path from {} to {} is {}'
.format(source, destination, printPath(sp)))
else:
print('There is no path from {} to {}'.format(source, destination))
def main():
testSP('Chicago', 'Boston')
testSP('Boston', 'Phoenix')
if __name__ == '__main__':
main()
|
Add breadth first search for graphs#!/usr/local/bin/python
# edX Intro to Computational Thinking and Data Science
# Graphs - Breadth First Search to find shortest path lecture code
import graphs
def printPath(path):
"""Assumes path is a list of nodes"""
result = ''
for i in range(len(path)):
result += str(path[i])
if i != len(path) - 1:
result += '->'
return result
def BFS(graph, start, end, toPrint=False):
initPath = [start]
pathQueue = [initPath]
if toPrint:
print('Current BFS path: {}'.format(printPath(pathQueue)))
while len(pathQueue) != 0:
# Get and remove oldest element in pathQueue
tmpPath = pathQueue.pop(0)
print('Current BFS path: {}'.format(printPath(tmpPath)))
lastNode = tmpPath[-1]
if lastNode == end:
return tmpPath
for nextNode in graph.childrenOf(lastNode):
if nextNode not in tmpPath:
newPath = tmpPath + [nextNode]
pathQueue.append(newPath)
return None
def shortestPath(graph, start, end, toPrint=False):
return BFS(graph, start, end, toPrint)
def testSP(source, destination):
g = graphs.buildCityGraph(graphs.Digraph)
sp = shortestPath(g, g.getNode(source), g.getNode(destination),
toPrint=True)
if sp is not None:
print('Shortest path from {} to {} is {}'
.format(source, destination, printPath(sp)))
else:
print('There is no path from {} to {}'.format(source, destination))
def main():
testSP('Chicago', 'Boston')
testSP('Boston', 'Phoenix')
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add breadth first search for graphs<commit_after>#!/usr/local/bin/python
# edX Intro to Computational Thinking and Data Science
# Graphs - Breadth First Search to find shortest path lecture code
import graphs
def printPath(path):
"""Assumes path is a list of nodes"""
result = ''
for i in range(len(path)):
result += str(path[i])
if i != len(path) - 1:
result += '->'
return result
def BFS(graph, start, end, toPrint=False):
initPath = [start]
pathQueue = [initPath]
if toPrint:
print('Current BFS path: {}'.format(printPath(pathQueue)))
while len(pathQueue) != 0:
# Get and remove oldest element in pathQueue
tmpPath = pathQueue.pop(0)
print('Current BFS path: {}'.format(printPath(tmpPath)))
lastNode = tmpPath[-1]
if lastNode == end:
return tmpPath
for nextNode in graph.childrenOf(lastNode):
if nextNode not in tmpPath:
newPath = tmpPath + [nextNode]
pathQueue.append(newPath)
return None
def shortestPath(graph, start, end, toPrint=False):
return BFS(graph, start, end, toPrint)
def testSP(source, destination):
g = graphs.buildCityGraph(graphs.Digraph)
sp = shortestPath(g, g.getNode(source), g.getNode(destination),
toPrint=True)
if sp is not None:
print('Shortest path from {} to {} is {}'
.format(source, destination, printPath(sp)))
else:
print('There is no path from {} to {}'.format(source, destination))
def main():
testSP('Chicago', 'Boston')
testSP('Boston', 'Phoenix')
if __name__ == '__main__':
main()
|
|
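The same breadth-first idea can be exercised without the graphs module by running it over a plain adjacency dict; the edges below only approximate the lecture's city graph and are illustrative:

from collections import deque

def bfs_shortest_path(adj, start, end):
    queue = deque([[start]])
    while queue:
        path = queue.popleft()          # oldest partial path first
        if path[-1] == end:
            return path
        for nxt in adj.get(path[-1], []):
            if nxt not in path:         # avoid revisiting nodes on this path
                queue.append(path + [nxt])
    return None

adj = {'Boston': ['Providence', 'New York'], 'Providence': ['Boston', 'New York'],
       'New York': ['Chicago'], 'Chicago': ['Denver', 'Phoenix'],
       'Denver': ['Phoenix', 'New York'], 'Phoenix': []}
print(bfs_shortest_path(adj, 'Boston', 'Phoenix'))
# ['Boston', 'New York', 'Chicago', 'Phoenix']
print(bfs_shortest_path(adj, 'Chicago', 'Boston'))   # None - no edge leads back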
58ddb21bb245878d6358de6dd40ba4725414eb6a
|
greengrapher.py
|
greengrapher.py
|
from argparse import ArgumentParser
from greengraph import Greengraph
from matplotlib import pyplot as plt
if __name__ == "__main__":
parser = ArgumentParser(description = "Plot the amount of green space \
between two locations")
parser.add_argument('--start')
parser.add_argument('--end')
parser.add_argument('--steps')
parser.add_argument('--out')
arguments= parser.parse_args()
if not arguments.steps:
arguments.steps = 20
mygraph=Greengraph(arguments.start, arguments.end)
data = mygraph.green_between(arguments.steps)
plt.plot(data)
plt.title('Green space versus location.')
plt.xlabel('Step.')
plt.ylabel('Number of green pixels.')
if arguments.out:
plt.savefig(arguments.out)
else:
plt.show()
|
Add a command line interface.
|
Add a command line interface.
|
Python
|
apache-2.0
|
paulsbrookes/greengraph
|
Add a command line interface.
|
from argparse import ArgumentParser
from greengraph import Greengraph
from matplotlib import pyplot as plt
if __name__ == "__main__":
parser = ArgumentParser(description = "Plot the amount of green space \
between two locations")
parser.add_argument('--start')
parser.add_argument('--end')
parser.add_argument('--steps')
parser.add_argument('--out')
arguments= parser.parse_args()
if not arguments.steps:
arguments.steps = 20
mygraph=Greengraph(arguments.start, arguments.end)
data = mygraph.green_between(arguments.steps)
plt.plot(data)
plt.title('Green space versus location.')
plt.xlabel('Step.')
plt.ylabel('Number of green pixels.')
if arguments.out:
plt.savefig(arguments.out)
else:
plt.show()
|
<commit_before><commit_msg>Add a command line interface.<commit_after>
|
from argparse import ArgumentParser
from greengraph import Greengraph
from matplotlib import pyplot as plt
if __name__ == "__main__":
parser = ArgumentParser(description = "Plot the amount of green space \
between two locations")
parser.add_argument('--start')
parser.add_argument('--end')
parser.add_argument('--steps')
parser.add_argument('--out')
arguments= parser.parse_args()
if not arguments.steps:
arguments.steps = 20
mygraph=Greengraph(arguments.start, arguments.end)
data = mygraph.green_between(arguments.steps)
plt.plot(data)
plt.title('Green space versus location.')
plt.xlabel('Step.')
plt.ylabel('Number of green pixels.')
if arguments.out:
plt.savefig(arguments.out)
else:
plt.show()
|
Add a command line interface.from argparse import ArgumentParser
from greengraph import Greengraph
from matplotlib import pyplot as plt
if __name__ == "__main__":
parser = ArgumentParser(description = "Plot the amount of green space \
between two locations")
parser.add_argument('--start')
parser.add_argument('--end')
parser.add_argument('--steps')
parser.add_argument('--out')
arguments= parser.parse_args()
if not arguments.steps:
arguments.steps = 20
mygraph=Greengraph(arguments.start, arguments.end)
data = mygraph.green_between(arguments.steps)
plt.plot(data)
plt.title('Green space versus location.')
plt.xlabel('Step.')
plt.ylabel('Number of green pixels.')
if arguments.out:
plt.savefig(arguments.out)
else:
plt.show()
|
<commit_before><commit_msg>Add a command line interface.<commit_after>from argparse import ArgumentParser
from greengraph import Greengraph
from matplotlib import pyplot as plt
if __name__ == "__main__":
parser = ArgumentParser(description = "Plot the amount of green space \
between two locations")
parser.add_argument('--start')
parser.add_argument('--end')
parser.add_argument('--steps')
parser.add_argument('--out')
arguments= parser.parse_args()
if not arguments.steps:
arguments.steps = 20
mygraph=Greengraph(arguments.start, arguments.end)
data = mygraph.green_between(arguments.steps)
plt.plot(data)
plt.title('Green space versus location.')
plt.xlabel('Step.')
plt.ylabel('Number of green pixels.')
if arguments.out:
plt.savefig(arguments.out)
else:
plt.show()
|
|
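For reference, a hedged sketch of the same argument wiring with explicit types and defaults; in the script above, --steps arrives as a string whenever it is supplied on the command line, so green_between() sees mixed types:

from argparse import ArgumentParser

parser = ArgumentParser(description="Plot the amount of green space between two locations")
parser.add_argument('--start', required=True)
parser.add_argument('--end', required=True)
parser.add_argument('--steps', type=int, default=20)  # parsed to int up front
parser.add_argument('--out', default=None)
arguments = parser.parse_args()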
fc56abad64ae9601b3ae2ef9fa4e55134041597a
|
active_redis/active_redis.py
|
active_redis/active_redis.py
|
def defaultConfig(*args, **kwargs):
"""Sets the default Redis client configuration."""
class ActiveRedis(object):
"""
Core class for interacting with Redis via ActiveRedis.
"""
def __init__(self, *args, **kwargs):
try:
if isinstance(args[0], Redis):
self.redis = args[0]
else:
self.redis = Redis(*args, **kwargs)
except IndexError:
self.redis = Redis(*args, **kwargs)
def _create_datatype(self, type, key=None):
"""Creates a Redis data type."""
return DataType.load(type, self.redis, key)
def string(self, key=None):
"""Returns a new Redis string."""
return self._create_datatype('string', key)
def list(self, key=None):
"""Returns a new Redis list."""
return self._create_datatype('list', key)
def set(self, key=None):
"""Returns a new Redis set."""
return self._create_datatype('set', key)
def sorted_set(self, key=None):
"""Returns a new Redis sorted set."""
return self._create_datatype('sorted_set', key)
def hash(self, key=None):
"""Returns a new Redis hash."""
return self._create_datatype('hash', key)
|
Add core Active Redis module.
|
Add core Active Redis module.
|
Python
|
mit
|
kuujo/active-redis
|
Add core Active Redis module.
|
def defaultConfig(*args, **kwargs):
"""Sets the default Redis client configuration."""
class ActiveRedis(object):
"""
Core class for interacting with Redis via ActiveRedis.
"""
def __init__(self, *args, **kwargs):
try:
if isinstance(args[0], Redis):
self.redis = args[0]
else:
self.redis = Redis(*args, **kwargs)
except IndexError:
self.redis = Redis(*args, **kwargs)
def _create_datatype(self, type, key=None):
"""Creates a Redis data type."""
return DataType.load(type, self.redis, key)
def string(self, key=None):
"""Returns a new Redis string."""
return self._create_datatype('string', key)
def list(self, key=None):
"""Returns a new Redis list."""
return self._create_datatype('list', key)
def set(self, key=None):
"""Returns a new Redis set."""
return self._create_datatype('set', key)
def sorted_set(self, key=None):
"""Returns a new Redis sorted set."""
return self._create_datatype('sorted_set', key)
def hash(self, key=None):
"""Returns a new Redis hash."""
return self._create_datatype('hash', key)
|
<commit_before><commit_msg>Add core Active Redis module.<commit_after>
|
def defaultConfig(*args, **kwargs):
"""Sets the default Redis client configuration."""
class ActiveRedis(object):
"""
Core class for interacting with Redis via ActiveRedis.
"""
def __init__(self, *args, **kwargs):
try:
if isinstance(args[0], Redis):
self.redis = args[0]
else:
self.redis = Redis(*args, **kwargs)
except IndexError:
self.redis = Redis(*args, **kwargs)
def _create_datatype(self, type, key=None):
"""Creates a Redis data type."""
return DataType.load(type, self.redis, key)
def string(self, key=None):
"""Returns a new Redis string."""
return self._create_datatype('string', key)
def list(self, key=None):
"""Returns a new Redis list."""
return self._create_datatype('list', key)
def set(self, key=None):
"""Returns a new Redis set."""
return self._create_datatype('set', key)
def sorted_set(self, key=None):
"""Returns a new Redis sorted set."""
return self._create_datatype('sorted_set', key)
def hash(self, key=None):
"""Returns a new Redis hash."""
return self._create_datatype('hash', key)
|
Add core Active Redis module.
def defaultConfig(*args, **kwargs):
"""Sets the default Redis client configuration."""
class ActiveRedis(object):
"""
Core class for interacting with Redis via ActiveRedis.
"""
def __init__(self, *args, **kwargs):
try:
if isinstance(args[0], Redis):
self.redis = args[0]
else:
self.redis = Redis(*args, **kwargs)
except IndexError:
self.redis = Redis(*args, **kwargs)
def _create_datatype(self, type, key=None):
"""Creates a Redis data type."""
return DataType.load(type, self.redis, key)
def string(self, key=None):
"""Returns a new Redis string."""
return self._create_datatype('string', key)
def list(self, key=None):
"""Returns a new Redis list."""
return self._create_datatype('list', key)
def set(self, key=None):
"""Returns a new Redis set."""
return self._create_datatype('set', key)
def sorted_set(self, key=None):
"""Returns a new Redis sorted set."""
return self._create_datatype('sorted_set', key)
def hash(self, key=None):
"""Returns a new Redis hash."""
return self._create_datatype('hash', key)
|
<commit_before><commit_msg>Add core Active Redis module.<commit_after>
def defaultConfig(*args, **kwargs):
"""Sets the default Redis client configuration."""
class ActiveRedis(object):
"""
Core class for interacting with Redis via ActiveRedis.
"""
def __init__(self, *args, **kwargs):
try:
if isinstance(args[0], Redis):
self.redis = args[0]
else:
self.redis = Redis(*args, **kwargs)
except IndexError:
self.redis = Redis(*args, **kwargs)
def _create_datatype(self, type, key=None):
"""Creates a Redis data type."""
return DataType.load(type, self.redis, key)
def string(self, key=None):
"""Returns a new Redis string."""
return self._create_datatype('string', key)
def list(self, key=None):
"""Returns a new Redis list."""
return self._create_datatype('list', key)
def set(self, key=None):
"""Returns a new Redis set."""
return self._create_datatype('set', key)
def sorted_set(self, key=None):
"""Returns a new Redis sorted set."""
return self._create_datatype('sorted_set', key)
def hash(self, key=None):
"""Returns a new Redis hash."""
return self._create_datatype('hash', key)
|
|
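Assuming the module's missing imports (Redis from redis-py and the package's DataType registry) are present elsewhere, usage might look like this sketch; connection details and key names are placeholders:

client = ActiveRedis('localhost', port=6379)  # or ActiveRedis(existing_redis_instance)
tasks = client.list('tasks')   # wraps a Redis list stored under the key 'tasks'
tags = client.set('tags')      # likewise for a Redis set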
9f5adef59a8065f0524cc9c16c2ddc46e46f57cc
|
google-stt.py
|
google-stt.py
|
from scikits.samplerate import resample
from tempfile import mkstemp
from os import remove
import scikits.audiolab as audiolab
# if you want make the down sample rate using scipy.signal
#import scipy.signal
import urllib2
import sys
if len(sys.argv)<2 :
print 'Usage: %s <audio file.wav>' %sys.argv[0]
sys.exit(0)
File=sys.argv[1]
#making a file temp for manipulation
cd, FileNameTmp = mkstemp('TmpSpeechFile.flac')
#Frame Rate used by api speech from google
fr=16000.
#using audiolab to read wav file
Signal, fs = audiolab.wavread(File)[:2]
#changing the original sample rate to 16000fs fast mode
Signal = resample(Signal, fr/float(fs), 'sinc_best')
#changing sample rate from audio file using scipy this is a bit slow
#Signal=scipy.signal.resample(Signal,int(round(len(Getsignal)*fr)/float(fs)),window=None)
# file Format type
fmt = audiolab.Format('flac', 'pcm16')
nchannels = 1
# making the file .flac
afile = audiolab.Sndfile(FileNameTmp, 'w', fmt, nchannels, fr)
#writing in the file
afile.write_frames(Signal)
#Sending to google the file .flac
url = "https://www.google.com/speech-api/v1/recognize?xjerr=1&client=chromium&lang=pt-BR"
flac=open(FileNameTmp,"rb").read()
header = {'Content-Type' : 'audio/x-flac; rate=16000'}
req = urllib2.Request(url, flac, header)
data = urllib2.urlopen(req)
print data.read()
remove(FileNameTmp)
|
Add google speech to text sample
|
Add google speech to text sample
|
Python
|
mit
|
voidabhi/python-scripts,voidabhi/python-scripts,voidabhi/python-scripts,voidabhi/python-scripts,voidabhi/python-scripts
|
Add google speech to text sample
|
from scikits.samplerate import resample
from tempfile import mkstemp
from os import remove
import scikits.audiolab as audiolab
# if you want make the down sample rate using scipy.signal
#import scipy.signal
import urllib2
import sys
if len(sys.argv)<2 :
print 'Usage: %s <audio file.wav>' %sys.argv[0]
sys.exit(0)
File=sys.argv[1]
#making a file temp for manipulation
cd, FileNameTmp = mkstemp('TmpSpeechFile.flac')
#Frame Rate used by api speech from google
fr=16000.
#using audiolab to read wav file
Signal, fs = audiolab.wavread(File)[:2]
#changing the original sample rate to 16000fs fast mode
Signal = resample(Signal, fr/float(fs), 'sinc_best')
#changing sample rate from audio file using scipy this is a bit slow
#Signal=scipy.signal.resample(Signal,int(round(len(Getsignal)*fr)/float(fs)),window=None)
# file Format type
fmt = audiolab.Format('flac', 'pcm16')
nchannels = 1
# making the file .flac
afile = audiolab.Sndfile(FileNameTmp, 'w', fmt, nchannels, fr)
#writing in the file
afile.write_frames(Signal)
#Sending to google the file .flac
url = "https://www.google.com/speech-api/v1/recognize?xjerr=1&client=chromium&lang=pt-BR"
flac=open(FileNameTmp,"rb").read()
header = {'Content-Type' : 'audio/x-flac; rate=16000'}
req = urllib2.Request(url, flac, header)
data = urllib2.urlopen(req)
print data.read()
remove(FileNameTmp)
|
<commit_before><commit_msg>Add google speech to text sample<commit_after>
|
from scikits.samplerate import resample
from tempfile import mkstemp
from os import remove
import scikits.audiolab as audiolab
# if you want make the down sample rate using scipy.signal
#import scipy.signal
import urllib2
import sys
if len(sys.argv)<2 :
print 'Usage: %s <audio file.wav>' %sys.argv[0]
sys.exit(0)
File=sys.argv[1]
#making a file temp for manipulation
cd, FileNameTmp = mkstemp('TmpSpeechFile.flac')
#Frame Rate used by api speech from google
fr=16000.
#using audiolab to read wav file
Signal, fs = audiolab.wavread(File)[:2]
#changing the original sample rate to 16000fs fast mode
Signal = resample(Signal, fr/float(fs), 'sinc_best')
#changing sample rate from audio file using scipy this is a bit slow
#Signal=scipy.signal.resample(Signal,int(round(len(Getsignal)*fr)/float(fs)),window=None)
# file Format type
fmt = audiolab.Format('flac', 'pcm16')
nchannels = 1
# making the file .flac
afile = audiolab.Sndfile(FileNameTmp, 'w', fmt, nchannels, fr)
#writing in the file
afile.write_frames(Signal)
#Sending to google the file .flac
url = "https://www.google.com/speech-api/v1/recognize?xjerr=1&client=chromium&lang=pt-BR"
flac=open(FileNameTmp,"rb").read()
header = {'Content-Type' : 'audio/x-flac; rate=16000'}
req = urllib2.Request(url, flac, header)
data = urllib2.urlopen(req)
print data.read()
remove(FileNameTmp)
|
Add google speech to text samplefrom scikits.samplerate import resample
from tempfile import mkstemp
from os import remove
import scikits.audiolab as audiolab
# if you want make the down sample rate using scipy.signal
#import scipy.signal
import urllib2
import sys
if len(sys.argv)<2 :
print 'Usage: %s <audio file.wav>' %sys.argv[0]
sys.exit(0)
File=sys.argv[1]
#making a file temp for manipulation
cd, FileNameTmp = mkstemp('TmpSpeechFile.flac')
#Frame Rate used by api speech from google
fr=16000.
#using audiolab to read wav file
Signal, fs = audiolab.wavread(File)[:2]
#changing the original sample rate to 16000fs fast mode
Signal = resample(Signal, fr/float(fs), 'sinc_best')
#changing sample rate from audio file using scipy this is a bit slow
#Signal=scipy.signal.resample(Signal,int(round(len(Getsignal)*fr)/float(fs)),window=None)
# file Format type
fmt = audiolab.Format('flac', 'pcm16')
nchannels = 1
# making the file .flac
afile = audiolab.Sndfile(FileNameTmp, 'w', fmt, nchannels, fr)
#writing in the file
afile.write_frames(Signal)
#Sending to google the file .flac
url = "https://www.google.com/speech-api/v1/recognize?xjerr=1&client=chromium&lang=pt-BR"
flac=open(FileNameTmp,"rb").read()
header = {'Content-Type' : 'audio/x-flac; rate=16000'}
req = urllib2.Request(url, flac, header)
data = urllib2.urlopen(req)
print data.read()
remove(FileNameTmp)
|
<commit_before><commit_msg>Add google speech to text sample<commit_after>from scikits.samplerate import resample
from tempfile import mkstemp
from os import remove
import scikits.audiolab as audiolab
# if you want make the down sample rate using scipy.signal
#import scipy.signal
import urllib2
import sys
if len(sys.argv)<2 :
print 'Usage: %s <audio file.wav>' %sys.argv[0]
sys.exit(0)
File=sys.argv[1]
#making a file temp for manipulation
cd, FileNameTmp = mkstemp('TmpSpeechFile.flac')
#Frame Rate used by api speech from google
fr=16000.
#using audiolab to read wav file
Signal, fs = audiolab.wavread(File)[:2]
#changing the original sample rate to 16000fs fast mode
Signal = resample(Signal, fr/float(fs), 'sinc_best')
#changing sample rate from audio file using scipy this is a bit slow
#Signal=scipy.signal.resample(Signal,int(round(len(Getsignal)*fr)/float(fs)),window=None)
# file Format type
fmt = audiolab.Format('flac', 'pcm16')
nchannels = 1
# making the file .flac
afile = audiolab.Sndfile(FileNameTmp, 'w', fmt, nchannels, fr)
#writing in the file
afile.write_frames(Signal)
#Sending to google the file .flac
url = "https://www.google.com/speech-api/v1/recognize?xjerr=1&client=chromium&lang=pt-BR"
flac=open(FileNameTmp,"rb").read()
header = {'Content-Type' : 'audio/x-flac; rate=16000'}
req = urllib2.Request(url, flac, header)
data = urllib2.urlopen(req)
print data.read()
remove(FileNameTmp)
|
|
c269315ec83a0cfc6ec6c5bd58945ba68d6f69f3
|
analyzarr/ui/custom_tools.py
|
analyzarr/ui/custom_tools.py
|
from chaco.tools.api import ScatterInspector
from numpy import zeros
class PeakSelectionTool(ScatterInspector):
def _deselect(self, index=None):
super(PeakSelectionTool, self)._deselect(index)
self._update_mask()
# override this method so that we only select one peak at a time
def _select(self, index, append=False):
super(PeakSelectionTool, self)._select(index, append)
self._update_mask()
def _update_mask(self):
plot = self.component
for name in ('index', 'value'):
if not hasattr(plot, name):
continue
md = getattr(plot, name).metadata
mask = zeros(getattr(plot, name).get_data().shape[0],
dtype=bool)
mask[list(md[self.selection_metadata_name])]=True
md['selection_masks'] = mask
|
Add missing custom tools file
|
Add missing custom tools file
|
Python
|
bsd-2-clause
|
msarahan/analyzarr,msarahan/analyzarr
|
Add missing custom tools file
|
from chaco.tools.api import ScatterInspector
from numpy import zeros
class PeakSelectionTool(ScatterInspector):
def _deselect(self, index=None):
super(PeakSelectionTool, self)._deselect(index)
self._update_mask()
# override this method so that we only select one peak at a time
def _select(self, index, append=False):
super(PeakSelectionTool, self)._select(index, append)
self._update_mask()
def _update_mask(self):
plot = self.component
for name in ('index', 'value'):
if not hasattr(plot, name):
continue
md = getattr(plot, name).metadata
mask = zeros(getattr(plot, name).get_data().shape[0],
dtype=bool)
mask[list(md[self.selection_metadata_name])]=True
md['selection_masks'] = mask
|
<commit_before><commit_msg>Add missing custom tools file<commit_after>
|
from chaco.tools.api import ScatterInspector
from numpy import zeros
class PeakSelectionTool(ScatterInspector):
def _deselect(self, index=None):
super(PeakSelectionTool, self)._deselect(index)
self._update_mask()
# override this method so that we only select one peak at a time
def _select(self, index, append=False):
super(PeakSelectionTool, self)._select(index, append)
self._update_mask()
def _update_mask(self):
plot = self.component
for name in ('index', 'value'):
if not hasattr(plot, name):
continue
md = getattr(plot, name).metadata
mask = zeros(getattr(plot, name).get_data().shape[0],
dtype=bool)
mask[list(md[self.selection_metadata_name])]=True
md['selection_masks'] = mask
|
Add missing custom tools filefrom chaco.tools.api import ScatterInspector
from numpy import zeros
class PeakSelectionTool(ScatterInspector):
def _deselect(self, index=None):
super(PeakSelectionTool, self)._deselect(index)
self._update_mask()
# override this method so that we only select one peak at a time
def _select(self, index, append=False):
super(PeakSelectionTool, self)._select(index, append)
self._update_mask()
def _update_mask(self):
plot = self.component
for name in ('index', 'value'):
if not hasattr(plot, name):
continue
md = getattr(plot, name).metadata
mask = zeros(getattr(plot, name).get_data().shape[0],
dtype=bool)
mask[list(md[self.selection_metadata_name])]=True
md['selection_masks'] = mask
|
<commit_before><commit_msg>Add missing custom tools file<commit_after>from chaco.tools.api import ScatterInspector
from numpy import zeros
class PeakSelectionTool(ScatterInspector):
def _deselect(self, index=None):
super(PeakSelectionTool, self)._deselect(index)
self._update_mask()
# override this method so that we only select one peak at a time
def _select(self, index, append=False):
super(PeakSelectionTool, self)._select(index, append)
self._update_mask()
def _update_mask(self):
plot = self.component
for name in ('index', 'value'):
if not hasattr(plot, name):
continue
md = getattr(plot, name).metadata
mask = zeros(getattr(plot, name).get_data().shape[0],
dtype=bool)
mask[list(md[self.selection_metadata_name])]=True
md['selection_masks'] = mask
|
|
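PeakSelectionTool above subclasses Chaco's ScatterInspector and mirrors each selection change into a boolean 'selection_masks' array on the plot metadata. A hypothetical wiring sketch, assuming a Chaco scatter renderer; the data values and variable names are illustrative and not part of the original file:

from chaco.api import ArrayPlotData, Plot
from analyzarr.ui.custom_tools import PeakSelectionTool

data = ArrayPlotData(x=[0.0, 1.0, 2.0, 3.0], y=[1.0, 3.0, 2.0, 4.0])
container = Plot(data)
# Plot.plot() returns a list of renderers; the scatter renderer hosts the tool.
scatter = container.plot(("x", "y"), type="scatter")[0]
scatter.tools.append(PeakSelectionTool(scatter))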
6190a721030094d8eb9016fb85cb56e7d451b157
|
neutronclient/tests/functional/test_subnet_create.py
|
neutronclient/tests/functional/test_subnet_create.py
|
# Copyright 2015 Hewlett-Packard Development Company, L.P
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutronclient.tests.functional import base
class SubnetCreateNeutronClientCLITest(base.ClientTestBase):
def test_create_subnet_net_name_first(self):
self.neutron('net-create', params='netwrk-1')
self.addCleanup(self.neutron, 'net-delete netwrk-1')
self.neutron('subnet-create netwrk-1',
params='--name fake --gateway 192.168.51.1 '
'192.168.51.0/24')
self.addCleanup(self.neutron, 'subnet-delete fake')
subnet_list = self.parser.listing(self.neutron('subnet-list'))
self.assertTableStruct(subnet_list, ['id', 'name', 'cidr',
'allocation_pools'])
found = False
for row in subnet_list:
if row.get('name') == 'fake':
found = True
break
if not found:
self.fail('Created subnet not found in list')
|
Add functional test for subnet create
|
Add functional test for subnet create
This test adds a functional test to verify a subnet-create command
with the arg order used in the docs. python-neutronclient introduced
a regression which broke the usage of this order. This test will
prevent this from happening in the future.
Change-Id: If7e4211a4cbf33bc87a1304553ad3dc9c89346c4
Related-Bug: #1442771
|
Python
|
apache-2.0
|
johnpbatty/python-neutronclient,rackerlabs/rackspace-python-neutronclient,venusource/python-neutronclient,Juniper/python-neutronclient,huntxu/python-neutronclient,Juniper/python-neutronclient,cryptickp/python-neutronclient,rackerlabs/rackspace-python-neutronclient,NeCTAR-RC/python-neutronclient,johnpbatty/python-neutronclient,openstack/python-neutronclient,eayunstack/python-neutronclient,noironetworks/python-neutronclient,NeCTAR-RC/python-neutronclient,openstack/python-neutronclient,huntxu/python-neutronclient,eayunstack/python-neutronclient,noironetworks/python-neutronclient,cryptickp/python-neutronclient,mangelajo/python-neutronclient,mangelajo/python-neutronclient,venusource/python-neutronclient
|
Add functional test for subnet create
This test adds a functional test to verify a subnet-create command
with the arg order used in the docs. python-neutronclient introduced
a regression which broke the usage of this order. This test will
prevent this from happening in the future.
Change-Id: If7e4211a4cbf33bc87a1304553ad3dc9c89346c4
Related-Bug: #1442771
|
# Copyright 2015 Hewlett-Packard Development Company, L.P
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutronclient.tests.functional import base
class SubnetCreateNeutronClientCLITest(base.ClientTestBase):
def test_create_subnet_net_name_first(self):
self.neutron('net-create', params='netwrk-1')
self.addCleanup(self.neutron, 'net-delete netwrk-1')
self.neutron('subnet-create netwrk-1',
params='--name fake --gateway 192.168.51.1 '
'192.168.51.0/24')
self.addCleanup(self.neutron, 'subnet-delete fake')
subnet_list = self.parser.listing(self.neutron('subnet-list'))
self.assertTableStruct(subnet_list, ['id', 'name', 'cidr',
'allocation_pools'])
found = False
for row in subnet_list:
if row.get('name') == 'fake':
found = True
break
if not found:
self.fail('Created subnet not found in list')
|
<commit_before><commit_msg>Add functional test for subnet create
This test adds a functional test to verify a subnet-create command
with the arg order used in the docs. python-neutronclient introduced
a regression which broke the usage of this order. This test will
prevent this from happening in the future.
Change-Id: If7e4211a4cbf33bc87a1304553ad3dc9c89346c4
Related-Bug: #1442771<commit_after>
|
# Copyright 2015 Hewlett-Packard Development Company, L.P
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutronclient.tests.functional import base
class SubnetCreateNeutronClientCLITest(base.ClientTestBase):
def test_create_subnet_net_name_first(self):
self.neutron('net-create', params='netwrk-1')
self.addCleanup(self.neutron, 'net-delete netwrk-1')
self.neutron('subnet-create netwrk-1',
params='--name fake --gateway 192.168.51.1 '
'192.168.51.0/24')
self.addCleanup(self.neutron, 'subnet-delete fake')
subnet_list = self.parser.listing(self.neutron('subnet-list'))
self.assertTableStruct(subnet_list, ['id', 'name', 'cidr',
'allocation_pools'])
found = False
for row in subnet_list:
if row.get('name') == 'fake':
found = True
break
if not found:
self.fail('Created subnet not found in list')
|
Add functional test for subnet create
This test adds a functional test to verify a subnet-create command
with the arg order used in the docs. python-neutronclient introduced
a regression which broke the usage of this order. This test will
prevent this from happening in the future.
Change-Id: If7e4211a4cbf33bc87a1304553ad3dc9c89346c4
Related-Bug: #1442771# Copyright 2015 Hewlett-Packard Development Company, L.P
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutronclient.tests.functional import base
class SubnetCreateNeutronClientCLITest(base.ClientTestBase):
def test_create_subnet_net_name_first(self):
self.neutron('net-create', params='netwrk-1')
self.addCleanup(self.neutron, 'net-delete netwrk-1')
self.neutron('subnet-create netwrk-1',
params='--name fake --gateway 192.168.51.1 '
'192.168.51.0/24')
self.addCleanup(self.neutron, 'subnet-delete fake')
subnet_list = self.parser.listing(self.neutron('subnet-list'))
self.assertTableStruct(subnet_list, ['id', 'name', 'cidr',
'allocation_pools'])
found = False
for row in subnet_list:
if row.get('name') == 'fake':
found = True
break
if not found:
self.fail('Created subnet not found in list')
|
<commit_before><commit_msg>Add functional test for subnet create
This test adds a functional test to verify a subnet-create command
with the arg order used in the docs. python-neutronclient introduced
a regression which broke the usage of this order. This test will
prevent this from happening in the future.
Change-Id: If7e4211a4cbf33bc87a1304553ad3dc9c89346c4
Related-Bug: #1442771<commit_after># Copyright 2015 Hewlett-Packard Development Company, L.P
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutronclient.tests.functional import base
class SubnetCreateNeutronClientCLITest(base.ClientTestBase):
def test_create_subnet_net_name_first(self):
self.neutron('net-create', params='netwrk-1')
self.addCleanup(self.neutron, 'net-delete netwrk-1')
self.neutron('subnet-create netwrk-1',
params='--name fake --gateway 192.168.51.1 '
'192.168.51.0/24')
self.addCleanup(self.neutron, 'subnet-delete fake')
subnet_list = self.parser.listing(self.neutron('subnet-list'))
self.assertTableStruct(subnet_list, ['id', 'name', 'cidr',
'allocation_pools'])
found = False
for row in subnet_list:
if row.get('name') == 'fake':
found = True
break
if not found:
self.fail('Created subnet not found in list')
|
|
7afa271a1e8513fa78300f5aee10f4e7b63df293
|
jupyter_notebook_config.py
|
jupyter_notebook_config.py
|
# Configuration file for Jupyter-notebook.
# https://github.com/jupyter/docker-demo-images/blob/master/resources/jupyter_notebook_config.partial.py
c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.open_browser = False
c.NotebookApp.port = 8888 #9999
# Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
# For headers sent by the upstream reverse proxy. Necessary if the proxy handles
# SSL
c.NotebookApp.trust_xheaders = True
# Include our extra templates
c.NotebookApp.extra_template_paths = ['/srv/templates/']
# Supply overrides for the tornado.web.Application that the IPython notebook
# uses.
#c.NotebookApp.tornado_settings = {
# 'headers': {
# 'X-Frame-Options': 'ALLOW FROM nature.com'
# },
# 'template_path':['/srv/ga/', '/srv/ipython/IPython/html',
# '/srv/ipython/IPython/html/templates']
#}
#c.Spawner.env_keep = ['PATH', 'PYTHONPATH', 'CONDA_ROOT', 'CONDA_DEFAULT_ENV', 'VIRTUAL_ENV', 'LANG', 'LC_ALL', 'NRN_NMODL_PATH']
# http://www.harrisgeospatial.com/Support/HelpArticlesDetail/TabId/219/ArtMID/900/ArticleID/14776/Integrating-the-Jupyter-Notebook-with-ESE.aspx
# We need to create an exception in the Jupyter Notebook security that will allow the Jupyter web page to be embedded in an HTML iframe
c.NotebookApp.tornado_settings = {
'headers': {
'Content-Security-Policy': "frame-ancestors 'self' http://*.projectpyrho.org "
}
#'static_url_prefix': 'https://cdn.jupyter.org/notebook/try-4.0.5/'
}
#'http://yourhostname:9191/Jupyter/'
|
Add config file TODO: edit to work with IFrames
|
Add config file TODO: edit to work with IFrames
|
Python
|
bsd-3-clause
|
ProjectPyRhO/Prometheus,ProjectPyRhO/Prometheus,ProjectPyRhO/Prometheus,ProjectPyRhO/Prometheus
|
Add config file TODO: edit to work with IFrames
|
# Configuration file for Jupyter-notebook.
# https://github.com/jupyter/docker-demo-images/blob/master/resources/jupyter_notebook_config.partial.py
c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.open_browser = False
c.NotebookApp.port = 8888 #9999
# Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
# For headers sent by the upstream reverse proxy. Necessary if the proxy handles
# SSL
c.NotebookApp.trust_xheaders = True
# Include our extra templates
c.NotebookApp.extra_template_paths = ['/srv/templates/']
# Supply overrides for the tornado.web.Application that the IPython notebook
# uses.
#c.NotebookApp.tornado_settings = {
# 'headers': {
# 'X-Frame-Options': 'ALLOW FROM nature.com'
# },
# 'template_path':['/srv/ga/', '/srv/ipython/IPython/html',
# '/srv/ipython/IPython/html/templates']
#}
#c.Spawner.env_keep = ['PATH', 'PYTHONPATH', 'CONDA_ROOT', 'CONDA_DEFAULT_ENV', 'VIRTUAL_ENV', 'LANG', 'LC_ALL', 'NRN_NMODL_PATH']
# http://www.harrisgeospatial.com/Support/HelpArticlesDetail/TabId/219/ArtMID/900/ArticleID/14776/Integrating-the-Jupyter-Notebook-with-ESE.aspx
# We need to create an exception in the Jupyter Notebook security that will allow the Jupyter web page to be embedded in an HTML iframe
c.NotebookApp.tornado_settings = {
'headers': {
'Content-Security-Policy': "frame-ancestors 'self' http://*.projectpyrho.org "
}
#'static_url_prefix': 'https://cdn.jupyter.org/notebook/try-4.0.5/'
}
#'http://yourhostname:9191/Jupyter/'
|
<commit_before><commit_msg>Add config file TODO: edit to work with IFrames<commit_after>
|
# Configuration file for Jupyter-notebook.
# https://github.com/jupyter/docker-demo-images/blob/master/resources/jupyter_notebook_config.partial.py
c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.open_browser = False
c.NotebookApp.port = 8888 #9999
# Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
# For headers sent by the upstream reverse proxy. Necessary if the proxy handles
# SSL
c.NotebookApp.trust_xheaders = True
# Include our extra templates
c.NotebookApp.extra_template_paths = ['/srv/templates/']
# Supply overrides for the tornado.web.Application that the IPython notebook
# uses.
#c.NotebookApp.tornado_settings = {
# 'headers': {
# 'X-Frame-Options': 'ALLOW FROM nature.com'
# },
# 'template_path':['/srv/ga/', '/srv/ipython/IPython/html',
# '/srv/ipython/IPython/html/templates']
#}
#c.Spawner.env_keep = ['PATH', 'PYTHONPATH', 'CONDA_ROOT', 'CONDA_DEFAULT_ENV', 'VIRTUAL_ENV', 'LANG', 'LC_ALL', 'NRN_NMODL_PATH']
# http://www.harrisgeospatial.com/Support/HelpArticlesDetail/TabId/219/ArtMID/900/ArticleID/14776/Integrating-the-Jupyter-Notebook-with-ESE.aspx
# We need to create an exception in the Jupyter Notebook security that will allow the Jupyter web page to be embedded in an HTML iframe
c.NotebookApp.tornado_settings = {
'headers': {
'Content-Security-Policy': "frame-ancestors 'self' http://*.projectpyrho.org "
}
#'static_url_prefix': 'https://cdn.jupyter.org/notebook/try-4.0.5/'
}
#'http://yourhostname:9191/Jupyter/'
|
Add config file TODO: edit to work with IFrames# Configuration file for Jupyter-notebook.
# https://github.com/jupyter/docker-demo-images/blob/master/resources/jupyter_notebook_config.partial.py
c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.open_browser = False
c.NotebookApp.port = 8888 #9999
# Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
# For headers sent by the upstream reverse proxy. Necessary if the proxy handles
# SSL
c.NotebookApp.trust_xheaders = True
# Include our extra templates
c.NotebookApp.extra_template_paths = ['/srv/templates/']
# Supply overrides for the tornado.web.Application that the IPython notebook
# uses.
#c.NotebookApp.tornado_settings = {
# 'headers': {
# 'X-Frame-Options': 'ALLOW FROM nature.com'
# },
# 'template_path':['/srv/ga/', '/srv/ipython/IPython/html',
# '/srv/ipython/IPython/html/templates']
#}
#c.Spawner.env_keep = ['PATH', 'PYTHONPATH', 'CONDA_ROOT', 'CONDA_DEFAULT_ENV', 'VIRTUAL_ENV', 'LANG', 'LC_ALL', 'NRN_NMODL_PATH']
# http://www.harrisgeospatial.com/Support/HelpArticlesDetail/TabId/219/ArtMID/900/ArticleID/14776/Integrating-the-Jupyter-Notebook-with-ESE.aspx
# We need to create an exception in the Jupyter Notebook security that will allow the Jupyter web page to be embedded in an HTML iframe
c.NotebookApp.tornado_settings = {
'headers': {
'Content-Security-Policy': "frame-ancestors 'self' http://*.projectpyrho.org "
}
#'static_url_prefix': 'https://cdn.jupyter.org/notebook/try-4.0.5/'
}
#'http://yourhostname:9191/Jupyter/'
|
<commit_before><commit_msg>Add config file TODO: edit to work with IFrames<commit_after># Configuration file for Jupyter-notebook.
# https://github.com/jupyter/docker-demo-images/blob/master/resources/jupyter_notebook_config.partial.py
c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.open_browser = False
c.NotebookApp.port = 8888 #9999
# Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
# For headers sent by the upstream reverse proxy. Necessary if the proxy handles
# SSL
c.NotebookApp.trust_xheaders = True
# Include our extra templates
c.NotebookApp.extra_template_paths = ['/srv/templates/']
# Supply overrides for the tornado.web.Application that the IPython notebook
# uses.
#c.NotebookApp.tornado_settings = {
# 'headers': {
# 'X-Frame-Options': 'ALLOW FROM nature.com'
# },
# 'template_path':['/srv/ga/', '/srv/ipython/IPython/html',
# '/srv/ipython/IPython/html/templates']
#}
#c.Spawner.env_keep = ['PATH', 'PYTHONPATH', 'CONDA_ROOT', 'CONDA_DEFAULT_ENV', 'VIRTUAL_ENV', 'LANG', 'LC_ALL', 'NRN_NMODL_PATH']
# http://www.harrisgeospatial.com/Support/HelpArticlesDetail/TabId/219/ArtMID/900/ArticleID/14776/Integrating-the-Jupyter-Notebook-with-ESE.aspx
# We need to create an exception in the Jupyter Notebook security that will allow the Jupyter web page to be embedded in an HTML iframe
c.NotebookApp.tornado_settings = {
'headers': {
'Content-Security-Policy': "frame-ancestors 'self' http://*.projectpyrho.org "
}
#'static_url_prefix': 'https://cdn.jupyter.org/notebook/try-4.0.5/'
}
#'http://yourhostname:9191/Jupyter/'
|
|
a20507f980328e54adef30af696d7afd01bfd6d2
|
buffer/tests/test_link.py
|
buffer/tests/test_link.py
|
from nose.tools import eq_, raises
from mock import MagicMock, patch
from buffer.models.link import Link
def test_links_shares():
'''
Test retrieving the link's shares via the constructor
'''
mocked_api = MagicMock()
mocked_api.get.return_value = {'shares': 123}
link = Link(api=mocked_api, url='www.google.com')
eq_(link, {'shares': 123, 'url': 'www.google.com', 'api': mocked_api})
|
Test basic link api call
|
Test basic link api call
|
Python
|
mit
|
vtemian/buffpy,bufferapp/buffer-python
|
Test basic link api call
|
from nose.tools import eq_, raises
from mock import MagicMock, patch
from buffer.models.link import Link
def test_links_shares():
'''
Test retrieving the link's shares via the constructor
'''
mocked_api = MagicMock()
mocked_api.get.return_value = {'shares': 123}
link = Link(api=mocked_api, url='www.google.com')
eq_(link, {'shares': 123, 'url': 'www.google.com', 'api': mocked_api})
|
<commit_before><commit_msg>Test basic link api call<commit_after>
|
from nose.tools import eq_, raises
from mock import MagicMock, patch
from buffer.models.link import Link
def test_links_shares():
'''
Test retrieving the link's shares via the constructor
'''
mocked_api = MagicMock()
mocked_api.get.return_value = {'shares': 123}
link = Link(api=mocked_api, url='www.google.com')
eq_(link, {'shares': 123, 'url': 'www.google.com', 'api': mocked_api})
|
Test basic link api callfrom nose.tools import eq_, raises
from mock import MagicMock, patch
from buffer.models.link import Link
def test_links_shares():
'''
Test retrieving the link's shares via the constructor
'''
mocked_api = MagicMock()
mocked_api.get.return_value = {'shares': 123}
link = Link(api=mocked_api, url='www.google.com')
eq_(link, {'shares': 123, 'url': 'www.google.com', 'api': mocked_api})
|
<commit_before><commit_msg>Test basic link api call<commit_after>from nose.tools import eq_, raises
from mock import MagicMock, patch
from buffer.models.link import Link
def test_links_shares():
'''
Test retrieving the link's shares via the constructor
'''
mocked_api = MagicMock()
mocked_api.get.return_value = {'shares': 123}
link = Link(api=mocked_api, url='www.google.com')
eq_(link, {'shares': 123, 'url': 'www.google.com', 'api': mocked_api})
|
|
778485d7637e9833ab5aea0327b21a77bf2a0c1e
|
dci/alembic/versions/8a64d57a77d3_add_server_default_to_component_active_.py
|
dci/alembic/versions/8a64d57a77d3_add_server_default_to_component_active_.py
|
#
# Copyright (C) 2016 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""add server_default to component active flag
Revision ID: 8a64d57a77d3
Revises: cc020d3f2290
Create Date: 2016-10-12 11:47:08.609980
"""
# revision identifiers, used by Alembic.
revision = '8a64d57a77d3'
down_revision = 'cc020d3f2290'
branch_labels = None
depends_on = None
from alembic import op
def upgrade():
op.alter_column('components', 'active', server_default='1')
def downgrade():
pass
|
Add server_default value to component's active flag.
|
Add server_default value to component's active flag.
This commit adds a server default value to component's
active flag in order to update all the components rows
of the existing database.
Change-Id: Idba7c7e13450378d74904e7e17903c731c9427eb
|
Python
|
apache-2.0
|
redhat-cip/dci-control-server,enovance/dci-control-server,redhat-cip/dci-control-server,enovance/dci-control-server
|
Add server_default value to component's active flag.
This commit adds a server default value to component's
active flag in order to update all the components rows
of the existing database.
Change-Id: Idba7c7e13450378d74904e7e17903c731c9427eb
|
#
# Copyright (C) 2016 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""add server_default to component active flag
Revision ID: 8a64d57a77d3
Revises: cc020d3f2290
Create Date: 2016-10-12 11:47:08.609980
"""
# revision identifiers, used by Alembic.
revision = '8a64d57a77d3'
down_revision = 'cc020d3f2290'
branch_labels = None
depends_on = None
from alembic import op
def upgrade():
op.alter_column('components', 'active', server_default='1')
def downgrade():
pass
|
<commit_before><commit_msg>Add server_default value to component's active flag.
This commit adds a server default value to component's
active flag in order to update all the components rows
of the existing database.
Change-Id: Idba7c7e13450378d74904e7e17903c731c9427eb<commit_after>
|
#
# Copyright (C) 2016 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""add server_default to component active flag
Revision ID: 8a64d57a77d3
Revises: cc020d3f2290
Create Date: 2016-10-12 11:47:08.609980
"""
# revision identifiers, used by Alembic.
revision = '8a64d57a77d3'
down_revision = 'cc020d3f2290'
branch_labels = None
depends_on = None
from alembic import op
def upgrade():
op.alter_column('components', 'active', server_default='1')
def downgrade():
pass
|
Add server_default value to component's active flag.
This commit adds a server default value to component's
active flag in order to update all the components rows
of the existing database.
Change-Id: Idba7c7e13450378d74904e7e17903c731c9427eb#
# Copyright (C) 2016 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""add server_default to component active flag
Revision ID: 8a64d57a77d3
Revises: cc020d3f2290
Create Date: 2016-10-12 11:47:08.609980
"""
# revision identifiers, used by Alembic.
revision = '8a64d57a77d3'
down_revision = 'cc020d3f2290'
branch_labels = None
depends_on = None
from alembic import op
def upgrade():
op.alter_column('components', 'active', server_default='1')
def downgrade():
pass
|
<commit_before><commit_msg>Add server_default value to component's active flag.
This commit adds a server default value to component's
active flag in order to update all the components rows
of the existing database.
Change-Id: Idba7c7e13450378d74904e7e17903c731c9427eb<commit_after>#
# Copyright (C) 2016 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""add server_default to component active flag
Revision ID: 8a64d57a77d3
Revises: cc020d3f2290
Create Date: 2016-10-12 11:47:08.609980
"""
# revision identifiers, used by Alembic.
revision = '8a64d57a77d3'
down_revision = 'cc020d3f2290'
branch_labels = None
depends_on = None
from alembic import op
def upgrade():
op.alter_column('components', 'active', server_default='1')
def downgrade():
pass
|
|
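The migration above adds a server-side default of '1' to components.active and leaves downgrade() as a no-op. A hedged sketch of what a symmetric downgrade could look like, assuming it is acceptable to drop the default again:

from alembic import op

def downgrade():
    # Remove the server-side default so the column matches the
    # pre-upgrade schema definition.
    op.alter_column('components', 'active', server_default=None)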
0946ad9e528c3c4485755e26f0a3323902d15a38
|
bsopener.py
|
bsopener.py
|
from bs4 import BeautifulSoup
from urllib.request import urlopen
from urllib.request import Request
from const import constant
class BSOpener(object):
""" A wrapper arround urllib and BeautifulSoup used a helper for url requests """
# TODO: make this class a singleton
class _Const():
""" Contains the constants used in BSOpener class """
@constant
def HEADERS():
""" Headers to send with all url requests """
return {"Accept":"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
"User-Agent":"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.111 Safari/537.36"}
def __init__(self):
self.CONST = self._Const()
def bsopen(self, url, headers=None):
if headers is None:
headers=self.CONST.HEADERS
req = Request(url=url, headers=headers)
return BeautifulSoup(urlopen(req), "html.parser")
|
Add a url opener abstraction module
|
Add a url opener abstraction module
|
Python
|
mit
|
iluxonchik/lyricist
|
Add a url opener abstraction module
|
from bs4 import BeautifulSoup
from urllib.request import urlopen
from urllib.request import Request
from const import constant
class BSOpener(object):
""" A wrapper arround urllib and BeautifulSoup used a helper for url requests """
# TODO: make this class a singleton
class _Const():
""" Contains the constants used in BSOpener class """
@constant
def HEADERS():
""" Headers to send with all url requests """
return {"Accept":"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
"User-Agent":"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.111 Safari/537.36"}
def __init__(self):
self.CONST = self._Const()
def bsopen(self, url, headers=None):
if headers is None:
headers=self.CONST.HEADERS
req = Request(url=url, headers=headers)
return BeautifulSoup(urlopen(req), "html.parser")
|
<commit_before><commit_msg>Add a url opener abstraction module<commit_after>
|
from bs4 import BeautifulSoup
from urllib.request import urlopen
from urllib.request import Request
from const import constant
class BSOpener(object):
""" A wrapper arround urllib and BeautifulSoup used a helper for url requests """
# TODO: make this class a singleton
class _Const():
""" Contains the constants used in BSOpener class """
@constant
def HEADERS():
""" Headers to send with all url requests """
return {"Accept":"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
"User-Agent":"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.111 Safari/537.36"}
def __init__(self):
self.CONST = self._Const()
def bsopen(self, url, headers=None):
if headers is None:
headers=self.CONST.HEADERS
req = Request(url=url, headers=headers)
return BeautifulSoup(urlopen(req), "html.parser")
|
Add a url opener abstraction modulefrom bs4 import BeautifulSoup
from urllib.request import urlopen
from urllib.request import Request
from const import constant
class BSOpener(object):
""" A wrapper arround urllib and BeautifulSoup used a helper for url requests """
# TODO: make this class a singleton
class _Const():
""" Contains the constants used in BSOpener class """
@constant
def HEADERS():
""" Headers to send with all url requests """
return {"Accept":"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
"User-Agent":"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.111 Safari/537.36"}
def __init__(self):
self.CONST = self._Const()
def bsopen(self, url, headers=None):
if headers is None:
headers=self.CONST.HEADERS
req = Request(url=url, headers=headers)
return BeautifulSoup(urlopen(req), "html.parser")
|
<commit_before><commit_msg>Add a url opener abstraction module<commit_after>from bs4 import BeautifulSoup
from urllib.request import urlopen
from urllib.request import Request
from const import constant
class BSOpener(object):
""" A wrapper arround urllib and BeautifulSoup used a helper for url requests """
# TODO: make this class a singleton
class _Const():
""" Contains the constants used in BSOpener class """
@constant
def HEADERS():
""" Headers to send with all url requests """
return {"Accept":"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
"User-Agent":"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.111 Safari/537.36"}
def __init__(self):
self.CONST = self._Const()
def bsopen(self, url, headers=None):
if headers is None:
headers=self.CONST.HEADERS
req = Request(url=url, headers=headers)
return BeautifulSoup(urlopen(req), "html.parser")
|
|
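A short usage sketch for the wrapper above; the import path follows the file name bsopener.py, and the URL is an assumption for illustration. bsopen() returns a BeautifulSoup document parsed from the response, with the Chrome-like default headers applied automatically:

from bsopener import BSOpener

opener = BSOpener()
soup = opener.bsopen("https://example.com")  # hypothetical target URL
print(soup.title.get_text() if soup.title else "no <title> found")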
777d06e9048fd67b04deadaca0b05446f4804acd
|
email_tracker/south_migrations/0003_set_content_types.py
|
email_tracker/south_migrations/0003_set_content_types.py
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
orm.TrackedEmail.objects.filter(body__contains='<html>').update(content_type='html')
def backwards(self, orm):
"Write your backwards methods here."
models = {
'email_tracker.emailcategory': {
'Meta': {'object_name': 'EmailCategory'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'})
},
'email_tracker.trackedemail': {
'Meta': {'ordering': "('-created_at',)", 'object_name': 'TrackedEmail'},
'bcc': ('django.db.models.fields.TextField', [], {}),
'body': ('django.db.models.fields.TextField', [], {}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['email_tracker.EmailCategory']", 'null': 'True', 'blank': 'True'}),
'cc': ('django.db.models.fields.TextField', [], {}),
'content_type': ('django.db.models.fields.CharField', [], {'default': "'plain'", 'max_length': '64'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'from_email': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_sent': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'recipients': ('django.db.models.fields.TextField', [], {}),
'subject': ('django.db.models.fields.CharField', [], {'max_length': '512'})
}
}
complete_apps = ['email_tracker']
symmetrical = True
|
Add data migration to set correct content type of previously created navs.
|
Add data migration to set correct content type of previously created navs.
|
Python
|
mit
|
IndustriaTech/django-email-tracker,MagicSolutions/django-email-tracker
|
Add data migration to set correct content type of previously created navs.
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
orm.TrackedEmail.objects.filter(body__contains='<html>').update(content_type='html')
def backwards(self, orm):
"Write your backwards methods here."
models = {
'email_tracker.emailcategory': {
'Meta': {'object_name': 'EmailCategory'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'})
},
'email_tracker.trackedemail': {
'Meta': {'ordering': "('-created_at',)", 'object_name': 'TrackedEmail'},
'bcc': ('django.db.models.fields.TextField', [], {}),
'body': ('django.db.models.fields.TextField', [], {}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['email_tracker.EmailCategory']", 'null': 'True', 'blank': 'True'}),
'cc': ('django.db.models.fields.TextField', [], {}),
'content_type': ('django.db.models.fields.CharField', [], {'default': "'plain'", 'max_length': '64'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'from_email': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_sent': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'recipients': ('django.db.models.fields.TextField', [], {}),
'subject': ('django.db.models.fields.CharField', [], {'max_length': '512'})
}
}
complete_apps = ['email_tracker']
symmetrical = True
|
<commit_before><commit_msg>Add data migration to set correct content type of previously created navs.<commit_after>
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
orm.TrackedEmail.objects.filter(body__contains='<html>').update(content_type='html')
def backwards(self, orm):
"Write your backwards methods here."
models = {
'email_tracker.emailcategory': {
'Meta': {'object_name': 'EmailCategory'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'})
},
'email_tracker.trackedemail': {
'Meta': {'ordering': "('-created_at',)", 'object_name': 'TrackedEmail'},
'bcc': ('django.db.models.fields.TextField', [], {}),
'body': ('django.db.models.fields.TextField', [], {}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['email_tracker.EmailCategory']", 'null': 'True', 'blank': 'True'}),
'cc': ('django.db.models.fields.TextField', [], {}),
'content_type': ('django.db.models.fields.CharField', [], {'default': "'plain'", 'max_length': '64'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'from_email': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_sent': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'recipients': ('django.db.models.fields.TextField', [], {}),
'subject': ('django.db.models.fields.CharField', [], {'max_length': '512'})
}
}
complete_apps = ['email_tracker']
symmetrical = True
|
Add data migration to set correct content type of previously created navs.# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
orm.TrackedEmail.objects.filter(body__contains='<html>').update(content_type='html')
def backwards(self, orm):
"Write your backwards methods here."
models = {
'email_tracker.emailcategory': {
'Meta': {'object_name': 'EmailCategory'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'})
},
'email_tracker.trackedemail': {
'Meta': {'ordering': "('-created_at',)", 'object_name': 'TrackedEmail'},
'bcc': ('django.db.models.fields.TextField', [], {}),
'body': ('django.db.models.fields.TextField', [], {}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['email_tracker.EmailCategory']", 'null': 'True', 'blank': 'True'}),
'cc': ('django.db.models.fields.TextField', [], {}),
'content_type': ('django.db.models.fields.CharField', [], {'default': "'plain'", 'max_length': '64'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'from_email': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_sent': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'recipients': ('django.db.models.fields.TextField', [], {}),
'subject': ('django.db.models.fields.CharField', [], {'max_length': '512'})
}
}
complete_apps = ['email_tracker']
symmetrical = True
|
<commit_before><commit_msg>Add data migration to set correct content type of previously created navs.<commit_after># -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
orm.TrackedEmail.objects.filter(body__contains='<html>').update(content_type='html')
def backwards(self, orm):
"Write your backwards methods here."
models = {
'email_tracker.emailcategory': {
'Meta': {'object_name': 'EmailCategory'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'})
},
'email_tracker.trackedemail': {
'Meta': {'ordering': "('-created_at',)", 'object_name': 'TrackedEmail'},
'bcc': ('django.db.models.fields.TextField', [], {}),
'body': ('django.db.models.fields.TextField', [], {}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['email_tracker.EmailCategory']", 'null': 'True', 'blank': 'True'}),
'cc': ('django.db.models.fields.TextField', [], {}),
'content_type': ('django.db.models.fields.CharField', [], {'default': "'plain'", 'max_length': '64'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'from_email': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_sent': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'recipients': ('django.db.models.fields.TextField', [], {}),
'subject': ('django.db.models.fields.CharField', [], {'max_length': '512'})
}
}
complete_apps = ['email_tracker']
symmetrical = True
|
|
bace18ec06e27d36492835fc009c2265b7ac5880
|
mla_game/apps/transcript/migrations/0018_auto_20171031_1422.py
|
mla_game/apps/transcript/migrations/0018_auto_20171031_1422.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-10-31 14:22
from __future__ import unicode_literals
from django.db import migrations
def create_votes(apps, schema_editor):
TranscriptPhraseVote = apps.get_model(
'transcript', 'TranscriptPhraseVote'
)
TranscriptPhraseCorrection = apps.get_model(
'transcript', 'TranscriptPhraseCorrection'
)
corrections = TranscriptPhraseCorrection.objects.filter(
not_an_error=True
)
for correction in corrections:
if TranscriptPhraseVote.objects.filter(
user=correction.user,
transcript_phrase=correction.transcript_phrase
).count() == 0:
TranscriptPhraseVote.objects.create(
transcript_phrase=correction.transcript_phrase,
user=correction.user,
upvote=True
)
class Migration(migrations.Migration):
dependencies = [
('transcript', '0017_auto_20171031_1420'),
]
operations = [
migrations.RunPython(create_votes)
]
|
Convert 'not an error' corrections to TranscriptPhraseVote objects
|
Convert 'not an error' corrections to TranscriptPhraseVote objects
|
Python
|
mit
|
WGBH/FixIt,WGBH/FixIt,WGBH/FixIt
|
Convert 'not an error' corrections to TranscriptPhraseVote objects
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-10-31 14:22
from __future__ import unicode_literals
from django.db import migrations
def create_votes(apps, schema_editor):
TranscriptPhraseVote = apps.get_model(
'transcript', 'TranscriptPhraseVote'
)
TranscriptPhraseCorrection = apps.get_model(
'transcript', 'TranscriptPhraseCorrection'
)
corrections = TranscriptPhraseCorrection.objects.filter(
not_an_error=True
)
for correction in corrections:
if TranscriptPhraseVote.objects.filter(
user=correction.user,
transcript_phrase=correction.transcript_phrase
).count() == 0:
TranscriptPhraseVote.objects.create(
transcript_phrase=correction.transcript_phrase,
user=correction.user,
upvote=True
)
class Migration(migrations.Migration):
dependencies = [
('transcript', '0017_auto_20171031_1420'),
]
operations = [
migrations.RunPython(create_votes)
]
|
<commit_before><commit_msg>Convert 'not an error' corrections to TranscriptPhraseVote objects<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-10-31 14:22
from __future__ import unicode_literals
from django.db import migrations
def create_votes(apps, schema_editor):
TranscriptPhraseVote = apps.get_model(
'transcript', 'TranscriptPhraseVote'
)
TranscriptPhraseCorrection = apps.get_model(
'transcript', 'TranscriptPhraseCorrection'
)
corrections = TranscriptPhraseCorrection.objects.filter(
not_an_error=True
)
for correction in corrections:
if TranscriptPhraseVote.objects.filter(
user=correction.user,
transcript_phrase=correction.transcript_phrase
).count() == 0:
TranscriptPhraseVote.objects.create(
transcript_phrase=correction.transcript_phrase,
user=correction.user,
upvote=True
)
class Migration(migrations.Migration):
dependencies = [
('transcript', '0017_auto_20171031_1420'),
]
operations = [
migrations.RunPython(create_votes)
]
|
Convert 'not an error' corrections to TranscriptPhraseVote objects# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-10-31 14:22
from __future__ import unicode_literals
from django.db import migrations
def create_votes(apps, schema_editor):
TranscriptPhraseVote = apps.get_model(
'transcript', 'TranscriptPhraseVote'
)
TranscriptPhraseCorrection = apps.get_model(
'transcript', 'TranscriptPhraseCorrection'
)
corrections = TranscriptPhraseCorrection.objects.filter(
not_an_error=True
)
for correction in corrections:
if TranscriptPhraseVote.objects.filter(
user=correction.user,
transcript_phrase=correction.transcript_phrase
).count() == 0:
TranscriptPhraseVote.objects.create(
transcript_phrase=correction.transcript_phrase,
user=correction.user,
upvote=True
)
class Migration(migrations.Migration):
dependencies = [
('transcript', '0017_auto_20171031_1420'),
]
operations = [
migrations.RunPython(create_votes)
]
|
<commit_before><commit_msg>Convert 'not an error' corrections to TranscriptPhraseVote objects<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-10-31 14:22
from __future__ import unicode_literals
from django.db import migrations
def create_votes(apps, schema_editor):
TranscriptPhraseVote = apps.get_model(
'transcript', 'TranscriptPhraseVote'
)
TranscriptPhraseCorrection = apps.get_model(
'transcript', 'TranscriptPhraseCorrection'
)
corrections = TranscriptPhraseCorrection.objects.filter(
not_an_error=True
)
for correction in corrections:
if TranscriptPhraseVote.objects.filter(
user=correction.user,
transcript_phrase=correction.transcript_phrase
).count() == 0:
TranscriptPhraseVote.objects.create(
transcript_phrase=correction.transcript_phrase,
user=correction.user,
upvote=True
)
class Migration(migrations.Migration):
dependencies = [
('transcript', '0017_auto_20171031_1420'),
]
operations = [
migrations.RunPython(create_votes)
]
|
|
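The data migration above guards against duplicate votes with a count() == 0 check before each create. An equivalent sketch using get_or_create, offered only as an alternative formulation and not part of the original migration:

def create_votes(apps, schema_editor):
    TranscriptPhraseVote = apps.get_model('transcript', 'TranscriptPhraseVote')
    TranscriptPhraseCorrection = apps.get_model('transcript', 'TranscriptPhraseCorrection')
    for correction in TranscriptPhraseCorrection.objects.filter(not_an_error=True):
        # get_or_create performs the existence check and the insert in one call.
        TranscriptPhraseVote.objects.get_or_create(
            transcript_phrase=correction.transcript_phrase,
            user=correction.user,
            defaults={'upvote': True},
        )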
23b2723d432fc84bb77f34d55a79b860ade3c62c
|
opentreemap/treemap/migrations/0045_add_modeling_permission.py
|
opentreemap/treemap/migrations/0045_add_modeling_permission.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-28 15:47
from __future__ import unicode_literals
from django.db import migrations
from django.db.utils import IntegrityError
# Note: to reuse this data migration when adding a new permission
# in the future, copy the code and update these three lines:
_new_permission_codename = 'modeling'
_new_permission_name = 'Can access modeling page'
_default_role_names = ['administrator']
def add_permission(apps, schema_editor):
Permission, Instance, instance_content_type, Role = _get_models(apps)
# Create new permission
try:
perm = Permission.objects.create(
codename=_new_permission_codename,
name=_new_permission_codename,
content_type=instance_content_type
)
except IntegrityError:
# The logic in the migration was previously part of a migration in a
# different Django application. If the permission already exists, we do
# not need to create it or any related instance permissions.
return
# Add new permission to specified roles in all instances
for instance in Instance.objects.all():
for role in Role.objects.filter(instance=instance):
if role.name in _default_role_names:
role.instance_permissions.add(perm)
def remove_permission(apps, schema_editor):
Permission, Instance, instance_content_type, Role = _get_models(apps)
perm = Permission.objects.get(codename=_new_permission_codename,
content_type=instance_content_type)
# Remove permission from all roles
ThroughModel = Role.instance_permissions.through
ThroughModel.objects.filter(permission_id=perm.id).delete()
# Remove permission itself
Permission.objects.filter(id=perm.id).delete()
def _get_models(apps):
Instance = apps.get_model('treemap', 'Instance')
ContentType = apps.get_model('contenttypes', 'ContentType')
instance_content_type = ContentType.objects.get_for_model(Instance)
Role = apps.get_model('treemap', 'Role')
Permission = apps.get_model('auth', 'Permission')
return Permission, Instance, instance_content_type, Role
class Migration(migrations.Migration):
dependencies = [
('treemap', '0044_hstorefield'),
]
operations = [
migrations.RunPython(add_permission, remove_permission)
]
|
Add modeling permission data migration
|
Add modeling permission data migration
Any permissions defined in code must have a supporting `Permission` row.
This migration is unusual because it contains logic that was originally created
in a different Django application. As a result, in some environments the change
has already been applied. To deal with this situation, I added a try/except
block to exit the migration early if the `Permission` row already exists.
|
Python
|
agpl-3.0
|
maurizi/otm-core,maurizi/otm-core,maurizi/otm-core,maurizi/otm-core
|
Add modeling permission data migration
Any permissions defined in code must have a supporting `Permission` row.
This migration is unusual because it contains logic that was originally created
in a different Django application. As a result, in some environments the change
has already been applied. To deal with this situation, I added a try/except
block to exit the migration early if the `Permission` row already exists.
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-28 15:47
from __future__ import unicode_literals
from django.db import migrations
from django.db.utils import IntegrityError
# Note: to reuse this data migration when adding a new permission
# in the future, copy the code and update these three lines:
_new_permission_codename = 'modeling'
_new_permission_name = 'Can access modeling page'
_default_role_names = ['administrator']
def add_permission(apps, schema_editor):
Permission, Instance, instance_content_type, Role = _get_models(apps)
# Create new permission
try:
perm = Permission.objects.create(
codename=_new_permission_codename,
name=_new_permission_codename,
content_type=instance_content_type
)
except IntegrityError:
# The logic in the migration was previously part of a migration in a
# different Django application. If the permission already exists, we do
# not need to create it or any related instance permissions.
return
# Add new permission to specified roles in all instances
for instance in Instance.objects.all():
for role in Role.objects.filter(instance=instance):
if role.name in _default_role_names:
role.instance_permissions.add(perm)
def remove_permission(apps, schema_editor):
Permission, Instance, instance_content_type, Role = _get_models(apps)
perm = Permission.objects.get(codename=_new_permission_codename,
content_type=instance_content_type)
# Remove permission from all roles
ThroughModel = Role.instance_permissions.through
ThroughModel.objects.filter(permission_id=perm.id).delete()
# Remove permission itself
Permission.objects.filter(id=perm.id).delete()
def _get_models(apps):
Instance = apps.get_model('treemap', 'Instance')
ContentType = apps.get_model('contenttypes', 'ContentType')
instance_content_type = ContentType.objects.get_for_model(Instance)
Role = apps.get_model('treemap', 'Role')
Permission = apps.get_model('auth', 'Permission')
return Permission, Instance, instance_content_type, Role
class Migration(migrations.Migration):
dependencies = [
('treemap', '0044_hstorefield'),
]
operations = [
migrations.RunPython(add_permission, remove_permission)
]
|
<commit_before><commit_msg>Add modeling permission data migration
Any permissions defined in code must have a supporting `Permission` row.
This migration is unusual because it contains logic that was originally created
in a different Django application. As a result, in some environments the change
has already been applied. To deal with this situation, I added a try/except
block to exit the migration early if the `Permission` row already exists.<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-28 15:47
from __future__ import unicode_literals
from django.db import migrations
from django.db.utils import IntegrityError
# Note: to reuse this data migration when adding a new permission
# in the future, copy the code and update these three lines:
_new_permission_codename = 'modeling'
_new_permission_name = 'Can access modeling page'
_default_role_names = ['administrator']
def add_permission(apps, schema_editor):
Permission, Instance, instance_content_type, Role = _get_models(apps)
# Create new permission
try:
perm = Permission.objects.create(
codename=_new_permission_codename,
name=_new_permission_codename,
content_type=instance_content_type
)
except IntegrityError:
# The logic in the migration was previously part of a migration in a
# different Django application. If the permission already exists, we do
# not need to create it or any related instance permissions.
return
# Add new permission to specified roles in all instances
for instance in Instance.objects.all():
for role in Role.objects.filter(instance=instance):
if role.name in _default_role_names:
role.instance_permissions.add(perm)
def remove_permission(apps, schema_editor):
Permission, Instance, instance_content_type, Role = _get_models(apps)
perm = Permission.objects.get(codename=_new_permission_codename,
content_type=instance_content_type)
# Remove permission from all roles
ThroughModel = Role.instance_permissions.through
ThroughModel.objects.filter(permission_id=perm.id).delete()
# Remove permission itself
Permission.objects.filter(id=perm.id).delete()
def _get_models(apps):
Instance = apps.get_model('treemap', 'Instance')
ContentType = apps.get_model('contenttypes', 'ContentType')
instance_content_type = ContentType.objects.get_for_model(Instance)
Role = apps.get_model('treemap', 'Role')
Permission = apps.get_model('auth', 'Permission')
return Permission, Instance, instance_content_type, Role
class Migration(migrations.Migration):
dependencies = [
('treemap', '0044_hstorefield'),
]
operations = [
migrations.RunPython(add_permission, remove_permission)
]
|
Add modeling permission data migration
Any permissions defined in code must have a supporting `Permission` row.
This migration is unusual because it contains logic that was originally created
in a different Django application. As a result, in some environments the change
has already been applied. To deal with this situation, I added a try/except
block to exit the migration early if the `Permission` row already exists.# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-28 15:47
from __future__ import unicode_literals
from django.db import migrations
from django.db.utils import IntegrityError
# Note: to reuse this data migration when adding a new permission
# in the future, copy the code and update these three lines:
_new_permission_codename = 'modeling'
_new_permission_name = 'Can access modeling page'
_default_role_names = ['administrator']
def add_permission(apps, schema_editor):
Permission, Instance, instance_content_type, Role = _get_models(apps)
# Create new permission
try:
perm = Permission.objects.create(
codename=_new_permission_codename,
name=_new_permission_codename,
content_type=instance_content_type
)
except IntegrityError:
# The logic in the migration was previously part of a migration in a
# different Django application. If the permission already exists, we do
# not need to create it or any related instance permissions.
return
# Add new permission to specified roles in all instances
for instance in Instance.objects.all():
for role in Role.objects.filter(instance=instance):
if role.name in _default_role_names:
role.instance_permissions.add(perm)
def remove_permission(apps, schema_editor):
Permission, Instance, instance_content_type, Role = _get_models(apps)
perm = Permission.objects.get(codename=_new_permission_codename,
content_type=instance_content_type)
# Remove permission from all roles
ThroughModel = Role.instance_permissions.through
ThroughModel.objects.filter(permission_id=perm.id).delete()
# Remove permission itself
Permission.objects.filter(id=perm.id).delete()
def _get_models(apps):
Instance = apps.get_model('treemap', 'Instance')
ContentType = apps.get_model('contenttypes', 'ContentType')
instance_content_type = ContentType.objects.get_for_model(Instance)
Role = apps.get_model('treemap', 'Role')
Permission = apps.get_model('auth', 'Permission')
return Permission, Instance, instance_content_type, Role
class Migration(migrations.Migration):
dependencies = [
('treemap', '0044_hstorefield'),
]
operations = [
migrations.RunPython(add_permission, remove_permission)
]
|
<commit_before><commit_msg>Add modeling permission data migration
Any permissions defined in code must have a supporting `Permission` row.
This migration is unusual because it contains logic that was originally created
in a different Django application. As a result, in some environments the change
has already been applied. To deal with this situation, I added a try/except
block to exit the migration early if the `Permission` row already exists.<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-28 15:47
from __future__ import unicode_literals
from django.db import migrations
from django.db.utils import IntegrityError
# Note: to reuse this data migration when adding a new permission
# in the future, copy the code and update these three lines:
_new_permission_codename = 'modeling'
_new_permission_name = 'Can access modeling page'
_default_role_names = ['administrator']
def add_permission(apps, schema_editor):
Permission, Instance, instance_content_type, Role = _get_models(apps)
# Create new permission
try:
perm = Permission.objects.create(
codename=_new_permission_codename,
            name=_new_permission_name,
content_type=instance_content_type
)
except IntegrityError:
# The logic in the migration was previously part of a migration in a
# different Django application. If the permission already exists, we do
# not need to create it or any related instance permissions.
return
# Add new permission to specified roles in all instances
for instance in Instance.objects.all():
for role in Role.objects.filter(instance=instance):
if role.name in _default_role_names:
role.instance_permissions.add(perm)
def remove_permission(apps, schema_editor):
Permission, Instance, instance_content_type, Role = _get_models(apps)
perm = Permission.objects.get(codename=_new_permission_codename,
content_type=instance_content_type)
# Remove permission from all roles
ThroughModel = Role.instance_permissions.through
ThroughModel.objects.filter(permission_id=perm.id).delete()
# Remove permission itself
Permission.objects.filter(id=perm.id).delete()
def _get_models(apps):
Instance = apps.get_model('treemap', 'Instance')
ContentType = apps.get_model('contenttypes', 'ContentType')
instance_content_type = ContentType.objects.get_for_model(Instance)
Role = apps.get_model('treemap', 'Role')
Permission = apps.get_model('auth', 'Permission')
return Permission, Instance, instance_content_type, Role
class Migration(migrations.Migration):
dependencies = [
('treemap', '0044_hstorefield'),
]
operations = [
migrations.RunPython(add_permission, remove_permission)
]
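Following the reuse note at the top of the module, adapting this for a future permission only means editing the three module-level names; a hypothetical example (the codename, label and role list below are made up, not part of this commit):

_new_permission_codename = 'bulk_upload'
_new_permission_name = 'Can access bulk upload page'
_default_role_names = ['administrator', 'editor']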
|
|
93c719f11a42b68d30d8c8229ce5edb22ad0ef95
|
indra/mock_statement.py
|
indra/mock_statement.py
|
class MockStatement(object):
    def __init__(self, evidence, mk_hash, supports):
self.evidence = evidence
self.__mk_hash = mk_hash
self.supports = supports
self.belief = None
    def matches_key(self):
return self.__mk_hash
class MockEvidence(object):
    def __init__(self, source_api, selected_annotations):
self.source_api = source_api
# For
# biopax: 'source_sub_id'
# reach: 'found_by'
# geneways: 'actiontype'
self.annotations = selected_annotations
|
Add classes to imitate Statements and Evidence in belief engine.
|
Add classes to imitate Statements and Evidence in belief engine.
|
Python
|
bsd-2-clause
|
johnbachman/belpy,sorgerlab/indra,bgyori/indra,sorgerlab/indra,bgyori/indra,sorgerlab/belpy,johnbachman/indra,sorgerlab/belpy,johnbachman/indra,pvtodorov/indra,johnbachman/belpy,johnbachman/belpy,pvtodorov/indra,pvtodorov/indra,sorgerlab/indra,johnbachman/indra,sorgerlab/belpy,bgyori/indra,pvtodorov/indra
|
Add classes to imitate Statements and Evidence in belief engine.
|
class MockStatement(object):
    def __init__(self, evidence, mk_hash, supports):
self.evidence = evidence
self.__mk_hash = mk_hash
self.supports = supports
self.belief = None
    def matches_key(self):
return self.__mk_hash
class MockEvidence(object):
    def __init__(self, source_api, selected_annotations):
self.source_api = source_api
# For
# biopax: 'source_sub_id'
# reach: 'found_by'
# geneways: 'actiontype'
self.annotations = selected_annotations
|
<commit_before><commit_msg>Add classes to imitate Statements and Evidence in belief engine.<commit_after>
|
class MockStatement(object):
    def __init__(self, evidence, mk_hash, supports):
self.evidence = evidence
self.__mk_hash = mk_hash
self.supports = supports
self.belief = None
    def matches_key(self):
return self.__mk_hash
class MockEvidence(object):
    def __init__(self, source_api, selected_annotations):
self.source_api = source_api
# For
# biopax: 'source_sub_id'
# reach: 'found_by'
# geneways: 'actiontype'
self.annotations = selected_annotations
|
Add classes to imitate Statements and Evidence in belief engine.class MockStatement(object):
    def __init__(self, evidence, mk_hash, supports):
self.evidence = evidence
self.__mk_hash = mk_hash
self.supports = supports
self.belief = None
    def matches_key(self):
return self.__mk_hash
class MockEvidence(object):
    def __init__(self, source_api, selected_annotations):
self.source_api = source_api
# For
# biopax: 'source_sub_id'
# reach: 'found_by'
# geneways: 'actiontype'
self.annotations = selected_annotations
|
<commit_before><commit_msg>Add classes to imitate Statements and Evidence in belief engine.<commit_after>class MockStatement(object):
    def __init__(self, evidence, mk_hash, supports):
self.evidence = evidence
self.__mk_hash = mk_hash
self.supports = supports
self.belief = None
    def matches_key(self):
return self.__mk_hash
class MockEvidence(object):
    def __init__(self, source_api, selected_annotations):
self.source_api = source_api
# For
# biopax: 'source_sub_id'
# reach: 'found_by'
# geneways: 'actiontype'
self.annotations = selected_annotations
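A minimal construction sketch using only the classes above (the hash, sources and annotation values are made up; a real belief engine would set .belief itself):

evidence = [MockEvidence('reach', {'found_by': 'phospho_rule_1'}),
            MockEvidence('biopax', {'source_sub_id': 'pid'})]
stmt = MockStatement(evidence, 12345, supports=[])
stmt.belief = 0.87  # stand-in for the value a belief engine would assign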
|
|
7dc02947b33d0319bf849e30ea6c3fac28c78c9a
|
blanc_basic_pages/context_processors.py
|
blanc_basic_pages/context_processors.py
|
from django.utils.functional import SimpleLazyObject
from django.contrib.flatpages.models import FlatPage
from django.conf import settings
from django.utils.safestring import mark_safe
def flatpage(request):
def get_flatpage():
url = request.path_info
# This has no chance of working
if not url.endswith('/') and settings.APPEND_SLASH:
return ''
if not url.startswith('/'):
url = '/' + url
try:
f = FlatPage.objects.get(url=url)
f.title = mark_safe(f.title)
f.content = mark_safe(f.content)
return f
except FlatPage.DoesNotExist:
return ''
return {
'lazy_flatpage': SimpleLazyObject(get_flatpage),
}
|
Add the lazy_flatpage context processor
|
Add the lazy_flatpage context processor
Allows any non-flatpage to access a flatpage of the same request URL with lazy_flatpage
|
Python
|
bsd-3-clause
|
blancltd/blanc-basic-pages
|
Add the lazy_flatpage context processor
Allows any non-flatpage to access a flatpage of the same request URL with lazy_flatpage
|
from django.utils.functional import SimpleLazyObject
from django.contrib.flatpages.models import FlatPage
from django.conf import settings
from django.utils.safestring import mark_safe
def flatpage(request):
def get_flatpage():
url = request.path_info
# This has no chance of working
if not url.endswith('/') and settings.APPEND_SLASH:
return ''
if not url.startswith('/'):
url = '/' + url
try:
f = FlatPage.objects.get(url=url)
f.title = mark_safe(f.title)
f.content = mark_safe(f.content)
return f
except FlatPage.DoesNotExist:
return ''
return {
'lazy_flatpage': SimpleLazyObject(get_flatpage),
}
|
<commit_before><commit_msg>Add the lazy_flatpage context processor
Allows any non-flatpage to access a flatpage of the same request URL with lazy_flatpage<commit_after>
|
from django.utils.functional import SimpleLazyObject
from django.contrib.flatpages.models import FlatPage
from django.conf import settings
from django.utils.safestring import mark_safe
def flatpage(request):
def get_flatpage():
url = request.path_info
# This has no chance of working
if not url.endswith('/') and settings.APPEND_SLASH:
return ''
if not url.startswith('/'):
url = '/' + url
try:
f = FlatPage.objects.get(url=url)
f.title = mark_safe(f.title)
f.content = mark_safe(f.content)
return f
except FlatPage.DoesNotExist:
return ''
return {
'lazy_flatpage': SimpleLazyObject(get_flatpage),
}
|
Add the lazy_flatpage context processor
Allows any non-flatpage to access a flatpage of the same request URL with lazy_flatpagefrom django.utils.functional import SimpleLazyObject
from django.contrib.flatpages.models import FlatPage
from django.conf import settings
from django.utils.safestring import mark_safe
def flatpage(request):
def get_flatpage():
url = request.path_info
# This has no chance of working
if not url.endswith('/') and settings.APPEND_SLASH:
return ''
if not url.startswith('/'):
url = '/' + url
try:
f = FlatPage.objects.get(url=url)
f.title = mark_safe(f.title)
f.content = mark_safe(f.content)
return f
except FlatPage.DoesNotExist:
return ''
return {
'lazy_flatpage': SimpleLazyObject(get_flatpage),
}
|
<commit_before><commit_msg>Add the lazy_flatpage context processor
Allows any non-flatpage to access a flatpage of the same request URL with lazy_flatpage<commit_after>from django.utils.functional import SimpleLazyObject
from django.contrib.flatpages.models import FlatPage
from django.conf import settings
from django.utils.safestring import mark_safe
def flatpage(request):
def get_flatpage():
url = request.path_info
# This has no chance of working
if not url.endswith('/') and settings.APPEND_SLASH:
return ''
if not url.startswith('/'):
url = '/' + url
try:
f = FlatPage.objects.get(url=url)
f.title = mark_safe(f.title)
f.content = mark_safe(f.content)
return f
except FlatPage.DoesNotExist:
return ''
return {
'lazy_flatpage': SimpleLazyObject(get_flatpage),
}
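Hooking it up is ordinary Django context-processor registration; a sketch assuming this file lives at blanc_basic_pages/context_processors.py (on older Django versions the list is TEMPLATE_CONTEXT_PROCESSORS instead of the TEMPLATES setting):

# settings.py
TEMPLATES[0]['OPTIONS']['context_processors'] += [
    'blanc_basic_pages.context_processors.flatpage',
]

# in any non-flatpage template:
#   {% if lazy_flatpage %}<h1>{{ lazy_flatpage.title }}</h1>{{ lazy_flatpage.content }}{% endif %}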
|
|
64216fcc89b251a64a4ceda5da02ccff5285f548
|
octohatrack_graphql.py
|
octohatrack_graphql.py
|
#!/usr/bin/env python
"""
Quick implementation of octohatrack with GraphQL
USAGE
./octohatrack_graphql.py user/repo
LIMITATIONS
Limitations in the GitHub GraphQL API mean that this will only return the:
- last 100 issues
- last 100 comments per issue
- last 100 pull requests
- last 100 comments per pull request
- last 100 commit comments
"""
import requests
import json
import os
import click
GITHUB_API = "https://api.github.com/graphql"
GITHUB_TOKEN = os.environ.get("GITHUB_TOKEN")
HEADERS = {"Authorization": "token %s" % GITHUB_TOKEN}
graphql_query = """
query ($owner: String!, $name: String!, $history: Int!) {
repository(owner: $owner, name: $name) {
issues(last:$history) {
nodes {
author { login avatarUrl }
comments (last:$history){ nodes {author {login avatarUrl}}}
}
}
pullRequests(last: $history) {
edges { node {
author { avatarUrl login }
comments (last:$history){ nodes {author {login avatarUrl}}}
}}
}
commitComments(last: $history) {
edges { node { author { login avatarUrl }}}
}
}
}
"""
def reducejson(j):
"""
Not sure if there's a better way to walk the ... interesting result
"""
authors = []
for key in j["data"]["repository"]["commitComments"]["edges"]:
authors.append(key["node"]["author"])
for key in j["data"]["repository"]["issues"]["nodes"]:
authors.append(key["author"])
for c in key["comments"]["nodes"]:
authors.append(c["author"])
for key in j["data"]["repository"]["pullRequests"]["edges"]:
authors.append(key["node"]["author"])
for c in key["node"]["comments"]["nodes"]:
authors.append(c["author"])
unique = list({v['login']:v for v in authors}.values())
return unique
@click.command()
@click.argument('repo')
def main(repo):
owner, name = repo.split("/")
variables = { "owner": owner, "name": name, "history":100}
result = requests.post(GITHUB_API, json.dumps({"query": graphql_query, "variables": variables}), headers=HEADERS)
authors = reducejson(result.json())
for a in authors:
print(a)
print(len(authors))
if __name__ == '__main__':
main()
|
Add proof of concept graphql implementation
|
Add proof of concept graphql implementation
|
Python
|
bsd-3-clause
|
glasnt/octohat,LABHR/octohatrack
|
Add proof of concept graphql implementation
|
#!/usr/bin/env python
"""
Quick implementation of octohatrack with GraphQL
USAGE
./octohatrack_graphql.py user/repo
LIMITATIONS
Limitations in the GitHub GraphQL API mean that this will only return the:
- last 100 issues
- last 100 comments per issue
- last 100 pull requests
- last 100 comments per pull request
- last 100 commit comments
"""
import requests
import json
import os
import click
GITHUB_API = "https://api.github.com/graphql"
GITHUB_TOKEN = os.environ.get("GITHUB_TOKEN")
HEADERS = {"Authorization": "token %s" % GITHUB_TOKEN}
graphql_query = """
query ($owner: String!, $name: String!, $history: Int!) {
repository(owner: $owner, name: $name) {
issues(last:$history) {
nodes {
author { login avatarUrl }
comments (last:$history){ nodes {author {login avatarUrl}}}
}
}
pullRequests(last: $history) {
edges { node {
author { avatarUrl login }
comments (last:$history){ nodes {author {login avatarUrl}}}
}}
}
commitComments(last: $history) {
edges { node { author { login avatarUrl }}}
}
}
}
"""
def reducejson(j):
"""
Not sure if there's a better way to walk the ... interesting result
"""
authors = []
for key in j["data"]["repository"]["commitComments"]["edges"]:
authors.append(key["node"]["author"])
for key in j["data"]["repository"]["issues"]["nodes"]:
authors.append(key["author"])
for c in key["comments"]["nodes"]:
authors.append(c["author"])
for key in j["data"]["repository"]["pullRequests"]["edges"]:
authors.append(key["node"]["author"])
for c in key["node"]["comments"]["nodes"]:
authors.append(c["author"])
unique = list({v['login']:v for v in authors}.values())
return unique
@click.command()
@click.argument('repo')
def main(repo):
owner, name = repo.split("/")
variables = { "owner": owner, "name": name, "history":100}
result = requests.post(GITHUB_API, json.dumps({"query": graphql_query, "variables": variables}), headers=HEADERS)
authors = reducejson(result.json())
for a in authors:
print(a)
print(len(authors))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add proof of concept graphql implementation<commit_after>
|
#!/usr/bin/env python
"""
Quick implementation of octohatrack with GraphQL
USAGE
./octohatrack_graphql.py user/repo
LIMITATIONS
Limitations in the GitHub GraphQL API mean that this will only return the:
- last 100 issues
- last 100 comments per issue
- last 100 pull requests
- last 100 comments per pull request
- last 100 commit comments
"""
import requests
import json
import os
import click
GITHUB_API = "https://api.github.com/graphql"
GITHUB_TOKEN = os.environ.get("GITHUB_TOKEN")
HEADERS = {"Authorization": "token %s" % GITHUB_TOKEN}
graphql_query = """
query ($owner: String!, $name: String!, $history: Int!) {
repository(owner: $owner, name: $name) {
issues(last:$history) {
nodes {
author { login avatarUrl }
comments (last:$history){ nodes {author {login avatarUrl}}}
}
}
pullRequests(last: $history) {
edges { node {
author { avatarUrl login }
comments (last:$history){ nodes {author {login avatarUrl}}}
}}
}
commitComments(last: $history) {
edges { node { author { login avatarUrl }}}
}
}
}
"""
def reducejson(j):
"""
Not sure if there's a better way to walk the ... interesting result
"""
authors = []
for key in j["data"]["repository"]["commitComments"]["edges"]:
authors.append(key["node"]["author"])
for key in j["data"]["repository"]["issues"]["nodes"]:
authors.append(key["author"])
for c in key["comments"]["nodes"]:
authors.append(c["author"])
for key in j["data"]["repository"]["pullRequests"]["edges"]:
authors.append(key["node"]["author"])
for c in key["node"]["comments"]["nodes"]:
authors.append(c["author"])
unique = list({v['login']:v for v in authors}.values())
return unique
@click.command()
@click.argument('repo')
def main(repo):
owner, name = repo.split("/")
variables = { "owner": owner, "name": name, "history":100}
result = requests.post(GITHUB_API, json.dumps({"query": graphql_query, "variables": variables}), headers=HEADERS)
authors = reducejson(result.json())
for a in authors:
print(a)
print(len(authors))
if __name__ == '__main__':
main()
|
Add proof of concept graphql implementation#!/usr/bin/env python
"""
Quick implementation of octohatrack with GraphQL
USAGE
./octohatrack_graphql.py user/repo
LIMITATIONS
Limitations in the GitHub GraphQL API mean that this will only return the:
- last 100 issues
- last 100 comments per issue
- last 100 pull requests
- last 100 comments per pull request
- last 100 commit comments
"""
import requests
import json
import os
import click
GITHUB_API = "https://api.github.com/graphql"
GITHUB_TOKEN = os.environ.get("GITHUB_TOKEN")
HEADERS = {"Authorization": "token %s" % GITHUB_TOKEN}
graphql_query = """
query ($owner: String!, $name: String!, $history: Int!) {
repository(owner: $owner, name: $name) {
issues(last:$history) {
nodes {
author { login avatarUrl }
comments (last:$history){ nodes {author {login avatarUrl}}}
}
}
pullRequests(last: $history) {
edges { node {
author { avatarUrl login }
comments (last:$history){ nodes {author {login avatarUrl}}}
}}
}
commitComments(last: $history) {
edges { node { author { login avatarUrl }}}
}
}
}
"""
def reducejson(j):
"""
Not sure if there's a better way to walk the ... interesting result
"""
authors = []
for key in j["data"]["repository"]["commitComments"]["edges"]:
authors.append(key["node"]["author"])
for key in j["data"]["repository"]["issues"]["nodes"]:
authors.append(key["author"])
for c in key["comments"]["nodes"]:
authors.append(c["author"])
for key in j["data"]["repository"]["pullRequests"]["edges"]:
authors.append(key["node"]["author"])
for c in key["node"]["comments"]["nodes"]:
authors.append(c["author"])
unique = list({v['login']:v for v in authors}.values())
return unique
@click.command()
@click.argument('repo')
def main(repo):
owner, name = repo.split("/")
variables = { "owner": owner, "name": name, "history":100}
result = requests.post(GITHUB_API, json.dumps({"query": graphql_query, "variables": variables}), headers=HEADERS)
authors = reducejson(result.json())
for a in authors:
print(a)
print(len(authors))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add proof of concept graphql implementation<commit_after>#!/usr/bin/env python
"""
Quick implementation of octohatrack with GraphQL
USAGE
./octohatrack_graphql.py user/repo
LIMITATIONS
Limitations in the GitHub GraphQL API mean that this will only return the:
- last 100 issues
- last 100 comments per issue
- last 100 pull requests
- last 100 comments per pull request
- last 100 commit comments
"""
import requests
import json
import os
import click
GITHUB_API = "https://api.github.com/graphql"
GITHUB_TOKEN = os.environ.get("GITHUB_TOKEN")
HEADERS = {"Authorization": "token %s" % GITHUB_TOKEN}
graphql_query = """
query ($owner: String!, $name: String!, $history: Int!) {
repository(owner: $owner, name: $name) {
issues(last:$history) {
nodes {
author { login avatarUrl }
comments (last:$history){ nodes {author {login avatarUrl}}}
}
}
pullRequests(last: $history) {
edges { node {
author { avatarUrl login }
comments (last:$history){ nodes {author {login avatarUrl}}}
}}
}
commitComments(last: $history) {
edges { node { author { login avatarUrl }}}
}
}
}
"""
def reducejson(j):
"""
Not sure if there's a better way to walk the ... interesting result
"""
authors = []
for key in j["data"]["repository"]["commitComments"]["edges"]:
authors.append(key["node"]["author"])
for key in j["data"]["repository"]["issues"]["nodes"]:
authors.append(key["author"])
for c in key["comments"]["nodes"]:
authors.append(c["author"])
for key in j["data"]["repository"]["pullRequests"]["edges"]:
authors.append(key["node"]["author"])
for c in key["node"]["comments"]["nodes"]:
authors.append(c["author"])
unique = list({v['login']:v for v in authors}.values())
return unique
@click.command()
@click.argument('repo')
def main(repo):
owner, name = repo.split("/")
variables = { "owner": owner, "name": name, "history":100}
result = requests.post(GITHUB_API, json.dumps({"query": graphql_query, "variables": variables}), headers=HEADERS)
authors = reducejson(result.json())
for a in authors:
print(a)
print(len(authors))
if __name__ == '__main__':
main()
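The caps listed under LIMITATIONS come from fetching a single page per connection type; cursor pagination is the usual workaround. A sketch for issue authors only, reusing the module-level GITHUB_API and HEADERS above (pageInfo/endCursor are standard GitHub GraphQL fields; the rest mirrors the query in the script):

paged_issue_query = """
query ($owner: String!, $name: String!, $cursor: String) {
  repository(owner: $owner, name: $name) {
    issues(first: 100, after: $cursor) {
      pageInfo { hasNextPage endCursor }
      nodes { author { login avatarUrl } }
    }
  }
}
"""

def all_issue_authors(owner, name):
    authors, cursor = [], None
    while True:
        variables = {"owner": owner, "name": name, "cursor": cursor}
        payload = json.dumps({"query": paged_issue_query, "variables": variables})
        issues = requests.post(GITHUB_API, payload, headers=HEADERS).json()["data"]["repository"]["issues"]
        authors.extend(node["author"] for node in issues["nodes"])
        if not issues["pageInfo"]["hasNextPage"]:
            return authors
        cursor = issues["pageInfo"]["endCursor"]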
|
|
ef0dd3851f001ebc44484a10d58e6f3f7c5671e1
|
color_names.py
|
color_names.py
|
colors_dict = dict(
ENDC = '\033[0m',
RED = '\033[31m',
GREEN = '\033[32m',
GOLD = '\033[33m',
BLUE = '\033[34m',
PURPLE = '\033[35m',
LIGHTBLUE = '\033[36m',
GRAY = '\033[37m',
RED_BG = '\033[41m',
GREEN_BG = '\033[42m',
GOLD_BG = '\033[43m',
BLUE_BG = '\033[44m',
PURPLE_BG = '\033[45m',
LIGHTBLUE_BG = '\033[46m',
GRAY_BG = '\033[47m',
DARKGRAY_SOFT = '\033[90m',
RED_SOFT = '\033[91m',
GREEN_SOFT = '\033[92m',
GOLD_SOFT = '\033[93m',
BLUE_SOFT = '\033[94m',
PURPLE_SOFT = '\033[95m',
LIGHTBLUE_SOFT = '\033[96m',
GRAY_SOFT = '\033[97m',
DARKGRAY_SOFT_BG = '\033[100m',
RED_SOFT_BG = '\033[101m',
GREEN_SOFT_BG = '\033[102m',
GOLD_SOFT_BG = '\033[103m',
BLUE_SOFT_BG = '\033[104m',
PURPLE_SOFT_BG = '\033[105m',
LIGHTBLUE_SOFT_BG = '\033[106m',
GRAY_SOFT_BG = '\033[107m',
HEADER = '\033[95m',
OKBLUE = '\033[94m',
OKGREEN = '\033[92m',
WARNING = '\033[93m',
FAIL = '\033[91m',
BOLD = '\033[1m',
UNDERLINE = '\033[4m',
CROSSOUT = '\033[28m',
GRYHIGHLT = '\033[7m',
BLACKBLACK = '\033[30m')
|
Add initial version of code.
|
Add initial version of code.
|
Python
|
mit
|
rec/josh
|
Add initial version of code.
|
colors_dict = dict(
ENDC = '\033[0m',
RED = '\033[31m',
GREEN = '\033[32m',
GOLD = '\033[33m',
BLUE = '\033[34m',
PURPLE = '\033[35m',
LIGHTBLUE = '\033[36m',
GRAY = '\033[37m',
RED_BG = '\033[41m',
GREEN_BG = '\033[42m',
GOLD_BG = '\033[43m',
BLUE_BG = '\033[44m',
PURPLE_BG = '\033[45m',
LIGHTBLUE_BG = '\033[46m',
GRAY_BG = '\033[47m',
DARKGRAY_SOFT = '\033[90m',
RED_SOFT = '\033[91m',
GREEN_SOFT = '\033[92m',
GOLD_SOFT = '\033[93m',
BLUE_SOFT = '\033[94m',
PURPLE_SOFT = '\033[95m',
LIGHTBLUE_SOFT = '\033[96m',
GRAY_SOFT = '\033[97m',
DARKGRAY_SOFT_BG = '\033[100m',
RED_SOFT_BG = '\033[101m',
GREEN_SOFT_BG = '\033[102m',
GOLD_SOFT_BG = '\033[103m',
BLUE_SOFT_BG = '\033[104m',
PURPLE_SOFT_BG = '\033[105m',
LIGHTBLUE_SOFT_BG = '\033[106m',
GRAY_SOFT_BG = '\033[107m',
HEADER = '\033[95m',
OKBLUE = '\033[94m',
OKGREEN = '\033[92m',
WARNING = '\033[93m',
FAIL = '\033[91m',
BOLD = '\033[1m',
UNDERLINE = '\033[4m',
CROSSOUT = '\033[28m',
GRYHIGHLT = '\033[7m',
BLACKBLACK = '\033[30m')
|
<commit_before><commit_msg>Add initial version of code.<commit_after>
|
colors_dict = dict(
ENDC = '\033[0m',
RED = '\033[31m',
GREEN = '\033[32m',
GOLD = '\033[33m',
BLUE = '\033[34m',
PURPLE = '\033[35m',
LIGHTBLUE = '\033[36m',
GRAY = '\033[37m',
RED_BG = '\033[41m',
GREEN_BG = '\033[42m',
GOLD_BG = '\033[43m',
BLUE_BG = '\033[44m',
PURPLE_BG = '\033[45m',
LIGHTBLUE_BG = '\033[46m',
GRAY_BG = '\033[47m',
DARKGRAY_SOFT = '\033[90m',
RED_SOFT = '\033[91m',
GREEN_SOFT = '\033[92m',
GOLD_SOFT = '\033[93m',
BLUE_SOFT = '\033[94m',
PURPLE_SOFT = '\033[95m',
LIGHTBLUE_SOFT = '\033[96m',
GRAY_SOFT = '\033[97m',
DARKGRAY_SOFT_BG = '\033[100m',
RED_SOFT_BG = '\033[101m',
GREEN_SOFT_BG = '\033[102m',
GOLD_SOFT_BG = '\033[103m',
BLUE_SOFT_BG = '\033[104m',
PURPLE_SOFT_BG = '\033[105m',
LIGHTBLUE_SOFT_BG = '\033[106m',
GRAY_SOFT_BG = '\033[107m',
HEADER = '\033[95m',
OKBLUE = '\033[94m',
OKGREEN = '\033[92m',
WARNING = '\033[93m',
FAIL = '\033[91m',
BOLD = '\033[1m',
UNDERLINE = '\033[4m',
CROSSOUT = '\033[28m',
GRYHIGHLT = '\033[7m',
BLACKBLACK = '\033[30m')
|
Add initial version of code.colors_dict = dict(
ENDC = '\033[0m',
RED = '\033[31m',
GREEN = '\033[32m',
GOLD = '\033[33m',
BLUE = '\033[34m',
PURPLE = '\033[35m',
LIGHTBLUE = '\033[36m',
GRAY = '\033[37m',
RED_BG = '\033[41m',
GREEN_BG = '\033[42m',
GOLD_BG = '\033[43m',
BLUE_BG = '\033[44m',
PURPLE_BG = '\033[45m',
LIGHTBLUE_BG = '\033[46m',
GRAY_BG = '\033[47m',
DARKGRAY_SOFT = '\033[90m',
RED_SOFT = '\033[91m',
GREEN_SOFT = '\033[92m',
GOLD_SOFT = '\033[93m',
BLUE_SOFT = '\033[94m',
PURPLE_SOFT = '\033[95m',
LIGHTBLUE_SOFT = '\033[96m',
GRAY_SOFT = '\033[97m',
DARKGRAY_SOFT_BG = '\033[100m',
RED_SOFT_BG = '\033[101m',
GREEN_SOFT_BG = '\033[102m',
GOLD_SOFT_BG = '\033[103m',
BLUE_SOFT_BG = '\033[104m',
PURPLE_SOFT_BG = '\033[105m',
LIGHTBLUE_SOFT_BG = '\033[106m',
GRAY_SOFT_BG = '\033[107m',
HEADER = '\033[95m',
OKBLUE = '\033[94m',
OKGREEN = '\033[92m',
WARNING = '\033[93m',
FAIL = '\033[91m',
BOLD = '\033[1m',
UNDERLINE = '\033[4m',
CROSSOUT = '\033[28m',
GRYHIGHLT = '\033[7m',
BLACKBLACK = '\033[30m')
|
<commit_before><commit_msg>Add initial version of code.<commit_after>colors_dict = dict(
ENDC = '\033[0m',
RED = '\033[31m',
GREEN = '\033[32m',
GOLD = '\033[33m',
BLUE = '\033[34m',
PURPLE = '\033[35m',
LIGHTBLUE = '\033[36m',
GRAY = '\033[37m',
RED_BG = '\033[41m',
GREEN_BG = '\033[42m',
GOLD_BG = '\033[43m',
BLUE_BG = '\033[44m',
PURPLE_BG = '\033[45m',
LIGHTBLUE_BG = '\033[46m',
GRAY_BG = '\033[47m',
DARKGRAY_SOFT = '\033[90m',
RED_SOFT = '\033[91m',
GREEN_SOFT = '\033[92m',
GOLD_SOFT = '\033[93m',
BLUE_SOFT = '\033[94m',
PURPLE_SOFT = '\033[95m',
LIGHTBLUE_SOFT = '\033[96m',
GRAY_SOFT = '\033[97m',
DARKGRAY_SOFT_BG = '\033[100m',
RED_SOFT_BG = '\033[101m',
GREEN_SOFT_BG = '\033[102m',
GOLD_SOFT_BG = '\033[103m',
BLUE_SOFT_BG = '\033[104m',
PURPLE_SOFT_BG = '\033[105m',
LIGHTBLUE_SOFT_BG = '\033[106m',
GRAY_SOFT_BG = '\033[107m',
HEADER = '\033[95m',
OKBLUE = '\033[94m',
OKGREEN = '\033[92m',
WARNING = '\033[93m',
FAIL = '\033[91m',
BOLD = '\033[1m',
UNDERLINE = '\033[4m',
CROSSOUT = '\033[28m',
GRYHIGHLT = '\033[7m',
BLACKBLACK = '\033[30m')
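A one-line usage sketch; any key in the dict works the same way, and closing with ENDC keeps later terminal output untinted:

print(colors_dict['RED'] + 'error:' + colors_dict['ENDC'] + ' something went wrong')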
|
|
4052fee38c8616177fa96e35e3a1975211dda7ba
|
regparser/web/jobs/migrations/0010_auto_20160921_2322.py
|
regparser/web/jobs/migrations/0010_auto_20160921_2322.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('jobs', '0009_auto_20160824_2347'),
]
operations = [
migrations.AlterField(
model_name='pipelinejob',
name='status',
field=models.CharField(default=b'received', max_length=32, choices=[(b'complete', b'complete'), (b'complete_with_errors', b'complete_with_errors'), (b'failed', b'failed'), (b'in_progress', b'in_progress'), (b'received', b'received')]),
),
migrations.AlterField(
model_name='proposalpipelinejob',
name='status',
field=models.CharField(default=b'received', max_length=32, choices=[(b'complete', b'complete'), (b'complete_with_errors', b'complete_with_errors'), (b'failed', b'failed'), (b'in_progress', b'in_progress'), (b'received', b'received')]),
),
]
|
Add missing Web API migration file.
|
Add missing Web API migration file.
|
Python
|
cc0-1.0
|
eregs/regulations-parser,tadhg-ohiggins/regulations-parser,tadhg-ohiggins/regulations-parser,eregs/regulations-parser
|
Add missing Web API migration file.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('jobs', '0009_auto_20160824_2347'),
]
operations = [
migrations.AlterField(
model_name='pipelinejob',
name='status',
field=models.CharField(default=b'received', max_length=32, choices=[(b'complete', b'complete'), (b'complete_with_errors', b'complete_with_errors'), (b'failed', b'failed'), (b'in_progress', b'in_progress'), (b'received', b'received')]),
),
migrations.AlterField(
model_name='proposalpipelinejob',
name='status',
field=models.CharField(default=b'received', max_length=32, choices=[(b'complete', b'complete'), (b'complete_with_errors', b'complete_with_errors'), (b'failed', b'failed'), (b'in_progress', b'in_progress'), (b'received', b'received')]),
),
]
|
<commit_before><commit_msg>Add missing Web API migration file.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('jobs', '0009_auto_20160824_2347'),
]
operations = [
migrations.AlterField(
model_name='pipelinejob',
name='status',
field=models.CharField(default=b'received', max_length=32, choices=[(b'complete', b'complete'), (b'complete_with_errors', b'complete_with_errors'), (b'failed', b'failed'), (b'in_progress', b'in_progress'), (b'received', b'received')]),
),
migrations.AlterField(
model_name='proposalpipelinejob',
name='status',
field=models.CharField(default=b'received', max_length=32, choices=[(b'complete', b'complete'), (b'complete_with_errors', b'complete_with_errors'), (b'failed', b'failed'), (b'in_progress', b'in_progress'), (b'received', b'received')]),
),
]
|
Add missing Web API migration file.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('jobs', '0009_auto_20160824_2347'),
]
operations = [
migrations.AlterField(
model_name='pipelinejob',
name='status',
field=models.CharField(default=b'received', max_length=32, choices=[(b'complete', b'complete'), (b'complete_with_errors', b'complete_with_errors'), (b'failed', b'failed'), (b'in_progress', b'in_progress'), (b'received', b'received')]),
),
migrations.AlterField(
model_name='proposalpipelinejob',
name='status',
field=models.CharField(default=b'received', max_length=32, choices=[(b'complete', b'complete'), (b'complete_with_errors', b'complete_with_errors'), (b'failed', b'failed'), (b'in_progress', b'in_progress'), (b'received', b'received')]),
),
]
|
<commit_before><commit_msg>Add missing Web API migration file.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('jobs', '0009_auto_20160824_2347'),
]
operations = [
migrations.AlterField(
model_name='pipelinejob',
name='status',
field=models.CharField(default=b'received', max_length=32, choices=[(b'complete', b'complete'), (b'complete_with_errors', b'complete_with_errors'), (b'failed', b'failed'), (b'in_progress', b'in_progress'), (b'received', b'received')]),
),
migrations.AlterField(
model_name='proposalpipelinejob',
name='status',
field=models.CharField(default=b'received', max_length=32, choices=[(b'complete', b'complete'), (b'complete_with_errors', b'complete_with_errors'), (b'failed', b'failed'), (b'in_progress', b'in_progress'), (b'received', b'received')]),
),
]
|
|
5bf040c84bb5fddceba0324b409514e8c80e19eb
|
conda/python-scons/conda.py
|
conda/python-scons/conda.py
|
from distutils.version import StrictVersion
from distutils.msvccompiler import get_build_version
from sys import maxsize
from SCons.Script import AddOption, GetOption
from path import path
import subprocess
def generate(env):
"""Add Builders and construction variables to the Environment."""
if not 'conda' in env['TOOLS'][:-1]:
env.Tool('system')
SYSTEM = env['SYSTEM']
def Conda(sources=[], channels=['statiskit', 'conda-forge']):
if len(sources) == 0:
if SYSTEM == 'win':
sources = [recipe for recipe in path(env.Dir('.').srcnode().abspath).walkdirs() if (recipe/'meta.yaml').exists() and (recipe/'bld.bat').exists()]
else:
sources = [recipe for recipe in path(env.Dir('.').srcnode().abspath).walkdirs() if (recipe/'meta.yaml').exists() and (recipe/'build.sh').exists()]
        targets = []
        for source in sources:
            # placeholder: per-recipe conda-build logic is not implemented yet
            pass
if SYSTEM == 'win':
env['SHLIBSUFFIX'] = '.pyd'
env['TARGET_ARCH'] = 'x86_64' if ARCH == '64' else 'x86'
AddOption('--msvc-version',
dest = 'msvc-version',
type = 'string',
nargs = 1,
action = 'store',
help = 'MSVC version',
default = str(get_build_version()))
env['MSVC_VERSION'] = GetOption('msvc-version')
env.Tool('default')
env.Tool('prefix')
if SYSTEM == 'win':
env.AppendUnique(CCFLAGS=['/O2',
'/Ob2',
'/MD',
'/GR',
'/EHsc',
'/Gy',
'/GF',
'/GA'],
CPPDEFINES=['WIN32',
'UNICODE'])
env.PrependUnique(CPPPATH=['$PREFIX\include'])
env.PrependUnique(LIBPATH=['$PREFIX\lib',
'$PREFIX\..\libs'])
else:
env.PrependUnique(CPPPATH=['$PREFIX/include'],
LIBPATH=['$PREFIX/lib'],
CFLAGS=["-x", "c", "-std=c11"],
CXXFLAGS=["-x", "c++", "-std=c++11"])
if ARCH == '32':
env.AppendUnique(CCFLAGS=['-m32'])
if SYSTEM == 'osx':
env.AppendUnique(CCFLAGS=['-ferror-limit=0'],
CXXFLAGS=['-stdlib=libc++'])
else:
env.AppendUnique(CCFLAGS=['-fmax-errors=0',
'-Wl,--no-undefined',
'-fvisibility=hidden'],
CPPDEFINES=['_GLIBCXX_USE_CXX11_ABI=1'])
def exists(env):
return 1
|
Add Conda Tool for SCons
|
Add Conda Tool for SCons
|
Python
|
apache-2.0
|
StatisKit/StatisKit,StatisKit/StatisKit
|
Add Conda Tool for SCons
|
from distutils.version import StrictVersion
from distutils.msvccompiler import get_build_version
from sys import maxsize
from SCons.Script import AddOption, GetOption
from path import path
import subprocess
def generate(env):
"""Add Builders and construction variables to the Environment."""
if not 'conda' in env['TOOLS'][:-1]:
env.Tool('system')
SYSTEM = env['SYSTEM']
def Conda(sources=[], channels=['statiskit', 'conda-forge']):
if len(sources) == 0:
if SYSTEM == 'win':
sources = [recipe for recipe in path(env.Dir('.').srcnode().abspath).walkdirs() if (recipe/'meta.yaml').exists() and (recipe/'bld.bat').exists()]
else:
sources = [recipe for recipe in path(env.Dir('.').srcnode().abspath).walkdirs() if (recipe/'meta.yaml').exists() and (recipe/'build.sh').exists()]
        targets = []
        for source in sources:
            # placeholder: per-recipe conda-build logic is not implemented yet
            pass
if SYSTEM == 'win':
env['SHLIBSUFFIX'] = '.pyd'
env['TARGET_ARCH'] = 'x86_64' if ARCH == '64' else 'x86'
AddOption('--msvc-version',
dest = 'msvc-version',
type = 'string',
nargs = 1,
action = 'store',
help = 'MSVC version',
default = str(get_build_version()))
env['MSVC_VERSION'] = GetOption('msvc-version')
env.Tool('default')
env.Tool('prefix')
if SYSTEM == 'win':
env.AppendUnique(CCFLAGS=['/O2',
'/Ob2',
'/MD',
'/GR',
'/EHsc',
'/Gy',
'/GF',
'/GA'],
CPPDEFINES=['WIN32',
'UNICODE'])
env.PrependUnique(CPPPATH=['$PREFIX\include'])
env.PrependUnique(LIBPATH=['$PREFIX\lib',
'$PREFIX\..\libs'])
else:
env.PrependUnique(CPPPATH=['$PREFIX/include'],
LIBPATH=['$PREFIX/lib'],
CFLAGS=["-x", "c", "-std=c11"],
CXXFLAGS=["-x", "c++", "-std=c++11"])
if ARCH == '32':
env.AppendUnique(CCFLAGS=['-m32'])
if SYSTEM == 'osx':
env.AppendUnique(CCFLAGS=['-ferror-limit=0'],
CXXFLAGS=['-stdlib=libc++'])
else:
env.AppendUnique(CCFLAGS=['-fmax-errors=0',
'-Wl,--no-undefined',
'-fvisibility=hidden'],
CPPDEFINES=['_GLIBCXX_USE_CXX11_ABI=1'])
def exists(env):
return 1
|
<commit_before><commit_msg>Add Conda Tool for SCons<commit_after>
|
from distutils.version import StrictVersion
from distutils.msvccompiler import get_build_version
from sys import maxsize
from SCons.Script import AddOption, GetOption
from path import path
import subprocess
def generate(env):
"""Add Builders and construction variables to the Environment."""
if not 'conda' in env['TOOLS'][:-1]:
env.Tool('system')
SYSTEM = env['SYSTEM']
def Conda(sources=[], channels=['statiskit', 'conda-forge']):
if len(sources) == 0:
if SYSTEM == 'win':
sources = [recipe for recipe in path(env.Dir('.').srcnode().abspath).walkdirs() if (recipe/'meta.yaml').exists() and (recipe/'bld.bat').exists()]
else:
sources = [recipe for recipe in path(env.Dir('.').srcnode().abspath).walkdirs() if (recipe/'meta.yaml').exists() and (recipe/'build.sh').exists()]
        targets = []
        for source in sources:
            # placeholder: per-recipe conda-build logic is not implemented yet
            pass
if SYSTEM == 'win':
env['SHLIBSUFFIX'] = '.pyd'
env['TARGET_ARCH'] = 'x86_64' if ARCH == '64' else 'x86'
AddOption('--msvc-version',
dest = 'msvc-version',
type = 'string',
nargs = 1,
action = 'store',
help = 'MSVC version',
default = str(get_build_version()))
env['MSVC_VERSION'] = GetOption('msvc-version')
env.Tool('default')
env.Tool('prefix')
if SYSTEM == 'win':
env.AppendUnique(CCFLAGS=['/O2',
'/Ob2',
'/MD',
'/GR',
'/EHsc',
'/Gy',
'/GF',
'/GA'],
CPPDEFINES=['WIN32',
'UNICODE'])
env.PrependUnique(CPPPATH=['$PREFIX\include'])
env.PrependUnique(LIBPATH=['$PREFIX\lib',
'$PREFIX\..\libs'])
else:
env.PrependUnique(CPPPATH=['$PREFIX/include'],
LIBPATH=['$PREFIX/lib'],
CFLAGS=["-x", "c", "-std=c11"],
CXXFLAGS=["-x", "c++", "-std=c++11"])
if ARCH == '32':
env.AppendUnique(CCFLAGS=['-m32'])
if SYSTEM == 'osx':
env.AppendUnique(CCFLAGS=['-ferror-limit=0'],
CXXFLAGS=['-stdlib=libc++'])
else:
env.AppendUnique(CCFLAGS=['-fmax-errors=0',
'-Wl,--no-undefined',
'-fvisibility=hidden'],
CPPDEFINES=['_GLIBCXX_USE_CXX11_ABI=1'])
def exists(env):
return 1
|
Add Conda Tool for SConsfrom distutils.version import StrictVersion
from distutils.msvccompiler import get_build_version
from sys import maxsize
from SCons.Script import AddOption, GetOption
from path import path
import subprocess
def generate(env):
"""Add Builders and construction variables to the Environment."""
if not 'conda' in env['TOOLS'][:-1]:
env.Tool('system')
SYSTEM = env['SYSTEM']
def Conda(sources=[], channels=['statiskit', 'conda-forge']):
if len(sources) == 0:
if SYSTEM == 'win':
sources = [recipe for recipe in path(env.Dir('.').srcnode().abspath).walkdirs() if (recipe/'meta.yaml').exists() and (recipe/'bld.bat').exists()]
else:
sources = [recipe for recipe in path(env.Dir('.').srcnode().abspath).walkdirs() if (recipe/'meta.yaml').exists() and (recipe/'build.sh').exists()]
        targets = []
        for source in sources:
            # placeholder: per-recipe conda-build logic is not implemented yet
            pass
if SYSTEM == 'win':
env['SHLIBSUFFIX'] = '.pyd'
env['TARGET_ARCH'] = 'x86_64' if ARCH == '64' else 'x86'
AddOption('--msvc-version',
dest = 'msvc-version',
type = 'string',
nargs = 1,
action = 'store',
help = 'MSVC version',
default = str(get_build_version()))
env['MSVC_VERSION'] = GetOption('msvc-version')
env.Tool('default')
env.Tool('prefix')
if SYSTEM == 'win':
env.AppendUnique(CCFLAGS=['/O2',
'/Ob2',
'/MD',
'/GR',
'/EHsc',
'/Gy',
'/GF',
'/GA'],
CPPDEFINES=['WIN32',
'UNICODE'])
env.PrependUnique(CPPPATH=['$PREFIX\include'])
env.PrependUnique(LIBPATH=['$PREFIX\lib',
'$PREFIX\..\libs'])
else:
env.PrependUnique(CPPPATH=['$PREFIX/include'],
LIBPATH=['$PREFIX/lib'],
CFLAGS=["-x", "c", "-std=c11"],
CXXFLAGS=["-x", "c++", "-std=c++11"])
if ARCH == '32':
env.AppendUnique(CCFLAGS=['-m32'])
if SYSTEM == 'osx':
env.AppendUnique(CCFLAGS=['-ferror-limit=0'],
CXXFLAGS=['-stdlib=libc++'])
else:
env.AppendUnique(CCFLAGS=['-fmax-errors=0',
'-Wl,--no-undefined',
'-fvisibility=hidden'],
CPPDEFINES=['_GLIBCXX_USE_CXX11_ABI=1'])
def exists(env):
return 1
|
<commit_before><commit_msg>Add Conda Tool for SCons<commit_after>from distutils.version import StrictVersion
from distutils.msvccompiler import get_build_version
from sys import maxsize
from SCons.Script import AddOption, GetOption
from path import path
import subprocess
def generate(env):
"""Add Builders and construction variables to the Environment."""
if not 'conda' in env['TOOLS'][:-1]:
env.Tool('system')
SYSTEM = env['SYSTEM']
def Conda(sources=[], channels=['statiskit', 'conda-forge']):
if len(sources) == 0:
if SYSTEM == 'win':
sources = [recipe for recipe in path(env.Dir('.').srcnode().abspath).walkdirs() if (recipe/'meta.yaml').exists() and (recipe/'bld.bat').exists()]
else:
sources = [recipe for recipe in path(env.Dir('.').srcnode().abspath).walkdirs() if (recipe/'meta.yaml').exists() and (recipe/'build.sh').exists()]
        targets = []
        for source in sources:
            # placeholder: per-recipe conda-build logic is not implemented yet
            pass
if SYSTEM == 'win':
env['SHLIBSUFFIX'] = '.pyd'
env['TARGET_ARCH'] = 'x86_64' if ARCH == '64' else 'x86'
AddOption('--msvc-version',
dest = 'msvc-version',
type = 'string',
nargs = 1,
action = 'store',
help = 'MSVC version',
default = str(get_build_version()))
env['MSVC_VERSION'] = GetOption('msvc-version')
env.Tool('default')
env.Tool('prefix')
if SYSTEM == 'win':
env.AppendUnique(CCFLAGS=['/O2',
'/Ob2',
'/MD',
'/GR',
'/EHsc',
'/Gy',
'/GF',
'/GA'],
CPPDEFINES=['WIN32',
'UNICODE'])
env.PrependUnique(CPPPATH=['$PREFIX\include'])
env.PrependUnique(LIBPATH=['$PREFIX\lib',
'$PREFIX\..\libs'])
else:
env.PrependUnique(CPPPATH=['$PREFIX/include'],
LIBPATH=['$PREFIX/lib'],
CFLAGS=["-x", "c", "-std=c11"],
CXXFLAGS=["-x", "c++", "-std=c++11"])
if ARCH == '32':
env.AppendUnique(CCFLAGS=['-m32'])
if SYSTEM == 'osx':
env.AppendUnique(CCFLAGS=['-ferror-limit=0'],
CXXFLAGS=['-stdlib=libc++'])
else:
env.AppendUnique(CCFLAGS=['-fmax-errors=0',
'-Wl,--no-undefined',
'-fvisibility=hidden'],
CPPDEFINES=['_GLIBCXX_USE_CXX11_ABI=1'])
def exists(env):
return 1
|
|
e5642d8ab1d833896e3011cadcff7eb2eaf02d31
|
Sketches/MPS/Random/ChatServer.py
|
Sketches/MPS/Random/ChatServer.py
|
#!/usr/bin/python
import socket
import Axon
from Kamaelia.Chassis.ConnectedServer import ServerCore
from Kamaelia.Chassis.Pipeline import Pipeline
from Kamaelia.Util.Console import *
from Kamaelia.Util.PureTransformer import PureTransformer
from Kamaelia.Apps.Grey.PeriodicWakeup import PeriodicWakeup
from Kamaelia.Apps.Grey.WakeableIntrospector import WakeableIntrospector
class Echo(Axon.Component.component):
def main(self):
while not self.dataReady("control"):
for i in self.Inbox("inbox"):
self.send(i, "outbox")
self.pause()
yield 1
self.send(self.recv("control"), "signal")
class WakeableIntrospector(Axon.Component.component):
def main(self):
while not self.dataReady("control"):
Q = [ q.name for q in self.scheduler.listAllThreads() ]
Q.sort()
self.send(Q, "outbox")
self.scheduler.debuggingon = False
yield 1
while not self.dataReady("inbox"):
self.pause()
yield 1
while self.dataReady("inbox"): self.recv("inbox")
self.send(self.recv("control"), "signal")
class Uniq(Axon.Component.component):
def main(self):
last = None
while not self.dataReady("control"):
for msg in self.Inbox("inbox"):
if msg != last:
self.send(msg, "outbox")
last = msg
self.pause()
yield 1
self.send(self.recv("control"), "signal")
from Kamaelia.Experimental.PythonInterpreter import InterpreterTransformer
def NetInterpreter(*args, **argv):
return Pipeline(
PureTransformer(lambda x: str(x).rstrip()),
PureTransformer(lambda x: str(x).replace("\r","")),
InterpreterTransformer(),
PureTransformer(lambda x: str(x)+"\r\n>>> "),
)
ServerCore(protocol=NetInterpreter,
socketOptions=(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1),
port=8765).activate()
Pipeline(
PeriodicWakeup(interval=1),
WakeableIntrospector(),
PureTransformer(lambda x: str(len(x))+" "+str(x)+"\n"),
Uniq(),
ConsoleEchoer(),
).activate()
ServerCore(protocol=Echo,
socketOptions=(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1),
port=1234).run()
|
Test server for debugging a memory leak
|
Test server for debugging a memory leak
|
Python
|
apache-2.0
|
sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia
|
Test server for debugging a memory leak
|
#!/usr/bin/python
import socket
import Axon
from Kamaelia.Chassis.ConnectedServer import ServerCore
from Kamaelia.Chassis.Pipeline import Pipeline
from Kamaelia.Util.Console import *
from Kamaelia.Util.PureTransformer import PureTransformer
from Kamaelia.Apps.Grey.PeriodicWakeup import PeriodicWakeup
from Kamaelia.Apps.Grey.WakeableIntrospector import WakeableIntrospector
class Echo(Axon.Component.component):
def main(self):
while not self.dataReady("control"):
for i in self.Inbox("inbox"):
self.send(i, "outbox")
self.pause()
yield 1
self.send(self.recv("control"), "signal")
class WakeableIntrospector(Axon.Component.component):
def main(self):
while not self.dataReady("control"):
Q = [ q.name for q in self.scheduler.listAllThreads() ]
Q.sort()
self.send(Q, "outbox")
self.scheduler.debuggingon = False
yield 1
while not self.dataReady("inbox"):
self.pause()
yield 1
while self.dataReady("inbox"): self.recv("inbox")
self.send(self.recv("control"), "signal")
class Uniq(Axon.Component.component):
def main(self):
last = None
while not self.dataReady("control"):
for msg in self.Inbox("inbox"):
if msg != last:
self.send(msg, "outbox")
last = msg
self.pause()
yield 1
self.send(self.recv("control"), "signal")
from Kamaelia.Experimental.PythonInterpreter import InterpreterTransformer
def NetInterpreter(*args, **argv):
return Pipeline(
PureTransformer(lambda x: str(x).rstrip()),
PureTransformer(lambda x: str(x).replace("\r","")),
InterpreterTransformer(),
PureTransformer(lambda x: str(x)+"\r\n>>> "),
)
ServerCore(protocol=NetInterpreter,
socketOptions=(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1),
port=8765).activate()
Pipeline(
PeriodicWakeup(interval=1),
WakeableIntrospector(),
PureTransformer(lambda x: str(len(x))+" "+str(x)+"\n"),
Uniq(),
ConsoleEchoer(),
).activate()
ServerCore(protocol=Echo,
socketOptions=(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1),
port=1234).run()
|
<commit_before><commit_msg>Test server for debugging a memory leak<commit_after>
|
#!/usr/bin/python
import socket
import Axon
from Kamaelia.Chassis.ConnectedServer import ServerCore
from Kamaelia.Chassis.Pipeline import Pipeline
from Kamaelia.Util.Console import *
from Kamaelia.Util.PureTransformer import PureTransformer
from Kamaelia.Apps.Grey.PeriodicWakeup import PeriodicWakeup
from Kamaelia.Apps.Grey.WakeableIntrospector import WakeableIntrospector
class Echo(Axon.Component.component):
def main(self):
while not self.dataReady("control"):
for i in self.Inbox("inbox"):
self.send(i, "outbox")
self.pause()
yield 1
self.send(self.recv("control"), "signal")
class WakeableIntrospector(Axon.Component.component):
def main(self):
while not self.dataReady("control"):
Q = [ q.name for q in self.scheduler.listAllThreads() ]
Q.sort()
self.send(Q, "outbox")
self.scheduler.debuggingon = False
yield 1
while not self.dataReady("inbox"):
self.pause()
yield 1
while self.dataReady("inbox"): self.recv("inbox")
self.send(self.recv("control"), "signal")
class Uniq(Axon.Component.component):
def main(self):
last = None
while not self.dataReady("control"):
for msg in self.Inbox("inbox"):
if msg != last:
self.send(msg, "outbox")
last = msg
self.pause()
yield 1
self.send(self.recv("control"), "signal")
from Kamaelia.Experimental.PythonInterpreter import InterpreterTransformer
def NetInterpreter(*args, **argv):
return Pipeline(
PureTransformer(lambda x: str(x).rstrip()),
PureTransformer(lambda x: str(x).replace("\r","")),
InterpreterTransformer(),
PureTransformer(lambda x: str(x)+"\r\n>>> "),
)
ServerCore(protocol=NetInterpreter,
socketOptions=(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1),
port=8765).activate()
Pipeline(
PeriodicWakeup(interval=1),
WakeableIntrospector(),
PureTransformer(lambda x: str(len(x))+" "+str(x)+"\n"),
Uniq(),
ConsoleEchoer(),
).activate()
ServerCore(protocol=Echo,
socketOptions=(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1),
port=1234).run()
|
Test server for debugging a memory leak#!/usr/bin/python
import socket
import Axon
from Kamaelia.Chassis.ConnectedServer import ServerCore
from Kamaelia.Chassis.Pipeline import Pipeline
from Kamaelia.Util.Console import *
from Kamaelia.Util.PureTransformer import PureTransformer
from Kamaelia.Apps.Grey.PeriodicWakeup import PeriodicWakeup
from Kamaelia.Apps.Grey.WakeableIntrospector import WakeableIntrospector
class Echo(Axon.Component.component):
def main(self):
while not self.dataReady("control"):
for i in self.Inbox("inbox"):
self.send(i, "outbox")
self.pause()
yield 1
self.send(self.recv("control"), "signal")
class WakeableIntrospector(Axon.Component.component):
def main(self):
while not self.dataReady("control"):
Q = [ q.name for q in self.scheduler.listAllThreads() ]
Q.sort()
self.send(Q, "outbox")
self.scheduler.debuggingon = False
yield 1
while not self.dataReady("inbox"):
self.pause()
yield 1
while self.dataReady("inbox"): self.recv("inbox")
self.send(self.recv("control"), "signal")
class Uniq(Axon.Component.component):
def main(self):
last = None
while not self.dataReady("control"):
for msg in self.Inbox("inbox"):
if msg != last:
self.send(msg, "outbox")
last = msg
self.pause()
yield 1
self.send(self.recv("control"), "signal")
from Kamaelia.Experimental.PythonInterpreter import InterpreterTransformer
def NetInterpreter(*args, **argv):
return Pipeline(
PureTransformer(lambda x: str(x).rstrip()),
PureTransformer(lambda x: str(x).replace("\r","")),
InterpreterTransformer(),
PureTransformer(lambda x: str(x)+"\r\n>>> "),
)
ServerCore(protocol=NetInterpreter,
socketOptions=(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1),
port=8765).activate()
Pipeline(
PeriodicWakeup(interval=1),
WakeableIntrospector(),
PureTransformer(lambda x: str(len(x))+" "+str(x)+"\n"),
Uniq(),
ConsoleEchoer(),
).activate()
ServerCore(protocol=Echo,
socketOptions=(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1),
port=1234).run()
|
<commit_before><commit_msg>Test server for debugging a memory leak<commit_after>#!/usr/bin/python
import socket
import Axon
from Kamaelia.Chassis.ConnectedServer import ServerCore
from Kamaelia.Chassis.Pipeline import Pipeline
from Kamaelia.Util.Console import *
from Kamaelia.Util.PureTransformer import PureTransformer
from Kamaelia.Apps.Grey.PeriodicWakeup import PeriodicWakeup
from Kamaelia.Apps.Grey.WakeableIntrospector import WakeableIntrospector
class Echo(Axon.Component.component):
def main(self):
while not self.dataReady("control"):
for i in self.Inbox("inbox"):
self.send(i, "outbox")
self.pause()
yield 1
self.send(self.recv("control"), "signal")
class WakeableIntrospector(Axon.Component.component):
def main(self):
while not self.dataReady("control"):
Q = [ q.name for q in self.scheduler.listAllThreads() ]
Q.sort()
self.send(Q, "outbox")
self.scheduler.debuggingon = False
yield 1
while not self.dataReady("inbox"):
self.pause()
yield 1
while self.dataReady("inbox"): self.recv("inbox")
self.send(self.recv("control"), "signal")
class Uniq(Axon.Component.component):
def main(self):
last = None
while not self.dataReady("control"):
for msg in self.Inbox("inbox"):
if msg != last:
self.send(msg, "outbox")
last = msg
self.pause()
yield 1
self.send(self.recv("control"), "signal")
from Kamaelia.Experimental.PythonInterpreter import InterpreterTransformer
def NetInterpreter(*args, **argv):
return Pipeline(
PureTransformer(lambda x: str(x).rstrip()),
PureTransformer(lambda x: str(x).replace("\r","")),
InterpreterTransformer(),
PureTransformer(lambda x: str(x)+"\r\n>>> "),
)
ServerCore(protocol=NetInterpreter,
socketOptions=(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1),
port=8765).activate()
Pipeline(
PeriodicWakeup(interval=1),
WakeableIntrospector(),
PureTransformer(lambda x: str(len(x))+" "+str(x)+"\n"),
Uniq(),
ConsoleEchoer(),
).activate()
ServerCore(protocol=Echo,
socketOptions=(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1),
port=1234).run()
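To exercise the leak scenario from another process, any TCP client against the echo port defined above works; a sketch assuming the server runs locally:

import socket
client = socket.create_connection(('localhost', 1234))
client.sendall(b'ping\n')
print(client.recv(1024))  # watch the introspector's component counts while repeating this
client.close()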
|
|
4a8b30a6f4791d690e7cf159a407f0895008f4f3
|
cafe/patterns/context/__init__.py
|
cafe/patterns/context/__init__.py
|
class SessionManager(object):
def __init__(self, factory, *args, **kwargs):
self._kwargs = kwargs
self._args = args
self._factory = factory
self.session = None
def open(self):
if self.session is None:
self.session = self._factory(*self._args, **self._kwargs)
def close(self):
if self.session is not None:
self.session.close()
self.session = None
def __enter__(self):
self.open()
return self.session
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
|
Add a generic context-based session pattern
|
Add a generic context-based session pattern
|
Python
|
apache-2.0
|
abn/python-cafe
|
Add a generic context-based session pattern
|
class SessionManager(object):
def __init__(self, factory, *args, **kwargs):
self._kwargs = kwargs
self._args = args
self._factory = factory
self.session = None
def open(self):
if self.session is None:
self.session = self._factory(*self._args, **self._kwargs)
def close(self):
if self.session is not None:
self.session.close()
self.session = None
def __enter__(self):
self.open()
return self.session
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
|
<commit_before><commit_msg>Add a generic context-based session pattern<commit_after>
|
class SessionManager(object):
def __init__(self, factory, *args, **kwargs):
self._kwargs = kwargs
self._args = args
self._factory = factory
self.session = None
def open(self):
if self.session is None:
self.session = self._factory(*self._args, **self._kwargs)
def close(self):
if self.session is not None:
self.session.close()
self.session = None
def __enter__(self):
self.open()
return self.session
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
|
Add a generic context-based session patternclass SessionManager(object):
def __init__(self, factory, *args, **kwargs):
self._kwargs = kwargs
self._args = args
self._factory = factory
self.session = None
def open(self):
if self.session is None:
self.session = self._factory(*self._args, **self._kwargs)
def close(self):
if self.session is not None:
self.session.close()
self.session = None
def __enter__(self):
self.open()
return self.session
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
|
<commit_before><commit_msg>Add a generic context-based session pattern<commit_after>class SessionManager(object):
def __init__(self, factory, *args, **kwargs):
self._kwargs = kwargs
self._args = args
self._factory = factory
self.session = None
def open(self):
if self.session is None:
self.session = self._factory(*self._args, **self._kwargs)
def close(self):
if self.session is not None:
self.session.close()
self.session = None
def __enter__(self):
self.open()
return self.session
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
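A usage sketch: the factory can be anything callable whose result exposes close(), so the built-in open qualifies (the path below is arbitrary):

with SessionManager(open, '/tmp/session_demo.txt', 'w') as f:
    f.write('hello')
# on leaving the block the file is closed and the manager's session is reset to None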
|
|
9863845fcf84ff17901109b3b355930d128cb632
|
datasets.py
|
datasets.py
|
# -*- coding: utf-8 -*-
"""
Created on Mon Jan 09 14:01:35 2017
@author: sakurai
"""
from fuel.schemes import SequentialScheme
from fuel.streams import DataStream
from random_fixed_size_crop_mod import RandomFixedSizeCrop
from cars196_dataset import Cars196Dataset
def get_cars196_streams(crop_size=227, load_in_memory=False):
# This scheme is dummy, since DataStream requires an iteration_scheme for
# DataStream.produces_examples to be False in the constructor.
dummy_scheme = SequentialScheme(1, 1)
train_stream = RandomFixedSizeCrop(
DataStream(Cars196Dataset(['train'], load_in_memory=load_in_memory),
iteration_scheme=dummy_scheme),
(crop_size, crop_size), which_sources=("images"))
test_stream = RandomFixedSizeCrop(
DataStream(Cars196Dataset(['test'], load_in_memory=load_in_memory),
iteration_scheme=dummy_scheme),
(crop_size, crop_size), which_sources=("images"))
return train_stream, test_stream
if __name__ == '__main__':
train, test = get_cars196_streams(load_in_memory=True)
train.get_data([0, 1, 2])
|
Implement the loader of streams of Cars 196
|
Implement the loader of streams of Cars 196
|
Python
|
mit
|
ronekko/deep_metric_learning
|
Implement the loader of streams of Cars 196
|
# -*- coding: utf-8 -*-
"""
Created on Mon Jan 09 14:01:35 2017
@author: sakurai
"""
from fuel.schemes import SequentialScheme
from fuel.streams import DataStream
from random_fixed_size_crop_mod import RandomFixedSizeCrop
from cars196_dataset import Cars196Dataset
def get_cars196_streams(crop_size=227, load_in_memory=False):
# This scheme is dummy, since DataStream requires an iteration_scheme for
# DataStream.produces_examples to be False in the constructor.
dummy_scheme = SequentialScheme(1, 1)
train_stream = RandomFixedSizeCrop(
DataStream(Cars196Dataset(['train'], load_in_memory=load_in_memory),
iteration_scheme=dummy_scheme),
(crop_size, crop_size), which_sources=("images"))
test_stream = RandomFixedSizeCrop(
DataStream(Cars196Dataset(['test'], load_in_memory=load_in_memory),
iteration_scheme=dummy_scheme),
(crop_size, crop_size), which_sources=("images"))
return train_stream, test_stream
if __name__ == '__main__':
train, test = get_cars196_streams(load_in_memory=True)
train.get_data([0, 1, 2])
|
<commit_before><commit_msg>Implement the loader of streams of Cars 196<commit_after>
|
# -*- coding: utf-8 -*-
"""
Created on Mon Jan 09 14:01:35 2017
@author: sakurai
"""
from fuel.schemes import SequentialScheme
from fuel.streams import DataStream
from random_fixed_size_crop_mod import RandomFixedSizeCrop
from cars196_dataset import Cars196Dataset
def get_cars196_streams(crop_size=227, load_in_memory=False):
# This scheme is dummy, since DataStream requires an iteration_scheme for
# DataStream.produces_examples to be False in the constructor.
dummy_scheme = SequentialScheme(1, 1)
train_stream = RandomFixedSizeCrop(
DataStream(Cars196Dataset(['train'], load_in_memory=load_in_memory),
iteration_scheme=dummy_scheme),
(crop_size, crop_size), which_sources=("images"))
test_stream = RandomFixedSizeCrop(
DataStream(Cars196Dataset(['test'], load_in_memory=load_in_memory),
iteration_scheme=dummy_scheme),
(crop_size, crop_size), which_sources=("images"))
return train_stream, test_stream
if __name__ == '__main__':
train, test = get_cars196_streams(load_in_memory=True)
train.get_data([0, 1, 2])
|
Implement the loader of streams of Cars 196# -*- coding: utf-8 -*-
"""
Created on Mon Jan 09 14:01:35 2017
@author: sakurai
"""
from fuel.schemes import SequentialScheme
from fuel.streams import DataStream
from random_fixed_size_crop_mod import RandomFixedSizeCrop
from cars196_dataset import Cars196Dataset
def get_cars196_streams(crop_size=227, load_in_memory=False):
# This scheme is dummy, since DataStream requires an iteration_scheme for
# DataStream.produces_examples to be False in the constructor.
dummy_scheme = SequentialScheme(1, 1)
train_stream = RandomFixedSizeCrop(
DataStream(Cars196Dataset(['train'], load_in_memory=load_in_memory),
iteration_scheme=dummy_scheme),
(crop_size, crop_size), which_sources=("images"))
test_stream = RandomFixedSizeCrop(
DataStream(Cars196Dataset(['test'], load_in_memory=load_in_memory),
iteration_scheme=dummy_scheme),
(crop_size, crop_size), which_sources=("images"))
return train_stream, test_stream
if __name__ == '__main__':
train, test = get_cars196_streams(load_in_memory=True)
train.get_data([0, 1, 2])
|
<commit_before><commit_msg>Implement the loader of streams of Cars 196<commit_after># -*- coding: utf-8 -*-
"""
Created on Mon Jan 09 14:01:35 2017
@author: sakurai
"""
from fuel.schemes import SequentialScheme
from fuel.streams import DataStream
from random_fixed_size_crop_mod import RandomFixedSizeCrop
from cars196_dataset import Cars196Dataset
def get_cars196_streams(crop_size=227, load_in_memory=False):
# This scheme is dummy, since DataStream requires an iteration_scheme for
# DataStream.produces_examples to be False in the constructor.
dummy_scheme = SequentialScheme(1, 1)
train_stream = RandomFixedSizeCrop(
DataStream(Cars196Dataset(['train'], load_in_memory=load_in_memory),
iteration_scheme=dummy_scheme),
(crop_size, crop_size), which_sources=("images"))
test_stream = RandomFixedSizeCrop(
DataStream(Cars196Dataset(['test'], load_in_memory=load_in_memory),
iteration_scheme=dummy_scheme),
(crop_size, crop_size), which_sources=("images"))
return train_stream, test_stream
if __name__ == '__main__':
train, test = get_cars196_streams(load_in_memory=True)
train.get_data([0, 1, 2])
|
|
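The dummy SequentialScheme in the Cars196 commit above exists only to satisfy DataStream's constructor, so a consumer would normally rebuild the stream with a real batching scheme before training. The following is a minimal sketch of that pattern; the batch size, the helper name and the assumption that Cars196Dataset exposes num_examples are illustrative, not part of the commit.
# Sketch, not part of the commit above: batching Cars196 with a real iteration scheme.
from fuel.schemes import SequentialScheme
from fuel.streams import DataStream
from cars196_dataset import Cars196Dataset
def iterate_cars196_batches(batch_size=32, load_in_memory=False):
    dataset = Cars196Dataset(['train'], load_in_memory=load_in_memory)
    scheme = SequentialScheme(dataset.num_examples, batch_size)  # real batches, not the 1-example dummy
    stream = DataStream(dataset, iteration_scheme=scheme)
    for batch in stream.get_epoch_iterator():
        # each batch is a tuple ordered like dataset.sources (e.g. images, labels)
        yield batch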
e2fd04bbf886c2893e8addc9543f861dcfd8bb96
|
Doc/lib/tzinfo-examples.py
|
Doc/lib/tzinfo-examples.py
|
from datetime import tzinfo
class UTC(tzinfo):
"""UTC"""
def utcoffset(self, dt):
return 0
def tzname(self, dt):
return "UTC"
def dst(self, dt):
return 0
class FixedOffset(tzinfo):
"""Fixed offset in minutes east from UTC."""
def __init__(self, offset, name):
self.__offset = offset
self.__name = name
def utcoffset(self, dt):
return self.__offset
def tzname(self, dt):
return self.__name
def dst(self, dt):
# It depends on more than we know in an example.
return None # Indicate we don't know
import time
class LocalTime(tzinfo):
"""Local time as defined by the operating system."""
def _isdst(self, dt):
t = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
-1, -1, -1)
# XXX This may fail for years < 1970 or >= 2038
t = time.localtime(time.mktime(t))
return t.tm_isdst > 0
def utcoffset(self, dt):
if self._isdst(dt):
return -time.timezone/60
else:
return -time.altzone/60
def tzname(self, dt):
return time.tzname[self._isdst(dt)]
|
Move the examples of concrete tzinfo classes to a separate file, so the verbatim environment does not bollux page breaking.
|
Move the examples of concrete tzinfo classes to a separate file, so the
verbatim environment does not bollux page breaking.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
Move the examples of concrete tzinfo classes to a separate file, so the
verbatim environment does not bollux page breaking.
|
from datetime import tzinfo
class UTC(tzinfo):
"""UTC"""
def utcoffset(self, dt):
return 0
def tzname(self, dt):
return "UTC"
def dst(self, dt):
return 0
class FixedOffset(tzinfo):
"""Fixed offset in minutes east from UTC."""
def __init__(self, offset, name):
self.__offset = offset
self.__name = name
def utcoffset(self, dt):
return self.__offset
def tzname(self, dt):
return self.__name
def dst(self, dt):
# It depends on more than we know in an example.
return None # Indicate we don't know
import time
class LocalTime(tzinfo):
"""Local time as defined by the operating system."""
def _isdst(self, dt):
t = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
-1, -1, -1)
# XXX This may fail for years < 1970 or >= 2038
t = time.localtime(time.mktime(t))
return t.tm_isdst > 0
def utcoffset(self, dt):
if self._isdst(dt):
return -time.timezone/60
else:
return -time.altzone/60
def tzname(self, dt):
return time.tzname[self._isdst(dt)]
|
<commit_before><commit_msg>Move the examples of concrete tzinfo classes to a separate file, so the
verbatim environment does not bollux page breaking.<commit_after>
|
from datetime import tzinfo
class UTC(tzinfo):
"""UTC"""
def utcoffset(self, dt):
return 0
def tzname(self, dt):
return "UTC"
def dst(self, dt):
return 0
class FixedOffset(tzinfo):
"""Fixed offset in minutes east from UTC."""
def __init__(self, offset, name):
self.__offset = offset
self.__name = name
def utcoffset(self, dt):
return self.__offset
def tzname(self, dt):
return self.__name
def dst(self, dt):
# It depends on more than we know in an example.
return None # Indicate we don't know
import time
class LocalTime(tzinfo):
"""Local time as defined by the operating system."""
def _isdst(self, dt):
t = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
-1, -1, -1)
# XXX This may fail for years < 1970 or >= 2038
t = time.localtime(time.mktime(t))
return t.tm_isdst > 0
def utcoffset(self, dt):
if self._isdst(dt):
return -time.timezone/60
else:
return -time.altzone/60
def tzname(self, dt):
return time.tzname[self._isdst(dt)]
|
Move the examples of concrete tzinfo classes to a separate file, so the
verbatim environment does not bollux page breaking.from datetime import tzinfo
class UTC(tzinfo):
"""UTC"""
def utcoffset(self, dt):
return 0
def tzname(self, dt):
return "UTC"
def dst(self, dt):
return 0
class FixedOffset(tzinfo):
"""Fixed offset in minutes east from UTC."""
def __init__(self, offset, name):
self.__offset = offset
self.__name = name
def utcoffset(self, dt):
return self.__offset
def tzname(self, dt):
return self.__name
def dst(self, dt):
# It depends on more than we know in an example.
return None # Indicate we don't know
import time
class LocalTime(tzinfo):
"""Local time as defined by the operating system."""
def _isdst(self, dt):
t = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
-1, -1, -1)
# XXX This may fail for years < 1970 or >= 2038
t = time.localtime(time.mktime(t))
return t.tm_isdst > 0
def utcoffset(self, dt):
if self._isdst(dt):
return -time.timezone/60
else:
return -time.altzone/60
def tzname(self, dt):
return time.tzname[self._isdst(dt)]
|
<commit_before><commit_msg>Move the examples of concrete tzinfo classes to a separate file, so the
verbatim environment does not bollux page breaking.<commit_after>from datetime import tzinfo
class UTC(tzinfo):
"""UTC"""
def utcoffset(self, dt):
return 0
def tzname(self, dt):
return "UTC"
def dst(self, dt):
return 0
class FixedOffset(tzinfo):
"""Fixed offset in minutes east from UTC."""
def __init__(self, offset, name):
self.__offset = offset
self.__name = name
def utcoffset(self, dt):
return self.__offset
def tzname(self, dt):
return self.__name
def dst(self, dt):
# It depends on more than we know in an example.
return None # Indicate we don't know
import time
class LocalTime(tzinfo):
"""Local time as defined by the operating system."""
def _isdst(self, dt):
t = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
-1, -1, -1)
# XXX This may fail for years < 1970 or >= 2038
t = time.localtime(time.mktime(t))
return t.tm_isdst > 0
def utcoffset(self, dt):
if self._isdst(dt):
return -time.timezone/60
else:
return -time.altzone/60
def tzname(self, dt):
return time.tzname[self._isdst(dt)]
|
|
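In current Python, tzinfo.utcoffset() and tzinfo.dst() must return timedelta objects rather than plain minute counts, so a present-day version of the fixed-offset example reads slightly differently. The sketch below is illustrative; the class name and sample values are assumptions, not part of the documentation example above.
# Sketch, not part of the example above: a timedelta-based fixed-offset tzinfo.
from datetime import datetime, timedelta, tzinfo
class FixedOffsetTZ(tzinfo):
    """Fixed offset in minutes east from UTC, returned as timedelta objects."""
    def __init__(self, offset_minutes, name):
        self.__offset = timedelta(minutes=offset_minutes)
        self.__name = name
    def utcoffset(self, dt):
        return self.__offset
    def tzname(self, dt):
        return self.__name
    def dst(self, dt):
        return timedelta(0)
aware = datetime(2024, 6, 1, 12, 0, tzinfo=FixedOffsetTZ(120, "CEST"))
print(aware.isoformat())  # 2024-06-01T12:00:00+02:00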
d4187acb33a0f65dfc4bc38f31995e354847dcb3
|
nodeconductor/users/migrations/0002_invitation_error_message.py
|
nodeconductor/users/migrations/0002_invitation_error_message.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='invitation',
name='error_message',
field=models.TextField(blank=True),
),
]
|
Add Invitation error message migration
|
Add Invitation error message migration
- WAL-66
|
Python
|
mit
|
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
|
Add Invitation error message migration
- WAL-66
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='invitation',
name='error_message',
field=models.TextField(blank=True),
),
]
|
<commit_before><commit_msg>Add Invitation error message migration
- WAL-66<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='invitation',
name='error_message',
field=models.TextField(blank=True),
),
]
|
Add Invitation error message migration
- WAL-66# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='invitation',
name='error_message',
field=models.TextField(blank=True),
),
]
|
<commit_before><commit_msg>Add Invitation error message migration
- WAL-66<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='invitation',
name='error_message',
field=models.TextField(blank=True),
),
]
|
|
61a7dad7c47b25907246524a28252c6e8004a74f
|
sara_flexbe_states/src/sara_flexbe_states/LogEntity.py
|
sara_flexbe_states/src/sara_flexbe_states/LogEntity.py
|
#!/usr/bin/env python
# encoding=utf8
from flexbe_core import EventState, Logger
from sara_msgs.msg import Entity, Entities
class LogEntity(EventState):
'''
Print an entity or entities object in Flexbe runtime. Should be only used for debug.
># entity Entity The entity or entities array following the `sara_msgs/entity.msg` structure
    <= done returned once the entity has been printed
'''
def __init__(self):
# See example_state.py for basic explanations.
super(LogEntity, self).__init__(outcomes=['done'], input_keys=['entity'])
def execute(self, userdata):
if type(userdata.entity) is Entity:
self.print_entity(userdata.entity)
elif type(userdata.entity) is Entities:
for entity in userdata.entity:
self.print_entity(entity)
return 'done'
@staticmethod
def print_entity(entity):
Logger.loginfo("===== ===== This is an entity. ===== =====")
Logger.loginfo("ID: " + str(entity.ID))
Logger.loginfo("Wonderland ID: " + str(entity.wonderlandId))
Logger.loginfo("Name: " + entity.name)
for alias in entity.aliases:
Logger.loginfo("Alias: " + alias)
Logger.loginfo("Category: " + entity.category)
Logger.loginfo("Color: " + entity.color)
Logger.loginfo("Weight: " + str(entity.weight))
Logger.loginfo("Size: " + str(entity.size))
Logger.loginfo("Gender: " + entity.gender)
Logger.loginfo("Emotion: " + entity.emotion)
Logger.loginfo("Container ID: " + str(entity.containerId))
Logger.loginfo("Position x: " + str(entity.position.x))
Logger.loginfo("Position y: " + str(entity.position.y))
Logger.loginfo("Position z: " + str(entity.position.z))
Logger.loginfo("Waypoint x: " + str(entity.waypoint.x))
Logger.loginfo("Waypoint y: " + str(entity.waypoint.y))
Logger.loginfo("Waypoint yaw: " + str(entity.waypoint.theta))
Logger.loginfo("Velocity x: " + str(entity.velocity.x))
Logger.loginfo("Velocity y: " + str(entity.velocity.y))
Logger.loginfo("Velocity z: " + str(entity.velocity.z))
|
Create a state to print an entity in the flexbe logger.
|
Create a state to print an entity in the flexbe logger.
|
Python
|
bsd-3-clause
|
WalkingMachine/sara_behaviors,WalkingMachine/sara_behaviors
|
Create a state to print an entity in the flexbe logger.
|
#!/usr/bin/env python
# encoding=utf8
from flexbe_core import EventState, Logger
from sara_msgs.msg import Entity, Entities
class LogEntity(EventState):
'''
Print an entity or entities object in Flexbe runtime. Should be only used for debug.
># entity Entity The entity or entities array following the `sara_msgs/entity.msg` structure
    <= done returned once the entity has been printed
'''
def __init__(self):
# See example_state.py for basic explanations.
super(LogEntity, self).__init__(outcomes=['done'], input_keys=['entity'])
def execute(self, userdata):
if type(userdata.entity) is Entity:
self.print_entity(userdata.entity)
elif type(userdata.entity) is Entities:
for entity in userdata.entity:
self.print_entity(entity)
return 'done'
@staticmethod
def print_entity(entity):
Logger.loginfo("===== ===== This is an entity. ===== =====")
Logger.loginfo("ID: " + str(entity.ID))
Logger.loginfo("Wonderland ID: " + str(entity.wonderlandId))
Logger.loginfo("Name: " + entity.name)
for alias in entity.aliases:
Logger.loginfo("Alias: " + alias)
Logger.loginfo("Category: " + entity.category)
Logger.loginfo("Color: " + entity.color)
Logger.loginfo("Weight: " + str(entity.weight))
Logger.loginfo("Size: " + str(entity.size))
Logger.loginfo("Gender: " + entity.gender)
Logger.loginfo("Emotion: " + entity.emotion)
Logger.loginfo("Container ID: " + str(entity.containerId))
Logger.loginfo("Position x: " + str(entity.position.x))
Logger.loginfo("Position y: " + str(entity.position.y))
Logger.loginfo("Position z: " + str(entity.position.z))
Logger.loginfo("Waypoint x: " + str(entity.waypoint.x))
Logger.loginfo("Waypoint y: " + str(entity.waypoint.y))
Logger.loginfo("Waypoint yaw: " + str(entity.waypoint.theta))
Logger.loginfo("Velocity x: " + str(entity.velocity.x))
Logger.loginfo("Velocity y: " + str(entity.velocity.y))
Logger.loginfo("Velocity z: " + str(entity.velocity.z))
|
<commit_before><commit_msg>Create a state to print an entity in the flexbe logger.<commit_after>
|
#!/usr/bin/env python
# encoding=utf8
from flexbe_core import EventState, Logger
from sara_msgs.msg import Entity, Entities
class LogEntity(EventState):
'''
Print an entity or entities object in Flexbe runtime. Should be only used for debug.
># entity Entity The entity or entities array following the `sara_msgs/entity.msg` structure
    <= done returned once the entity has been printed
'''
def __init__(self):
# See example_state.py for basic explanations.
super(LogEntity, self).__init__(outcomes=['done'], input_keys=['entity'])
def execute(self, userdata):
if type(userdata.entity) is Entity:
self.print_entity(userdata.entity)
elif type(userdata.entity) is Entities:
for entity in userdata.entity:
self.print_entity(entity)
return 'done'
@staticmethod
def print_entity(entity):
Logger.loginfo("===== ===== This is an entity. ===== =====")
Logger.loginfo("ID: " + str(entity.ID))
Logger.loginfo("Wonderland ID: " + str(entity.wonderlandId))
Logger.loginfo("Name: " + entity.name)
for alias in entity.aliases:
Logger.loginfo("Alias: " + alias)
Logger.loginfo("Category: " + entity.category)
Logger.loginfo("Color: " + entity.color)
Logger.loginfo("Weight: " + str(entity.weight))
Logger.loginfo("Size: " + str(entity.size))
Logger.loginfo("Gender: " + entity.gender)
Logger.loginfo("Emotion: " + entity.emotion)
Logger.loginfo("Container ID: " + str(entity.containerId))
Logger.loginfo("Position x: " + str(entity.position.x))
Logger.loginfo("Position y: " + str(entity.position.y))
Logger.loginfo("Position z: " + str(entity.position.z))
Logger.loginfo("Waypoint x: " + str(entity.waypoint.x))
Logger.loginfo("Waypoint y: " + str(entity.waypoint.y))
Logger.loginfo("Waypoint yaw: " + str(entity.waypoint.theta))
Logger.loginfo("Velocity x: " + str(entity.velocity.x))
Logger.loginfo("Velocity y: " + str(entity.velocity.y))
Logger.loginfo("Velocity z: " + str(entity.velocity.z))
|
Create a state to print an entity in the flexbe logger.#!/usr/bin/env python
# encoding=utf8
from flexbe_core import EventState, Logger
from sara_msgs.msg import Entity, Entities
class LogEntity(EventState):
'''
Print an entity or entities object in Flexbe runtime. Should be only used for debug.
># entity Entity The entity or entities array following the `sara_msgs/entity.msg` structure
    <= done returned once the entity has been printed
'''
def __init__(self):
# See example_state.py for basic explanations.
super(LogEntity, self).__init__(outcomes=['done'], input_keys=['entity'])
def execute(self, userdata):
if type(userdata.entity) is Entity:
self.print_entity(userdata.entity)
elif type(userdata.entity) is Entities:
for entity in userdata.entity:
self.print_entity(entity)
return 'done'
@staticmethod
def print_entity(entity):
Logger.loginfo("===== ===== This is an entity. ===== =====")
Logger.loginfo("ID: " + str(entity.ID))
Logger.loginfo("Wonderland ID: " + str(entity.wonderlandId))
Logger.loginfo("Name: " + entity.name)
for alias in entity.aliases:
Logger.loginfo("Alias: " + alias)
Logger.loginfo("Category: " + entity.category)
Logger.loginfo("Color: " + entity.color)
Logger.loginfo("Weight: " + str(entity.weight))
Logger.loginfo("Size: " + str(entity.size))
Logger.loginfo("Gender: " + entity.gender)
Logger.loginfo("Emotion: " + entity.emotion)
Logger.loginfo("Container ID: " + str(entity.containerId))
Logger.loginfo("Position x: " + str(entity.position.x))
Logger.loginfo("Position y: " + str(entity.position.y))
Logger.loginfo("Position z: " + str(entity.position.z))
Logger.loginfo("Waypoint x: " + str(entity.waypoint.x))
Logger.loginfo("Waypoint y: " + str(entity.waypoint.y))
Logger.loginfo("Waypoint yaw: " + str(entity.waypoint.theta))
Logger.loginfo("Velocity x: " + str(entity.velocity.x))
Logger.loginfo("Velocity y: " + str(entity.velocity.y))
Logger.loginfo("Velocity z: " + str(entity.velocity.z))
|
<commit_before><commit_msg>Create a state to print an entity in the flexbe logger.<commit_after>#!/usr/bin/env python
# encoding=utf8
from flexbe_core import EventState, Logger
from sara_msgs.msg import Entity, Entities
class LogEntity(EventState):
'''
Print an entity or entities object in Flexbe runtime. Should be only used for debug.
># entity Entity The entity or entities array following the `sara_msgs/entity.msg` structure
    <= done returned once the entity has been printed
'''
def __init__(self):
# See example_state.py for basic explanations.
super(LogEntity, self).__init__(outcomes=['done'], input_keys=['entity'])
def execute(self, userdata):
if type(userdata.entity) is Entity:
self.print_entity(userdata.entity)
elif type(userdata.entity) is Entities:
for entity in userdata.entity:
self.print_entity(entity)
return 'done'
@staticmethod
def print_entity(entity):
Logger.loginfo("===== ===== This is an entity. ===== =====")
Logger.loginfo("ID: " + str(entity.ID))
Logger.loginfo("Wonderland ID: " + str(entity.wonderlandId))
Logger.loginfo("Name: " + entity.name)
for alias in entity.aliases:
Logger.loginfo("Alias: " + alias)
Logger.loginfo("Category: " + entity.category)
Logger.loginfo("Color: " + entity.color)
Logger.loginfo("Weight: " + str(entity.weight))
Logger.loginfo("Size: " + str(entity.size))
Logger.loginfo("Gender: " + entity.gender)
Logger.loginfo("Emotion: " + entity.emotion)
Logger.loginfo("Container ID: " + str(entity.containerId))
Logger.loginfo("Position x: " + str(entity.position.x))
Logger.loginfo("Position y: " + str(entity.position.y))
Logger.loginfo("Position z: " + str(entity.position.z))
Logger.loginfo("Waypoint x: " + str(entity.waypoint.x))
Logger.loginfo("Waypoint y: " + str(entity.waypoint.y))
Logger.loginfo("Waypoint yaw: " + str(entity.waypoint.theta))
Logger.loginfo("Velocity x: " + str(entity.velocity.x))
Logger.loginfo("Velocity y: " + str(entity.velocity.y))
Logger.loginfo("Velocity z: " + str(entity.velocity.z))
|
|
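The field-by-field Logger calls in the LogEntity commit above can be generalised. Assuming the sara_msgs entities are ordinary genpy-generated ROS 1 messages, which list their field names in __slots__, a generic dump helper could look like this sketch:
# Sketch, not part of the commit above: generic field dump for a ROS 1 message.
def log_message_fields(msg, log=print):
    for field in getattr(msg, '__slots__', ()):
        log("{}: {}".format(field, getattr(msg, field)))
# e.g. log_message_fields(entity, Logger.loginfo) inside the state's execute()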
085bc060daef833951717eb4d9131397ec056b90
|
alg_prim_minimum_spanning_tree.py
|
alg_prim_minimum_spanning_tree.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def prim():
pass
def main():
pass
if __name__ == '__main__':
main()
|
Add Prim's minimum spanning tree alg
|
Add Prim's minimum spanning tree alg
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
Add Prim's minimum spanning tree alg
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def prim():
pass
def main():
pass
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add Prim's minimum spanning tree alg<commit_after>
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def prim():
pass
def main():
pass
if __name__ == '__main__':
main()
|
Add Prim's minimum spanning tree algfrom __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def prim():
pass
def main():
pass
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add Prim's minimum spanning tree alg<commit_after>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def prim():
pass
def main():
pass
if __name__ == '__main__':
main()
|
|
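The prim() function in the commit above is committed as an empty stub. For reference, here is a self-contained sketch of Prim's minimum spanning tree using a binary heap with lazy deletion; the adjacency-list format {node: [(weight, neighbour), ...]} is an assumption and is independent of whatever interface the stub later adopted.
# Sketch, not part of the commit above: Prim's MST with heapq and lazy deletion.
import heapq
def prim_mst(graph, start):
    visited = {start}
    edges = list(graph[start])  # (weight, neighbour) pairs reachable so far
    heapq.heapify(edges)
    mst, total = [], 0
    while edges and len(visited) < len(graph):
        weight, node = heapq.heappop(edges)
        if node in visited:
            continue  # stale heap entry, skip it
        visited.add(node)
        mst.append((weight, node))
        total += weight
        for next_weight, neighbour in graph[node]:
            if neighbour not in visited:
                heapq.heappush(edges, (next_weight, neighbour))
    return mst, total
example_graph = {
    'a': [(1, 'b'), (4, 'c')],
    'b': [(1, 'a'), (2, 'c'), (6, 'd')],
    'c': [(4, 'a'), (2, 'b'), (3, 'd')],
    'd': [(6, 'b'), (3, 'c')],
}
print(prim_mst(example_graph, 'a'))  # ([(1, 'b'), (2, 'c'), (3, 'd')], 6)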
6d4712f5b077d9b00116a1181377584bafe170f2
|
bluebottle/segments/migrations/0007_auto_20220119_0945.py
|
bluebottle/segments/migrations/0007_auto_20220119_0945.py
|
# Generated by Django 2.2.24 on 2022-01-19 08:45
from django.db import migrations
def migrate_extra_fields_to_segments(apps, schema_editor):
Member = apps.get_model('members', 'Member')
SegmentType = apps.get_model('segments', 'SegmentType')
Segment = apps.get_model('segments', 'Segment')
CustomMemberFieldSettings = apps.get_model('members', 'CustomMemberFieldSettings')
MemberPlatformSettings = apps.get_model('members', 'MemberPlatformSettings')
CustomMemberField = apps.get_model('members', 'CustomMemberField')
if CustomMemberFieldSettings.objects.count() == 1:
MemberPlatformSettings.objects.update(
create_segments=True,
enable_segments=True
)
department, _ = SegmentType.objects.get_or_create(
name='Department',
slug='department',
)
for member in Member.objects.all():
field = CustomMemberField.objects.filter(member=member, field__name='department').first()
if field:
segment, _ = Segment.objects.get_or_create(
type=department,
name=field.value
)
member.segments.add(segment)
if CustomMemberFieldSettings.objects.count() == 4:
city, _ = SegmentType.objects.get_or_create(
name='City',
slug='city',
)
country, _ = SegmentType.objects.get_or_create(
name='Country',
slug='country',
)
for member in Member.objects.all():
field = CustomMemberField.objects.filter(member=member, field__name='country').first()
if field:
my_country, _ = Segment.objects.get_or_create(
type=country,
name=field.value
)
member.segments.add(my_country)
field = CustomMemberField.objects.filter(member=member, field__name='city').first()
if field:
my_city, _ = Segment.objects.get_or_create(
type=city,
name=field.value
)
member.segments.add(my_city)
class Migration(migrations.Migration):
dependencies = [
('segments', '0006_auto_20210914_1134'),
]
operations = [
migrations.RunPython(
migrate_extra_fields_to_segments,
migrations.RunPython.noop
)
]
|
Migrate custom user fields to segments
|
Migrate custom user fields to segments
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
Migrate custom user fields to segments
|
# Generated by Django 2.2.24 on 2022-01-19 08:45
from django.db import migrations
def migrate_extra_fields_to_segments(apps, schema_editor):
Member = apps.get_model('members', 'Member')
SegmentType = apps.get_model('segments', 'SegmentType')
Segment = apps.get_model('segments', 'Segment')
CustomMemberFieldSettings = apps.get_model('members', 'CustomMemberFieldSettings')
MemberPlatformSettings = apps.get_model('members', 'MemberPlatformSettings')
CustomMemberField = apps.get_model('members', 'CustomMemberField')
if CustomMemberFieldSettings.objects.count() == 1:
MemberPlatformSettings.objects.update(
create_segments=True,
enable_segments=True
)
department, _ = SegmentType.objects.get_or_create(
name='Department',
slug='department',
)
for member in Member.objects.all():
field = CustomMemberField.objects.filter(member=member, field__name='department').first()
if field:
segment, _ = Segment.objects.get_or_create(
type=department,
name=field.value
)
member.segments.add(segment)
if CustomMemberFieldSettings.objects.count() == 4:
city, _ = SegmentType.objects.get_or_create(
name='City',
slug='city',
)
country, _ = SegmentType.objects.get_or_create(
name='Country',
slug='country',
)
for member in Member.objects.all():
field = CustomMemberField.objects.filter(member=member, field__name='country').first()
if field:
my_country, _ = Segment.objects.get_or_create(
type=country,
name=field.value
)
member.segments.add(my_country)
field = CustomMemberField.objects.filter(member=member, field__name='city').first()
if field:
my_city, _ = Segment.objects.get_or_create(
type=city,
name=field.value
)
member.segments.add(my_city)
class Migration(migrations.Migration):
dependencies = [
('segments', '0006_auto_20210914_1134'),
]
operations = [
migrations.RunPython(
migrate_extra_fields_to_segments,
migrations.RunPython.noop
)
]
|
<commit_before><commit_msg>Migrate custom user fields to segments<commit_after>
|
# Generated by Django 2.2.24 on 2022-01-19 08:45
from django.db import migrations
def migrate_extra_fields_to_segments(apps, schema_editor):
Member = apps.get_model('members', 'Member')
SegmentType = apps.get_model('segments', 'SegmentType')
Segment = apps.get_model('segments', 'Segment')
CustomMemberFieldSettings = apps.get_model('members', 'CustomMemberFieldSettings')
MemberPlatformSettings = apps.get_model('members', 'MemberPlatformSettings')
CustomMemberField = apps.get_model('members', 'CustomMemberField')
if CustomMemberFieldSettings.objects.count() == 1:
MemberPlatformSettings.objects.update(
create_segments=True,
enable_segments=True
)
department, _ = SegmentType.objects.get_or_create(
name='Department',
slug='department',
)
for member in Member.objects.all():
field = CustomMemberField.objects.filter(member=member, field__name='department').first()
if field:
segment, _ = Segment.objects.get_or_create(
type=department,
name=field.value
)
member.segments.add(segment)
if CustomMemberFieldSettings.objects.count() == 4:
city, _ = SegmentType.objects.get_or_create(
name='City',
slug='city',
)
country, _ = SegmentType.objects.get_or_create(
name='Country',
slug='country',
)
for member in Member.objects.all():
field = CustomMemberField.objects.filter(member=member, field__name='country').first()
if field:
my_country, _ = Segment.objects.get_or_create(
type=country,
name=field.value
)
member.segments.add(my_country)
field = CustomMemberField.objects.filter(member=member, field__name='city').first()
if field:
my_city, _ = Segment.objects.get_or_create(
type=city,
name=field.value
)
member.segments.add(my_city)
class Migration(migrations.Migration):
dependencies = [
('segments', '0006_auto_20210914_1134'),
]
operations = [
migrations.RunPython(
migrate_extra_fields_to_segments,
migrations.RunPython.noop
)
]
|
Migrate custom user fields to segments# Generated by Django 2.2.24 on 2022-01-19 08:45
from django.db import migrations
def migrate_extra_fields_to_segments(apps, schema_editor):
Member = apps.get_model('members', 'Member')
SegmentType = apps.get_model('segments', 'SegmentType')
Segment = apps.get_model('segments', 'Segment')
CustomMemberFieldSettings = apps.get_model('members', 'CustomMemberFieldSettings')
MemberPlatformSettings = apps.get_model('members', 'MemberPlatformSettings')
CustomMemberField = apps.get_model('members', 'CustomMemberField')
if CustomMemberFieldSettings.objects.count() == 1:
MemberPlatformSettings.objects.update(
create_segments=True,
enable_segments=True
)
department, _ = SegmentType.objects.get_or_create(
name='Department',
slug='department',
)
for member in Member.objects.all():
field = CustomMemberField.objects.filter(member=member, field__name='department').first()
if field:
segment, _ = Segment.objects.get_or_create(
type=department,
name=field.value
)
member.segments.add(segment)
if CustomMemberFieldSettings.objects.count() == 4:
city, _ = SegmentType.objects.get_or_create(
name='City',
slug='city',
)
country, _ = SegmentType.objects.get_or_create(
name='Country',
slug='country',
)
for member in Member.objects.all():
field = CustomMemberField.objects.filter(member=member, field__name='country').first()
if field:
my_country, _ = Segment.objects.get_or_create(
type=country,
name=field.value
)
member.segments.add(my_country)
field = CustomMemberField.objects.filter(member=member, field__name='city').first()
if field:
my_city, _ = Segment.objects.get_or_create(
type=city,
name=field.value
)
member.segments.add(my_city)
class Migration(migrations.Migration):
dependencies = [
('segments', '0006_auto_20210914_1134'),
]
operations = [
migrations.RunPython(
migrate_extra_fields_to_segments,
migrations.RunPython.noop
)
]
|
<commit_before><commit_msg>Migrate custom user fields to segments<commit_after># Generated by Django 2.2.24 on 2022-01-19 08:45
from django.db import migrations
def migrate_extra_fields_to_segments(apps, schema_editor):
Member = apps.get_model('members', 'Member')
SegmentType = apps.get_model('segments', 'SegmentType')
Segment = apps.get_model('segments', 'Segment')
CustomMemberFieldSettings = apps.get_model('members', 'CustomMemberFieldSettings')
MemberPlatformSettings = apps.get_model('members', 'MemberPlatformSettings')
CustomMemberField = apps.get_model('members', 'CustomMemberField')
if CustomMemberFieldSettings.objects.count() == 1:
MemberPlatformSettings.objects.update(
create_segments=True,
enable_segments=True
)
department, _ = SegmentType.objects.get_or_create(
name='Department',
slug='department',
)
for member in Member.objects.all():
field = CustomMemberField.objects.filter(member=member, field__name='department').first()
if field:
segment, _ = Segment.objects.get_or_create(
type=department,
name=field.value
)
member.segments.add(segment)
if CustomMemberFieldSettings.objects.count() == 4:
city, _ = SegmentType.objects.get_or_create(
name='City',
slug='city',
)
country, _ = SegmentType.objects.get_or_create(
name='Country',
slug='country',
)
for member in Member.objects.all():
field = CustomMemberField.objects.filter(member=member, field__name='country').first()
if field:
my_country, _ = Segment.objects.get_or_create(
type=country,
name=field.value
)
member.segments.add(my_country)
field = CustomMemberField.objects.filter(member=member, field__name='city').first()
if field:
my_city, _ = Segment.objects.get_or_create(
type=city,
name=field.value
)
member.segments.add(my_city)
class Migration(migrations.Migration):
dependencies = [
('segments', '0006_auto_20210914_1134'),
]
operations = [
migrations.RunPython(
migrate_extra_fields_to_segments,
migrations.RunPython.noop
)
]
|
|
e151466d2bf2a5843d683cada3c3d2563c1ab9d5
|
indra/tests/test_trrust.py
|
indra/tests/test_trrust.py
|
from nose.plugins.attrib import attr
from indra.sources import trrust
from indra.statements import RegulateAmount
@attr('slow', 'webservice')
def test_process_from_web():
tp = trrust.process_from_web()
assert len(tp.statements) > 6200
for stmt in tp.statements:
assert isinstance(stmt, RegulateAmount)
assert len(stmt.evidence) == 1
assert stmt.obj.db_refs.get('HGNC')
assert stmt.subj.db_refs.get('HGNC')
assert stmt.evidence[0].source_api == 'trrust'
assert stmt.evidence[0].pmid is not None
|
Add test for basic consistency
|
Add test for basic consistency
|
Python
|
bsd-2-clause
|
sorgerlab/belpy,sorgerlab/indra,bgyori/indra,sorgerlab/belpy,pvtodorov/indra,bgyori/indra,johnbachman/indra,sorgerlab/indra,pvtodorov/indra,johnbachman/belpy,pvtodorov/indra,sorgerlab/indra,johnbachman/belpy,johnbachman/indra,johnbachman/belpy,johnbachman/indra,pvtodorov/indra,sorgerlab/belpy,bgyori/indra
|
Add test for basic consistency
|
from nose.plugins.attrib import attr
from indra.sources import trrust
from indra.statements import RegulateAmount
@attr('slow', 'webservice')
def test_process_from_web():
tp = trrust.process_from_web()
assert len(tp.statements) > 6200
for stmt in tp.statements:
assert isinstance(stmt, RegulateAmount)
assert len(stmt.evidence) == 1
assert stmt.obj.db_refs.get('HGNC')
assert stmt.subj.db_refs.get('HGNC')
assert stmt.evidence[0].source_api == 'trrust'
assert stmt.evidence[0].pmid is not None
|
<commit_before><commit_msg>Add test for basic consistency<commit_after>
|
from nose.plugins.attrib import attr
from indra.sources import trrust
from indra.statements import RegulateAmount
@attr('slow', 'webservice')
def test_process_from_web():
tp = trrust.process_from_web()
assert len(tp.statements) > 6200
for stmt in tp.statements:
assert isinstance(stmt, RegulateAmount)
assert len(stmt.evidence) == 1
assert stmt.obj.db_refs.get('HGNC')
assert stmt.subj.db_refs.get('HGNC')
assert stmt.evidence[0].source_api == 'trrust'
assert stmt.evidence[0].pmid is not None
|
Add test for basic consistencyfrom nose.plugins.attrib import attr
from indra.sources import trrust
from indra.statements import RegulateAmount
@attr('slow', 'webservice')
def test_process_from_web():
tp = trrust.process_from_web()
assert len(tp.statements) > 6200
for stmt in tp.statements:
assert isinstance(stmt, RegulateAmount)
assert len(stmt.evidence) == 1
assert stmt.obj.db_refs.get('HGNC')
assert stmt.subj.db_refs.get('HGNC')
assert stmt.evidence[0].source_api == 'trrust'
assert stmt.evidence[0].pmid is not None
|
<commit_before><commit_msg>Add test for basic consistency<commit_after>from nose.plugins.attrib import attr
from indra.sources import trrust
from indra.statements import RegulateAmount
@attr('slow', 'webservice')
def test_process_from_web():
tp = trrust.process_from_web()
assert len(tp.statements) > 6200
for stmt in tp.statements:
assert isinstance(stmt, RegulateAmount)
assert len(stmt.evidence) == 1
assert stmt.obj.db_refs.get('HGNC')
assert stmt.subj.db_refs.get('HGNC')
assert stmt.evidence[0].source_api == 'trrust'
assert stmt.evidence[0].pmid is not None
|
|
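The test above only checks statement counts and evidence shape. As a small follow-up, a standard-library sketch that summarises what the TRRUST processor returned, grouped by concrete statement class (the class names in the comment are examples, not guaranteed output):
# Sketch, not part of the test above: summarise processed statements by type.
from collections import Counter
from indra.sources import trrust
tp = trrust.process_from_web()
by_type = Counter(type(stmt).__name__ for stmt in tp.statements)
print(by_type.most_common())  # e.g. [('IncreaseAmount', ...), ('DecreaseAmount', ...)]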
3f15d22e126ba902afafce7484a1f7103401e9bb
|
instrument-classification/generate_audio_samples.py
|
instrument-classification/generate_audio_samples.py
|
"""
This generates a single-tone audio clip for instrument classification.
"""
import music21
from music21.chord import Chord
from music21.duration import Duration
from music21.instrument import Instrument
from music21.note import Note
from music21.stream import Stream
from music21.tempo import MetronomeMark
from music21.volume import Volume
def make_instrument(id):
i = Instrument()
i.midiProgram = id
return i
def chord_with_volume(chord, volume):
chord.volume = Volume(velocityScalar=volume)
return chord
def generate_single_note(midi_number, midi_instrument, volume, duration, tempo):
"""
Generates a stream containing a single note with given parameters.
midi_number - MIDI note number, 0 to 127
    midi_instrument - MIDI instrument number, 0 to 127
duration - floating point number (in quarter note lengths)
volume - 0.0 to 1.0
tempo - number of quarter notes per minute (eg. 120)
Note that there's a quarter note rest at the beginning and at the end.
"""
return Stream([
MetronomeMark(number=tempo),
make_instrument(midi_instrument),
chord_with_volume(Chord([
Note(midi=midi_number, duration=Duration(duration))
]), volume)
])
def write_midi(stream, output_file):
stream.write('midi', output_file)
if __name__ == '__main__':
# example
stream = generate_single_note(midi_number=60, midi_instrument=2, volume=1.0, duration=0.5, tempo=120)
write_midi(stream, 'data/working/example-parametric-note/01.midi')
# TODO:
# - create a better API
# - make a random generator of the parameters
# - produce audio samples in batch
# - either one file per samples (many runs of FS - may be slow)
# - or make a big MIDI (then audio) and then split (synthesize in one run)
|
Add a script to generate a parametric single-note audio sample.
|
Add a script to generate a parametric single-note audio sample.
It uses the music21 library to produce MIDI which can be synthesized then via fluidsynth.
Parameters: pitch (MIDI note number), MIDI instrument, volume, duration, tempo.
|
Python
|
mit
|
bzamecnik/ml,bzamecnik/ml,bzamecnik/ml,bzamecnik/ml-playground,bzamecnik/ml-playground
|
Add a script to generate a parametric single-note audio sample.
It uses the music21 library to produce MIDI which can be synthesized then via fluidsynth.
Parameters: pitch (MIDI note number), MIDI instrument, volume, duration, tempo.
|
"""
This generates a single-tone audio clip for instrument classification.
"""
import music21
from music21.chord import Chord
from music21.duration import Duration
from music21.instrument import Instrument
from music21.note import Note
from music21.stream import Stream
from music21.tempo import MetronomeMark
from music21.volume import Volume
def make_instrument(id):
i = Instrument()
i.midiProgram = id
return i
def chord_with_volume(chord, volume):
chord.volume = Volume(velocityScalar=volume)
return chord
def generate_single_note(midi_number, midi_instrument, volume, duration, tempo):
"""
Generates a stream containing a single note with given parameters.
midi_number - MIDI note number, 0 to 127
    midi_instrument - MIDI instrument number, 0 to 127
duration - floating point number (in quarter note lengths)
volume - 0.0 to 1.0
tempo - number of quarter notes per minute (eg. 120)
Note that there's a quarter note rest at the beginning and at the end.
"""
return Stream([
MetronomeMark(number=tempo),
make_instrument(midi_instrument),
chord_with_volume(Chord([
Note(midi=midi_number, duration=Duration(duration))
]), volume)
])
def write_midi(stream, output_file):
stream.write('midi', output_file)
if __name__ == '__main__':
# example
stream = generate_single_note(midi_number=60, midi_instrument=2, volume=1.0, duration=0.5, tempo=120)
write_midi(stream, 'data/working/example-parametric-note/01.midi')
# TODO:
# - create a better API
# - make a random generator of the parameters
# - produce audio samples in batch
# - either one file per samples (many runs of FS - may be slow)
# - or make a big MIDI (then audio) and then split (synthesize in one run)
|
<commit_before><commit_msg>Add a script to generate a parametric single-note audio sample.
It uses the music21 library to produce MIDI which can be synthesized then via fluidsynth.
Parameters: pitch (MIDI note number), MIDI instrument, volume, duration, tempo.<commit_after>
|
"""
This generates a single-tone audio clip for instrument classification.
"""
import music21
from music21.chord import Chord
from music21.duration import Duration
from music21.instrument import Instrument
from music21.note import Note
from music21.stream import Stream
from music21.tempo import MetronomeMark
from music21.volume import Volume
def make_instrument(id):
i = Instrument()
i.midiProgram = id
return i
def chord_with_volume(chord, volume):
chord.volume = Volume(velocityScalar=volume)
return chord
def generate_single_note(midi_number, midi_instrument, volume, duration, tempo):
"""
Generates a stream containing a single note with given parameters.
midi_number - MIDI note number, 0 to 127
    midi_instrument - MIDI instrument number, 0 to 127
duration - floating point number (in quarter note lengths)
volume - 0.0 to 1.0
tempo - number of quarter notes per minute (eg. 120)
Note that there's a quarter note rest at the beginning and at the end.
"""
return Stream([
MetronomeMark(number=tempo),
make_instrument(midi_instrument),
chord_with_volume(Chord([
Note(midi=midi_number, duration=Duration(duration))
]), volume)
])
def write_midi(stream, output_file):
stream.write('midi', output_file)
if __name__ == '__main__':
# example
stream = generate_single_note(midi_number=60, midi_instrument=2, volume=1.0, duration=0.5, tempo=120)
write_midi(stream, 'data/working/example-parametric-note/01.midi')
# TODO:
# - create a better API
# - make a random generator of the parameters
# - produce audio samples in batch
# - either one file per samples (many runs of FS - may be slow)
# - or make a big MIDI (then audio) and then split (synthesize in one run)
|
Add a script to generate a parametric single-note audio sample.
It uses the music21 library to produce MIDI which can be synthesized then via fluidsynth.
Parameters: pitch (MIDI note number), MIDI instrument, volume, duration, tempo."""
This generates a single-tone audio clip for instrument classification.
"""
import music21
from music21.chord import Chord
from music21.duration import Duration
from music21.instrument import Instrument
from music21.note import Note
from music21.stream import Stream
from music21.tempo import MetronomeMark
from music21.volume import Volume
def make_instrument(id):
i = Instrument()
i.midiProgram = id
return i
def chord_with_volume(chord, volume):
chord.volume = Volume(velocityScalar=volume)
return chord
def generate_single_note(midi_number, midi_instrument, volume, duration, tempo):
"""
Generates a stream containing a single note with given parameters.
midi_number - MIDI note number, 0 to 127
    midi_instrument - MIDI instrument number, 0 to 127
duration - floating point number (in quarter note lengths)
volume - 0.0 to 1.0
tempo - number of quarter notes per minute (eg. 120)
Note that there's a quarter note rest at the beginning and at the end.
"""
return Stream([
MetronomeMark(number=tempo),
make_instrument(midi_instrument),
chord_with_volume(Chord([
Note(midi=midi_number, duration=Duration(duration))
]), volume)
])
def write_midi(stream, output_file):
stream.write('midi', output_file)
if __name__ == '__main__':
# example
stream = generate_single_note(midi_number=60, midi_instrument=2, volume=1.0, duration=0.5, tempo=120)
write_midi(stream, 'data/working/example-parametric-note/01.midi')
# TODO:
# - create a better API
# - make a random generator of the parameters
# - produce audio samples in batch
# - either one file per samples (many runs of FS - may be slow)
# - or make a big MIDI (then audio) and then split (synthesize in one run)
|
<commit_before><commit_msg>Add a script to generate a parametric single-note audio sample.
It uses the music21 library to produce MIDI which can be synthesized then via fluidsynth.
Parameters: pitch (MIDI note number), MIDI instrument, volume, duration, tempo.<commit_after>"""
This generates a single-tone audio clip for instrument classification.
"""
import music21
from music21.chord import Chord
from music21.duration import Duration
from music21.instrument import Instrument
from music21.note import Note
from music21.stream import Stream
from music21.tempo import MetronomeMark
from music21.volume import Volume
def make_instrument(id):
i = Instrument()
i.midiProgram = id
return i
def chord_with_volume(chord, volume):
chord.volume = Volume(velocityScalar=volume)
return chord
def generate_single_note(midi_number, midi_instrument, volume, duration, tempo):
"""
Generates a stream containing a single note with given parameters.
midi_number - MIDI note number, 0 to 127
    midi_instrument - MIDI instrument number, 0 to 127
duration - floating point number (in quarter note lengths)
volume - 0.0 to 1.0
tempo - number of quarter notes per minute (eg. 120)
Note that there's a quarter note rest at the beginning and at the end.
"""
return Stream([
MetronomeMark(number=tempo),
make_instrument(midi_instrument),
chord_with_volume(Chord([
Note(midi=midi_number, duration=Duration(duration))
]), volume)
])
def write_midi(stream, output_file):
stream.write('midi', output_file)
if __name__ == '__main__':
# example
stream = generate_single_note(midi_number=60, midi_instrument=2, volume=1.0, duration=0.5, tempo=120)
write_midi(stream, 'data/working/example-parametric-note/01.midi')
# TODO:
# - create a better API
# - make a random generator of the parameters
# - produce audio samples in batch
# - either one file per samples (many runs of FS - may be slow)
# - or make a big MIDI (then audio) and then split (synthesize in one run)
|
|
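The TODO list in the commit above mentions synthesising the generated MIDI via fluidsynth. A hedged sketch of that step using FluidSynth's command-line batch rendering; the SoundFont path is a placeholder and fluidsynth itself must be installed separately.
# Sketch, not part of the commit above: render the MIDI file to WAV with FluidSynth.
import subprocess
def midi_to_wav(midi_file, wav_file, sound_font='FluidR3_GM.sf2', sample_rate=44100):
    # fluidsynth -ni <soundfont> <midi> -F <wav> -r <rate> renders without an audio driver
    subprocess.check_call([
        'fluidsynth', '-ni', sound_font, midi_file,
        '-F', wav_file, '-r', str(sample_rate),
    ])
midi_to_wav('data/working/example-parametric-note/01.midi',
            'data/working/example-parametric-note/01.wav')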
4a372880df375a030caf7f3e6f688734b402601d
|
rpi/find_obj.py
|
rpi/find_obj.py
|
import io
import socket
import struct
import time
import picamera
import picamera.array
import cv2
import numpy as np
client_socket = socket.socket()
client_socket.connect(('10.42.0.1', 8000))
height = 240
width = 320
#height = 480
#width = 640
# Accept a single connection and make a file-like object out of it
connection = client_socket.makefile('wb')
try:
#connection.write(struct.pack("<LLL", height, width, 3))
client_socket.sendall(struct.pack("<LLL", height, width, 1))
with picamera.PiCamera() as camera:
camera.resolution = (width, height)
camera.framerate = 10
rawCapture = picamera.array.PiRGBArray(camera, size=(width, height))
# Start a preview and let the camera warm up
#camera.start_preview()
time.sleep(0.1)
#start = time.time()
#conn_write = connection.write
        # Identify color red
lower = np.array([3, 165, 110], dtype = "uint8")
upper = np.array([12, 255, 255], dtype = "uint8")
kernel = np.ones((5,5),np.uint8)
print("Sending images")
for frame in camera.capture_continuous(rawCapture, format='bgr', use_video_port=True):
# Grab the raw NumPy array representing the image
image = frame.array
#print("size: {0}".format(image.size))
blurred = cv2.GaussianBlur(image, (5, 5), 0)
hsv = cv2.cvtColor(blurred, cv2.COLOR_BGR2HSV)
mask = cv2.inRange(hsv, lower, upper)
#output = cv2.bitwise_and(image, image, mask=mask)
#thresh = cv2.threshold(blurred, 60, 255, cv2.THRESH_BINARY)[1]
            opening = cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel)
            closing = cv2.morphologyEx(opening, cv2.MORPH_CLOSE, kernel)
            client_socket.sendall(closing.data)
#connection.flush()
#print("Image sent")
#finish = time.time()
#print("%.1f FPS" % float(1/(finish-start)))
#start = finish
rawCapture.truncate(0)
except (KeyboardInterrupt):
pass
finally:
#connection.close()
client_socket.close()
|
Add RPI file to find object
|
Add RPI file to find object
|
Python
|
mit
|
danielmundi/laser-tracking
|
Add RPI file to find object
|
import io
import socket
import struct
import time
import picamera
import picamera.array
import cv2
import numpy as np
client_socket = socket.socket()
client_socket.connect(('10.42.0.1', 8000))
height = 240
width = 320
#height = 480
#width = 640
# Accept a single connection and make a file-like object out of it
connection = client_socket.makefile('wb')
try:
#connection.write(struct.pack("<LLL", height, width, 3))
client_socket.sendall(struct.pack("<LLL", height, width, 1))
with picamera.PiCamera() as camera:
camera.resolution = (width, height)
camera.framerate = 10
rawCapture = picamera.array.PiRGBArray(camera, size=(width, height))
# Start a preview and let the camera warm up
#camera.start_preview()
time.sleep(0.1)
#start = time.time()
#conn_write = connection.write
        # Identify color red
lower = np.array([3, 165, 110], dtype = "uint8")
upper = np.array([12, 255, 255], dtype = "uint8")
kernel = np.ones((5,5),np.uint8)
print("Sending images")
for frame in camera.capture_continuous(rawCapture, format='bgr', use_video_port=True):
# Grab the raw NumPy array representing the image
image = frame.array
#print("size: {0}".format(image.size))
blurred = cv2.GaussianBlur(image, (5, 5), 0)
hsv = cv2.cvtColor(blurred, cv2.COLOR_BGR2HSV)
mask = cv2.inRange(hsv, lower, upper)
#output = cv2.bitwise_and(image, image, mask=mask)
#thresh = cv2.threshold(blurred, 60, 255, cv2.THRESH_BINARY)[1]
            opening = cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel)
            closing = cv2.morphologyEx(opening, cv2.MORPH_CLOSE, kernel)
            client_socket.sendall(closing.data)
#connection.flush()
#print("Image sent")
#finish = time.time()
#print("%.1f FPS" % float(1/(finish-start)))
#start = finish
rawCapture.truncate(0)
except (KeyboardInterrupt):
pass
finally:
#connection.close()
client_socket.close()
|
<commit_before><commit_msg>Add RPI file to find object<commit_after>
|
import io
import socket
import struct
import time
import picamera
import picamera.array
import cv2
import numpy as np
client_socket = socket.socket()
client_socket.connect(('10.42.0.1', 8000))
height = 240
width = 320
#height = 480
#width = 640
# Accept a single connection and make a file-like object out of it
connection = client_socket.makefile('wb')
try:
#connection.write(struct.pack("<LLL", height, width, 3))
client_socket.sendall(struct.pack("<LLL", height, width, 1))
with picamera.PiCamera() as camera:
camera.resolution = (width, height)
camera.framerate = 10
rawCapture = picamera.array.PiRGBArray(camera, size=(width, height))
# Start a preview and let the camera warm up
#camera.start_preview()
time.sleep(0.1)
#start = time.time()
#conn_write = connection.write
        # Identify color red
lower = np.array([3, 165, 110], dtype = "uint8")
upper = np.array([12, 255, 255], dtype = "uint8")
kernel = np.ones((5,5),np.uint8)
print("Sending images")
for frame in camera.capture_continuous(rawCapture, format='bgr', use_video_port=True):
# Grab the raw NumPy array representing the image
image = frame.array
#print("size: {0}".format(image.size))
blurred = cv2.GaussianBlur(image, (5, 5), 0)
hsv = cv2.cvtColor(blurred, cv2.COLOR_BGR2HSV)
mask = cv2.inRange(hsv, lower, upper)
#output = cv2.bitwise_and(image, image, mask=mask)
#thresh = cv2.threshold(blurred, 60, 255, cv2.THRESH_BINARY)[1]
            opening = cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel)
            closing = cv2.morphologyEx(opening, cv2.MORPH_CLOSE, kernel)
            client_socket.sendall(closing.data)
#connection.flush()
#print("Image sent")
#finish = time.time()
#print("%.1f FPS" % float(1/(finish-start)))
#start = finish
rawCapture.truncate(0)
except (KeyboardInterrupt):
pass
finally:
#connection.close()
client_socket.close()
|
Add RPI file to find objectimport io
import socket
import struct
import time
import picamera
import picamera.array
import cv2
import numpy as np
client_socket = socket.socket()
client_socket.connect(('10.42.0.1', 8000))
height = 240
width = 320
#height = 480
#width = 640
# Accept a single connection and make a file-like object out of it
connection = client_socket.makefile('wb')
try:
#connection.write(struct.pack("<LLL", height, width, 3))
client_socket.sendall(struct.pack("<LLL", height, width, 1))
with picamera.PiCamera() as camera:
camera.resolution = (width, height)
camera.framerate = 10
rawCapture = picamera.array.PiRGBArray(camera, size=(width, height))
# Start a preview and let the camera warm up
#camera.start_preview()
time.sleep(0.1)
#start = time.time()
#conn_write = connection.write
        # Identify color red
lower = np.array([3, 165, 110], dtype = "uint8")
upper = np.array([12, 255, 255], dtype = "uint8")
kernel = np.ones((5,5),np.uint8)
print("Sending images")
for frame in camera.capture_continuous(rawCapture, format='bgr', use_video_port=True):
# Grab the raw NumPy array representing the image
image = frame.array
#print("size: {0}".format(image.size))
blurred = cv2.GaussianBlur(image, (5, 5), 0)
hsv = cv2.cvtColor(blurred, cv2.COLOR_BGR2HSV)
mask = cv2.inRange(hsv, lower, upper)
#output = cv2.bitwise_and(image, image, mask=mask)
#thresh = cv2.threshold(blurred, 60, 255, cv2.THRESH_BINARY)[1]
            opening = cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel)
            closing = cv2.morphologyEx(opening, cv2.MORPH_CLOSE, kernel)
            client_socket.sendall(closing.data)
#connection.flush()
#print("Image sent")
#finish = time.time()
#print("%.1f FPS" % float(1/(finish-start)))
#start = finish
rawCapture.truncate(0)
except (KeyboardInterrupt):
pass
finally:
#connection.close()
client_socket.close()
|
<commit_before><commit_msg>Add RPI file to find object<commit_after>import io
import socket
import struct
import time
import picamera
import picamera.array
import cv2
import numpy as np
client_socket = socket.socket()
client_socket.connect(('10.42.0.1', 8000))
height = 240
width = 320
#height = 480
#width = 640
# Accept a single connection and make a file-like object out of it
connection = client_socket.makefile('wb')
try:
#connection.write(struct.pack("<LLL", height, width, 3))
client_socket.sendall(struct.pack("<LLL", height, width, 1))
with picamera.PiCamera() as camera:
camera.resolution = (width, height)
camera.framerate = 10
rawCapture = picamera.array.PiRGBArray(camera, size=(width, height))
# Start a preview and let the camera warm up
#camera.start_preview()
time.sleep(0.1)
#start = time.time()
#conn_write = connection.write
        # Identify color red
lower = np.array([3, 165, 110], dtype = "uint8")
upper = np.array([12, 255, 255], dtype = "uint8")
kernel = np.ones((5,5),np.uint8)
print("Sending images")
for frame in camera.capture_continuous(rawCapture, format='bgr', use_video_port=True):
# Grab the raw NumPy array representing the image
image = frame.array
#print("size: {0}".format(image.size))
blurred = cv2.GaussianBlur(image, (5, 5), 0)
hsv = cv2.cvtColor(blurred, cv2.COLOR_BGR2HSV)
mask = cv2.inRange(hsv, lower, upper)
#output = cv2.bitwise_and(image, image, mask=mask)
#thresh = cv2.threshold(blurred, 60, 255, cv2.THRESH_BINARY)[1]
            opening = cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel)
            closing = cv2.morphologyEx(opening, cv2.MORPH_CLOSE, kernel)
            client_socket.sendall(closing.data)
#connection.flush()
#print("Image sent")
#finish = time.time()
#print("%.1f FPS" % float(1/(finish-start)))
#start = finish
rawCapture.truncate(0)
except (KeyboardInterrupt):
pass
finally:
#connection.close()
client_socket.close()
|
|
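Only the Raspberry Pi (sending) side is committed above. The following is a sketch of a matching receiver for the <LLL header plus raw-frame protocol, assuming one uint8 channel per pixel as sent by the script; it is illustrative and not part of the commit.
# Sketch, not part of the commit above: receive and reshape the streamed frames.
import socket
import struct
import numpy as np
def recv_exact(conn, size):
    buf = b''
    while len(buf) < size:
        chunk = conn.recv(size - len(buf))
        if not chunk:
            raise ConnectionError('socket closed mid-frame')
        buf += chunk
    return buf
server = socket.socket()
server.bind(('0.0.0.0', 8000))
server.listen(1)
conn, _ = server.accept()
height, width, channels = struct.unpack('<LLL', recv_exact(conn, struct.calcsize('<LLL')))
frame_size = height * width * channels
while True:
    frame = np.frombuffer(recv_exact(conn, frame_size), dtype=np.uint8)
    frame = frame.reshape((height, width, channels)).squeeze()
    # process the binary mask here, e.g. locate the largest blob with OpenCV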
ea4857f1f456e5f44d2706b3756c07695b1ee66b
|
filter_plugins/custom_plugins.py
|
filter_plugins/custom_plugins.py
|
# depem: Strip PEM headers and remove all whitespace from string
# Usage: {{ foo | depem }}
def depem(string):
import re
return re.sub(r'\s+|(-----(BEGIN|END).*-----)', '', string)
class FilterModule(object):
def filters(self):
return {
'depem': depem,
}
|
Add depem custom filter plugin
|
Add depem custom filter plugin
|
Python
|
apache-2.0
|
OpenConext/OpenConext-deploy,OpenConext/OpenConext-deploy,OpenConext/OpenConext-deploy,OpenConext/OpenConext-deploy,OpenConext/OpenConext-deploy
|
Add depem custom filter plugin
|
# depem: Strip PEM headers and remove all whitespace from string
# Usage: {{ foo | depem }}
def depem(string):
import re
return re.sub(r'\s+|(-----(BEGIN|END).*-----)', '', string)
class FilterModule(object):
def filters(self):
return {
'depem': depem,
}
|
<commit_before><commit_msg>Add depem custom filter plugin<commit_after>
|
# depem: Strip PEM headers and remove all whitespace from string
# Usage: {{ foo | depem }}
def depem(string):
import re
return re.sub(r'\s+|(-----(BEGIN|END).*-----)', '', string)
class FilterModule(object):
def filters(self):
return {
'depem': depem,
}
|
Add depem custom filter plugin# depem: Strip PEM headers and remove all whitespace from string
# Usage: {{ foo | depem }}
def depem(string):
import re
return re.sub(r'\s+|(-----(BEGIN|END).*-----)', '', string)
class FilterModule(object):
def filters(self):
return {
'depem': depem,
}
|
<commit_before><commit_msg>Add depem custom filter plugin<commit_after># depem: Strip PEM headers and remove all whitespace from string
# Usage: {{ foo | depem }}
def depem(string):
import re
return re.sub(r'\s+|(-----(BEGIN|END).*-----)', '', string)
class FilterModule(object):
def filters(self):
return {
'depem': depem,
}
|
|
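A quick way to exercise the depem filter from the commit above outside Ansible; the import path is an assumption based on the plugin file name, and the certificate body is dummy data.
# Sketch, not part of the commit above: exercising depem directly.
from custom_plugins import depem
pem = ("-----BEGIN CERTIFICATE-----\n"
       "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A\n"
       "MIIBCgKCAQEA7bq8examplebody\n"
       "-----END CERTIFICATE-----\n")
print(depem(pem))  # MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA7bq8examplebody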
526e90b2b47a68f128e3cd618fd67ed6aefeaff5
|
mysite/profile/management/commands/profile_hourly_tasks.py
|
mysite/profile/management/commands/profile_hourly_tasks.py
|
import datetime
import logging
from django.core.management.base import BaseCommand
import mysite.profile.tasks
import mysite.search.models
import mysite.search.tasks
## FIXME: Move to a search management command?
def periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch():
logging.info("Checking if bug epoch eclipsed the cached search epoch")
cache_time = mysite.search.models.Epoch.get_for_string('search_cache')
bug_time = mysite.search.models.Epoch.get_for_string('search_cache')
if cache_time < bug_time:
mysite.search.tasks.clear_search_cache()
mysite.search.models.Epoch.bump_for_string('search_cache')
logging.info("Finished dealing with bug epoch vs. cached search epoch.")
class Command(BaseCommand):
help = "Run this once hourly for the OpenHatch profile app."
def handle(self, *args, **options):
mysite.profile.tasks.sync_bug_epoch_from_model_then_fill_recommended_bugs_cache()
mysite.profile.tasks.fill_recommended_bugs_cache()
# Every 4 hours, clear search cache
if (datetime.datetime.utcnow().hour % 4) == 0:
periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch()
|
import datetime
import logging
from django.core.management.base import BaseCommand
import mysite.profile.tasks
import mysite.search.models
import mysite.search.tasks
## FIXME: Move to a search management command?
def periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch():
logging.info("Checking if bug epoch eclipsed the cached search epoch")
cache_time = mysite.search.models.Epoch.get_for_string('search_cache')
bug_time = mysite.search.models.Epoch.get_for_string('search_cache')
if cache_time < bug_time:
mysite.search.tasks.clear_search_cache()
mysite.search.models.Epoch.bump_for_string('search_cache')
logging.info("Finished dealing with bug epoch vs. cached search epoch.")
class Command(BaseCommand):
help = "Run this once hourly for the OpenHatch profile app."
def handle(self, *args, **options):
mysite.profile.tasks.sync_bug_epoch_from_model_then_fill_recommended_bugs_cache()
# Every 4 hours, clear search cache
if (datetime.datetime.utcnow().hour % 4) == 0:
periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch()
|
Remove apparently superfluous call to fill_recommended_bugs_cache.
|
Remove apparently superfluous call to fill_recommended_bugs_cache.
|
Python
|
agpl-3.0
|
vipul-sharma20/oh-mainline,heeraj123/oh-mainline,willingc/oh-mainline,Changaco/oh-mainline,eeshangarg/oh-mainline,moijes12/oh-mainline,SnappleCap/oh-mainline,sudheesh001/oh-mainline,vipul-sharma20/oh-mainline,openhatch/oh-mainline,nirmeshk/oh-mainline,heeraj123/oh-mainline,mzdaniel/oh-mainline,vipul-sharma20/oh-mainline,onceuponatimeforever/oh-mainline,mzdaniel/oh-mainline,SnappleCap/oh-mainline,waseem18/oh-mainline,sudheesh001/oh-mainline,moijes12/oh-mainline,Changaco/oh-mainline,ehashman/oh-mainline,willingc/oh-mainline,jledbetter/openhatch,onceuponatimeforever/oh-mainline,campbe13/openhatch,campbe13/openhatch,Changaco/oh-mainline,eeshangarg/oh-mainline,vipul-sharma20/oh-mainline,mzdaniel/oh-mainline,ehashman/oh-mainline,ehashman/oh-mainline,Changaco/oh-mainline,sudheesh001/oh-mainline,heeraj123/oh-mainline,heeraj123/oh-mainline,SnappleCap/oh-mainline,waseem18/oh-mainline,jledbetter/openhatch,eeshangarg/oh-mainline,openhatch/oh-mainline,Changaco/oh-mainline,mzdaniel/oh-mainline,nirmeshk/oh-mainline,eeshangarg/oh-mainline,willingc/oh-mainline,onceuponatimeforever/oh-mainline,openhatch/oh-mainline,waseem18/oh-mainline,willingc/oh-mainline,onceuponatimeforever/oh-mainline,heeraj123/oh-mainline,moijes12/oh-mainline,mzdaniel/oh-mainline,ojengwa/oh-mainline,sudheesh001/oh-mainline,sudheesh001/oh-mainline,campbe13/openhatch,SnappleCap/oh-mainline,jledbetter/openhatch,campbe13/openhatch,moijes12/oh-mainline,ehashman/oh-mainline,jledbetter/openhatch,waseem18/oh-mainline,openhatch/oh-mainline,ojengwa/oh-mainline,onceuponatimeforever/oh-mainline,nirmeshk/oh-mainline,waseem18/oh-mainline,campbe13/openhatch,willingc/oh-mainline,ojengwa/oh-mainline,moijes12/oh-mainline,nirmeshk/oh-mainline,mzdaniel/oh-mainline,ojengwa/oh-mainline,ehashman/oh-mainline,openhatch/oh-mainline,SnappleCap/oh-mainline,vipul-sharma20/oh-mainline,jledbetter/openhatch,mzdaniel/oh-mainline,nirmeshk/oh-mainline,ojengwa/oh-mainline,eeshangarg/oh-mainline
|
import datetime
import logging
from django.core.management.base import BaseCommand
import mysite.profile.tasks
import mysite.search.models
import mysite.search.tasks
## FIXME: Move to a search management command?
def periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch():
logging.info("Checking if bug epoch eclipsed the cached search epoch")
cache_time = mysite.search.models.Epoch.get_for_string('search_cache')
bug_time = mysite.search.models.Epoch.get_for_string('search_cache')
if cache_time < bug_time:
mysite.search.tasks.clear_search_cache()
mysite.search.models.Epoch.bump_for_string('search_cache')
logging.info("Finished dealing with bug epoch vs. cached search epoch.")
class Command(BaseCommand):
help = "Run this once hourly for the OpenHatch profile app."
def handle(self, *args, **options):
mysite.profile.tasks.sync_bug_epoch_from_model_then_fill_recommended_bugs_cache()
mysite.profile.tasks.fill_recommended_bugs_cache()
# Every 4 hours, clear search cache
if (datetime.datetime.utcnow().hour % 4) == 0:
periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch()
Remove apparently superfluous call to fill_recommended_bugs_cache.
|
import datetime
import logging
from django.core.management.base import BaseCommand
import mysite.profile.tasks
import mysite.search.models
import mysite.search.tasks
## FIXME: Move to a search management command?
def periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch():
logging.info("Checking if bug epoch eclipsed the cached search epoch")
cache_time = mysite.search.models.Epoch.get_for_string('search_cache')
bug_time = mysite.search.models.Epoch.get_for_string('search_cache')
if cache_time < bug_time:
mysite.search.tasks.clear_search_cache()
mysite.search.models.Epoch.bump_for_string('search_cache')
logging.info("Finished dealing with bug epoch vs. cached search epoch.")
class Command(BaseCommand):
help = "Run this once hourly for the OpenHatch profile app."
def handle(self, *args, **options):
mysite.profile.tasks.sync_bug_epoch_from_model_then_fill_recommended_bugs_cache()
# Every 4 hours, clear search cache
if (datetime.datetime.utcnow().hour % 4) == 0:
periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch()
|
<commit_before>import datetime
import logging
from django.core.management.base import BaseCommand
import mysite.profile.tasks
import mysite.search.models
import mysite.search.tasks
## FIXME: Move to a search management command?
def periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch():
logging.info("Checking if bug epoch eclipsed the cached search epoch")
cache_time = mysite.search.models.Epoch.get_for_string('search_cache')
bug_time = mysite.search.models.Epoch.get_for_string('search_cache')
if cache_time < bug_time:
mysite.search.tasks.clear_search_cache()
mysite.search.models.Epoch.bump_for_string('search_cache')
logging.info("Finished dealing with bug epoch vs. cached search epoch.")
class Command(BaseCommand):
help = "Run this once hourly for the OpenHatch profile app."
def handle(self, *args, **options):
mysite.profile.tasks.sync_bug_epoch_from_model_then_fill_recommended_bugs_cache()
mysite.profile.tasks.fill_recommended_bugs_cache()
# Every 4 hours, clear search cache
if (datetime.datetime.utcnow().hour % 4) == 0:
periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch()
<commit_msg>Remove apparently superfluous call to fill_recommended_bugs_cache.<commit_after>
|
import datetime
import logging
from django.core.management.base import BaseCommand
import mysite.profile.tasks
import mysite.search.models
import mysite.search.tasks
## FIXME: Move to a search management command?
def periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch():
logging.info("Checking if bug epoch eclipsed the cached search epoch")
cache_time = mysite.search.models.Epoch.get_for_string('search_cache')
bug_time = mysite.search.models.Epoch.get_for_string('search_cache')
if cache_time < bug_time:
mysite.search.tasks.clear_search_cache()
mysite.search.models.Epoch.bump_for_string('search_cache')
logging.info("Finished dealing with bug epoch vs. cached search epoch.")
class Command(BaseCommand):
help = "Run this once hourly for the OpenHatch profile app."
def handle(self, *args, **options):
mysite.profile.tasks.sync_bug_epoch_from_model_then_fill_recommended_bugs_cache()
# Every 4 hours, clear search cache
if (datetime.datetime.utcnow().hour % 4) == 0:
periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch()
|
import datetime
import logging
from django.core.management.base import BaseCommand
import mysite.profile.tasks
import mysite.search.models
import mysite.search.tasks
## FIXME: Move to a search management command?
def periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch():
logging.info("Checking if bug epoch eclipsed the cached search epoch")
cache_time = mysite.search.models.Epoch.get_for_string('search_cache')
bug_time = mysite.search.models.Epoch.get_for_string('search_cache')
if cache_time < bug_time:
mysite.search.tasks.clear_search_cache()
mysite.search.models.Epoch.bump_for_string('search_cache')
logging.info("Finished dealing with bug epoch vs. cached search epoch.")
class Command(BaseCommand):
help = "Run this once hourly for the OpenHatch profile app."
def handle(self, *args, **options):
mysite.profile.tasks.sync_bug_epoch_from_model_then_fill_recommended_bugs_cache()
mysite.profile.tasks.fill_recommended_bugs_cache()
# Every 4 hours, clear search cache
if (datetime.datetime.utcnow().hour % 4) == 0:
periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch()
Remove apparently superfluous call to fill_recommended_bugs_cache.import datetime
import logging
from django.core.management.base import BaseCommand
import mysite.profile.tasks
import mysite.search.models
import mysite.search.tasks
## FIXME: Move to a search management command?
def periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch():
logging.info("Checking if bug epoch eclipsed the cached search epoch")
cache_time = mysite.search.models.Epoch.get_for_string('search_cache')
bug_time = mysite.search.models.Epoch.get_for_string('search_cache')
if cache_time < bug_time:
mysite.search.tasks.clear_search_cache()
mysite.search.models.Epoch.bump_for_string('search_cache')
logging.info("Finished dealing with bug epoch vs. cached search epoch.")
class Command(BaseCommand):
help = "Run this once hourly for the OpenHatch profile app."
def handle(self, *args, **options):
mysite.profile.tasks.sync_bug_epoch_from_model_then_fill_recommended_bugs_cache()
# Every 4 hours, clear search cache
if (datetime.datetime.utcnow().hour % 4) == 0:
periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch()
|
<commit_before>import datetime
import logging
from django.core.management.base import BaseCommand
import mysite.profile.tasks
import mysite.search.models
import mysite.search.tasks
## FIXME: Move to a search management command?
def periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch():
logging.info("Checking if bug epoch eclipsed the cached search epoch")
cache_time = mysite.search.models.Epoch.get_for_string('search_cache')
bug_time = mysite.search.models.Epoch.get_for_string('search_cache')
if cache_time < bug_time:
mysite.search.tasks.clear_search_cache()
mysite.search.models.Epoch.bump_for_string('search_cache')
logging.info("Finished dealing with bug epoch vs. cached search epoch.")
class Command(BaseCommand):
help = "Run this once hourly for the OpenHatch profile app."
def handle(self, *args, **options):
mysite.profile.tasks.sync_bug_epoch_from_model_then_fill_recommended_bugs_cache()
mysite.profile.tasks.fill_recommended_bugs_cache()
# Every 4 hours, clear search cache
if (datetime.datetime.utcnow().hour % 4) == 0:
periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch()
<commit_msg>Remove apparently superfluous call to fill_recommended_bugs_cache.<commit_after>import datetime
import logging
from django.core.management.base import BaseCommand
import mysite.profile.tasks
import mysite.search.models
import mysite.search.tasks
## FIXME: Move to a search management command?
def periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch():
logging.info("Checking if bug epoch eclipsed the cached search epoch")
cache_time = mysite.search.models.Epoch.get_for_string('search_cache')
bug_time = mysite.search.models.Epoch.get_for_string('search_cache')
if cache_time < bug_time:
mysite.search.tasks.clear_search_cache()
mysite.search.models.Epoch.bump_for_string('search_cache')
logging.info("Finished dealing with bug epoch vs. cached search epoch.")
class Command(BaseCommand):
help = "Run this once hourly for the OpenHatch profile app."
def handle(self, *args, **options):
mysite.profile.tasks.sync_bug_epoch_from_model_then_fill_recommended_bugs_cache()
# Every 4 hours, clear search cache
if (datetime.datetime.utcnow().hour % 4) == 0:
periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch()
|
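A standalone sketch of the compare-and-bump pattern this hourly command relies on, with a plain dict standing in for the Epoch model. Note that the command as captured reads the 'search_cache' epoch for both timestamps, so two distinct keys are used here purely to illustrate the intended comparison; the key names are illustrative only.

import datetime

epochs = {
    "bugs": datetime.datetime(2020, 1, 2),          # when bug data last changed
    "search_cache": datetime.datetime(2020, 1, 1),  # when the search cache was last rebuilt
}

def clear_search_cache():
    print("search cache cleared")

def refresh_cache_if_stale():
    cache_time = epochs["search_cache"]
    bug_time = epochs["bugs"]
    if cache_time < bug_time:        # newer bug data than the cached results
        clear_search_cache()
        epochs["search_cache"] = datetime.datetime.utcnow()

refresh_cache_if_stale()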
89172832b94959dd54bc27ea446064f4167c8de1
|
syntacticframes_project/syntacticframes/migrations/0007_auto_20141106_2121.py
|
syntacticframes_project/syntacticframes/migrations/0007_auto_20141106_2121.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('syntacticframes', '0006_auto_20141103_0939'),
]
operations = [
migrations.AlterModelOptions(
name='verbnetclass',
options={'ordering': ['levin_class', 'name']},
),
migrations.AlterModelOptions(
name='verbnetframeset',
options={'ordering': ['tree_id']},
),
migrations.AlterField(
model_name='verbnetframeset',
name='tree_id',
field=models.PositiveSmallIntegerField(),
preserve_default=True,
),
]
|
Migrate DB for fixed tree ordering
|
Migrate DB for fixed tree ordering
|
Python
|
mit
|
aymara/verbenet-editor,aymara/verbenet-editor,aymara/verbenet-editor
|
Migrate DB for fixed tree ordering
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('syntacticframes', '0006_auto_20141103_0939'),
]
operations = [
migrations.AlterModelOptions(
name='verbnetclass',
options={'ordering': ['levin_class', 'name']},
),
migrations.AlterModelOptions(
name='verbnetframeset',
options={'ordering': ['tree_id']},
),
migrations.AlterField(
model_name='verbnetframeset',
name='tree_id',
field=models.PositiveSmallIntegerField(),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Migrate DB for fixed tree ordering<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('syntacticframes', '0006_auto_20141103_0939'),
]
operations = [
migrations.AlterModelOptions(
name='verbnetclass',
options={'ordering': ['levin_class', 'name']},
),
migrations.AlterModelOptions(
name='verbnetframeset',
options={'ordering': ['tree_id']},
),
migrations.AlterField(
model_name='verbnetframeset',
name='tree_id',
field=models.PositiveSmallIntegerField(),
preserve_default=True,
),
]
|
Migrate DB for fixed tree ordering# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('syntacticframes', '0006_auto_20141103_0939'),
]
operations = [
migrations.AlterModelOptions(
name='verbnetclass',
options={'ordering': ['levin_class', 'name']},
),
migrations.AlterModelOptions(
name='verbnetframeset',
options={'ordering': ['tree_id']},
),
migrations.AlterField(
model_name='verbnetframeset',
name='tree_id',
field=models.PositiveSmallIntegerField(),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Migrate DB for fixed tree ordering<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('syntacticframes', '0006_auto_20141103_0939'),
]
operations = [
migrations.AlterModelOptions(
name='verbnetclass',
options={'ordering': ['levin_class', 'name']},
),
migrations.AlterModelOptions(
name='verbnetframeset',
options={'ordering': ['tree_id']},
),
migrations.AlterField(
model_name='verbnetframeset',
name='tree_id',
field=models.PositiveSmallIntegerField(),
preserve_default=True,
),
]
|
|
d143bb49bb632015d69a82adfb9ced6c27a706e7
|
voltracker/api/versions.py
|
voltracker/api/versions.py
|
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import httplib
from oslo.config import cfg
import webob.dec
from voltracker.common import wsgi
from voltracker.openstack.common import jsonutils
CONF = cfg.CONF
class Controller(object):
"""A wsgi controller that reports which API versions are supported."""
def index(self, req):
"""Respond to a request for all OpenStack API versions."""
def build_version_object(version, path, status):
return {
'id': 'v%s' % version,
'status': status,
'links': [
{
'rel': 'self',
'href': '%s/%s/' % (req.host_url, path),
},
],
}
version_objs = [
build_version_object(1.0, 'v1', 'CURRENT'),
]
response = webob.Response(request=req,
status=httplib.MULTIPLE_CHOICES,
content_type='application/json')
response.body = jsonutils.dumps(dict(versions=version_objs))
return response
@webob.dec.wsgify(RequestClass=wsgi.Request)
def __call__(self, req):
return self.index(req)
def create_resource(conf):
return wsgi.Resource(Controller())
|
Add voltracker api version display application
|
Add voltracker api version display application
At present, the api only has its first version to support
|
Python
|
apache-2.0
|
vmthunder/volt
|
Add voltracker api version display application
At present, the api only has its first version to support
|
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import httplib
from oslo.config import cfg
import webob.dec
from voltracker.common import wsgi
from voltracker.openstack.common import jsonutils
CONF = cfg.CONF
class Controller(object):
"""A wsgi controller that reports which API versions are supported."""
def index(self, req):
"""Respond to a request for all OpenStack API versions."""
def build_version_object(version, path, status):
return {
'id': 'v%s' % version,
'status': status,
'links': [
{
'rel': 'self',
'href': '%s/%s/' % (req.host_url, path),
},
],
}
version_objs = [
build_version_object(1.0, 'v1', 'CURRENT'),
]
response = webob.Response(request=req,
status=httplib.MULTIPLE_CHOICES,
content_type='application/json')
response.body = jsonutils.dumps(dict(versions=version_objs))
return response
@webob.dec.wsgify(RequestClass=wsgi.Request)
def __call__(self, req):
return self.index(req)
def create_resource(conf):
return wsgi.Resource(Controller())
|
<commit_before><commit_msg>Add voltracker api version display application
At present, the api only has its first version to support<commit_after>
|
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import httplib
from oslo.config import cfg
import webob.dec
from voltracker.common import wsgi
from voltracker.openstack.common import jsonutils
CONF = cfg.CONF
class Controller(object):
"""A wsgi controller that reports which API versions are supported."""
def index(self, req):
"""Respond to a request for all OpenStack API versions."""
def build_version_object(version, path, status):
return {
'id': 'v%s' % version,
'status': status,
'links': [
{
'rel': 'self',
'href': '%s/%s/' % (req.host_url, path),
},
],
}
version_objs = [
build_version_object(1.0, 'v1', 'CURRENT'),
]
response = webob.Response(request=req,
status=httplib.MULTIPLE_CHOICES,
content_type='application/json')
response.body = jsonutils.dumps(dict(versions=version_objs))
return response
@webob.dec.wsgify(RequestClass=wsgi.Request)
def __call__(self, req):
return self.index(req)
def create_resource(conf):
return wsgi.Resource(Controller())
|
Add voltracker api version display application
At present, the api only has its first version to support# Copyright 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import httplib
from oslo.config import cfg
import webob.dec
from voltracker.common import wsgi
from voltracker.openstack.common import jsonutils
CONF = cfg.CONF
class Controller(object):
"""A wsgi controller that reports which API versions are supported."""
def index(self, req):
"""Respond to a request for all OpenStack API versions."""
def build_version_object(version, path, status):
return {
'id': 'v%s' % version,
'status': status,
'links': [
{
'rel': 'self',
'href': '%s/%s/' % (req.host_url, path),
},
],
}
version_objs = [
build_version_object(1.0, 'v1', 'CURRENT'),
]
response = webob.Response(request=req,
status=httplib.MULTIPLE_CHOICES,
content_type='application/json')
response.body = jsonutils.dumps(dict(versions=version_objs))
return response
@webob.dec.wsgify(RequestClass=wsgi.Request)
def __call__(self, req):
return self.index(req)
def create_resource(conf):
return wsgi.Resource(Controller())
|
<commit_before><commit_msg>Add voltracker api version display application
At present, the api only has its first version to support<commit_after># Copyright 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import httplib
from oslo.config import cfg
import webob.dec
from voltracker.common import wsgi
from voltracker.openstack.common import jsonutils
CONF = cfg.CONF
class Controller(object):
"""A wsgi controller that reports which API versions are supported."""
def index(self, req):
"""Respond to a request for all OpenStack API versions."""
def build_version_object(version, path, status):
return {
'id': 'v%s' % version,
'status': status,
'links': [
{
'rel': 'self',
'href': '%s/%s/' % (req.host_url, path),
},
],
}
version_objs = [
build_version_object(1.0, 'v1', 'CURRENT'),
]
response = webob.Response(request=req,
status=httplib.MULTIPLE_CHOICES,
content_type='application/json')
response.body = jsonutils.dumps(dict(versions=version_objs))
return response
@webob.dec.wsgify(RequestClass=wsgi.Request)
def __call__(self, req):
return self.index(req)
def create_resource(conf):
return wsgi.Resource(Controller())
|
|
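For context, the index() handler above answers with a 300 Multiple Choices status and a JSON body roughly like the one below; the host portion of the href comes from req.host_url at runtime, so the hostname shown is a hypothetical example.

{
    "versions": [
        {
            "id": "v1.0",
            "status": "CURRENT",
            "links": [
                {"rel": "self", "href": "http://voltracker.example.com/v1/"}
            ]
        }
    ]
}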
0ce23f06834c2ba9c7ece89472206e94ea664067
|
utilities/folderize.py
|
utilities/folderize.py
|
"""
For labs which were supposed to be submitted as .zip files, but the students
submitted multiple flat files.
Creates a folder based on their name that matches what gradefast expects,
and moves all of the flat files into that folder.
"""
ext = "\.py" # regex fragment for the filetype
import os
import re
for file in os.listdir():
matchObject = re.match("^[0-9]+-[0-9]+ - (.+) - (.+" + ext + ")$", file)
if (matchObject is not None):
foldername = matchObject.group(1)
foldername = "0-0 - {} - a.zip".format(foldername)
if (not os.access(foldername, os.F_OK)):
os.mkdir(foldername)
os.rename(file, foldername + "\\" + matchObject.group(2))
|
Add utility for flat files -> folder
|
Add utility for flat files -> folder
|
Python
|
mit
|
jhartz/gradefast,jhartz/gradefast,jhartz/gradefast,jhartz/gradefast
|
Add utility for flat files -> folder
|
"""
For labs which were supposed to be submitted as .zip files, but the students
submitted multiple flat files.
Creates a folder based on their name that matches what gradefast expects,
and moves all of the flat files into that folder.
"""
ext = "\.py" # regex fragment for the filetype
import os
import re
for file in os.listdir():
matchObject = re.match("^[0-9]+-[0-9]+ - (.+) - (.+" + ext + ")$", file)
if (matchObject is not None):
foldername = matchObject.group(1)
foldername = "0-0 - {} - a.zip".format(foldername)
if (not os.access(foldername, os.F_OK)):
os.mkdir(foldername)
os.rename(file, foldername + "\\" + matchObject.group(2))
|
<commit_before><commit_msg>Add utility for flat files -> folder<commit_after>
|
"""
For labs which were supposed to be submitted as .zip files, but the students
submitted multiple flat files.
Creates a folder based on their name that matches what gradefast expects,
and moves all of the flat files into that folder.
"""
ext = "\.py" # regex fragment for the filetype
import os
import re
for file in os.listdir():
matchObject = re.match("^[0-9]+-[0-9]+ - (.+) - (.+" + ext + ")$", file)
if (matchObject is not None):
foldername = matchObject.group(1)
foldername = "0-0 - {} - a.zip".format(foldername)
if (not os.access(foldername, os.F_OK)):
os.mkdir(foldername)
os.rename(file, foldername + "\\" + matchObject.group(2))
|
Add utility for flat files -> folder"""
For labs which were supposed to be submitted as .zip files, but the students
submitted multiple flat files.
Creates a folder based on their name that matches what gradefast expects,
and moves all of the flat files into that folder.
"""
ext = "\.py" # regex fragment for the filetype
import os
import re
for file in os.listdir():
matchObject = re.match("^[0-9]+-[0-9]+ - (.+) - (.+" + ext + ")$", file)
if (matchObject is not None):
foldername = matchObject.group(1)
foldername = "0-0 - {} - a.zip".format(foldername)
if (not os.access(foldername, os.F_OK)):
os.mkdir(foldername)
os.rename(file, foldername + "\\" + matchObject.group(2))
|
<commit_before><commit_msg>Add utility for flat files -> folder<commit_after>"""
For labs which were supposed to be submitted as .zip files, but the students
submitted multiple flat files.
Creates a folder based on their name that matches what gradefast expects,
and moves all of the flat files into that folder.
"""
ext = "\.py" # regex fragment for the filetype
import os
import re
for file in os.listdir():
matchObject = re.match("^[0-9]+-[0-9]+ - (.+) - (.+" + ext + ")$", file)
if (matchObject is not None):
foldername = matchObject.group(1)
foldername = "0-0 - {} - a.zip".format(foldername)
if (not os.access(foldername, os.F_OK)):
os.mkdir(foldername)
os.rename(file, foldername + "\\" + matchObject.group(2))
|
|
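A quick worked example of the renaming this utility performs; the student name and filename are made up.

import re

ext = r"\.py"  # regex fragment for the filetype, as in the script
name = "1234-5678 - Jane Doe - lab1.py"
m = re.match("^[0-9]+-[0-9]+ - (.+) - (.+" + ext + ")$", name)
print(m.group(1))                             # Jane Doe
print("0-0 - {} - a.zip".format(m.group(1)))  # 0-0 - Jane Doe - a.zip  (the folder gradefast expects)
print(m.group(2))                             # lab1.py  (moved inside that folder)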
6051f6aac666c56941ed3f0c10467762accd25d4
|
test/test_opencascade_regular_extrusion.py
|
test/test_opencascade_regular_extrusion.py
|
# -*- coding: utf-8 -*-
"""Creates regular cube mesh by extrusion.
"""
import pygmsh
from helpers import compute_volume
def test():
x = 5
y = 4
z = 3
x_layers = 10
y_layers = 5
z_layers = 3
geom = pygmsh.opencascade.Geometry()
p = geom.add_point([0, 0, 0], 1)
_, l, _ = geom.extrude(p, [x, 0, 0], num_layers=x_layers)
_, s, _ = geom.extrude(l, [0, y, 0], num_layers=y_layers)
geom.extrude(s, [0, 0, z], num_layers=z_layers)
points, cells, _, _, _ = pygmsh.generate_mesh(geom)
ref_vol = x * y * z
assert abs(compute_volume(points, cells) - ref_vol) < 1.0e-2 * ref_vol
# Each grid-cell from layered extrusion will result in 6 tetrahedrons.
ref_tetras = 6 * x_layers * y_layers * z_layers
assert len(cells["tetra"]) == ref_tetras
return points, cells
if __name__ == "__main__":
import meshio
meshio.write_points_cells("cube.vtu", *test())
|
Add regular extrusion test case for OpenCASCADE
|
Add regular extrusion test case for OpenCASCADE
|
Python
|
bsd-3-clause
|
nschloe/python4gmsh
|
Add regular extrusion test case for OpenCASCADE
|
# -*- coding: utf-8 -*-
"""Creates regular cube mesh by extrusion.
"""
import pygmsh
from helpers import compute_volume
def test():
x = 5
y = 4
z = 3
x_layers = 10
y_layers = 5
z_layers = 3
geom = pygmsh.opencascade.Geometry()
p = geom.add_point([0, 0, 0], 1)
_, l, _ = geom.extrude(p, [x, 0, 0], num_layers=x_layers)
_, s, _ = geom.extrude(l, [0, y, 0], num_layers=y_layers)
geom.extrude(s, [0, 0, z], num_layers=z_layers)
points, cells, _, _, _ = pygmsh.generate_mesh(geom)
ref_vol = x * y * z
assert abs(compute_volume(points, cells) - ref_vol) < 1.0e-2 * ref_vol
# Each grid-cell from layered extrusion will result in 6 tetrahedrons.
ref_tetras = 6 * x_layers * y_layers * z_layers
assert len(cells["tetra"]) == ref_tetras
return points, cells
if __name__ == "__main__":
import meshio
meshio.write_points_cells("cube.vtu", *test())
|
<commit_before><commit_msg>Add regular extrusion test case for OpenCASCADE<commit_after>
|
# -*- coding: utf-8 -*-
"""Creates regular cube mesh by extrusion.
"""
import pygmsh
from helpers import compute_volume
def test():
x = 5
y = 4
z = 3
x_layers = 10
y_layers = 5
z_layers = 3
geom = pygmsh.opencascade.Geometry()
p = geom.add_point([0, 0, 0], 1)
_, l, _ = geom.extrude(p, [x, 0, 0], num_layers=x_layers)
_, s, _ = geom.extrude(l, [0, y, 0], num_layers=y_layers)
geom.extrude(s, [0, 0, z], num_layers=z_layers)
points, cells, _, _, _ = pygmsh.generate_mesh(geom)
ref_vol = x * y * z
assert abs(compute_volume(points, cells) - ref_vol) < 1.0e-2 * ref_vol
# Each grid-cell from layered extrusion will result in 6 tetrahedrons.
ref_tetras = 6 * x_layers * y_layers * z_layers
assert len(cells["tetra"]) == ref_tetras
return points, cells
if __name__ == "__main__":
import meshio
meshio.write_points_cells("cube.vtu", *test())
|
Add regular extrusion test case for OpenCASCADE# -*- coding: utf-8 -*-
"""Creates regular cube mesh by extrusion.
"""
import pygmsh
from helpers import compute_volume
def test():
x = 5
y = 4
z = 3
x_layers = 10
y_layers = 5
z_layers = 3
geom = pygmsh.opencascade.Geometry()
p = geom.add_point([0, 0, 0], 1)
_, l, _ = geom.extrude(p, [x, 0, 0], num_layers=x_layers)
_, s, _ = geom.extrude(l, [0, y, 0], num_layers=y_layers)
geom.extrude(s, [0, 0, z], num_layers=z_layers)
points, cells, _, _, _ = pygmsh.generate_mesh(geom)
ref_vol = x * y * z
assert abs(compute_volume(points, cells) - ref_vol) < 1.0e-2 * ref_vol
# Each grid-cell from layered extrusion will result in 6 tetrahedrons.
ref_tetras = 6 * x_layers * y_layers * z_layers
assert len(cells["tetra"]) == ref_tetras
return points, cells
if __name__ == "__main__":
import meshio
meshio.write_points_cells("cube.vtu", *test())
|
<commit_before><commit_msg>Add regular extrusion test case for OpenCASCADE<commit_after># -*- coding: utf-8 -*-
"""Creates regular cube mesh by extrusion.
"""
import pygmsh
from helpers import compute_volume
def test():
x = 5
y = 4
z = 3
x_layers = 10
y_layers = 5
z_layers = 3
geom = pygmsh.opencascade.Geometry()
p = geom.add_point([0, 0, 0], 1)
_, l, _ = geom.extrude(p, [x, 0, 0], num_layers=x_layers)
_, s, _ = geom.extrude(l, [0, y, 0], num_layers=y_layers)
geom.extrude(s, [0, 0, z], num_layers=z_layers)
points, cells, _, _, _ = pygmsh.generate_mesh(geom)
ref_vol = x * y * z
assert abs(compute_volume(points, cells) - ref_vol) < 1.0e-2 * ref_vol
# Each grid-cell from layered extrusion will result in 6 tetrahedrons.
ref_tetras = 6 * x_layers * y_layers * z_layers
assert len(cells["tetra"]) == ref_tetras
return points, cells
if __name__ == "__main__":
import meshio
meshio.write_points_cells("cube.vtu", *test())
|
|
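For the parameters in this test the reference values work out as follows: the box volume is 5 × 4 × 3 = 60, and the layered extrusion produces 10 × 5 × 3 = 150 hexahedral grid cells, each split into 6 tetrahedra, giving the 900 tetrahedral cells checked by the final assertion.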
fdc1131e1a32c68bfa999f57dad3110457b9be8e
|
tools/check_distribute.py
|
tools/check_distribute.py
|
#!/usr/bin/python
'''Examine variables set by parsing input files and determine if they are being distributed to the other nodes or not.
This is exceptionally useful for sanity-checking when debugging parallel code.'''
import os
import re
import sys
# script resides in the tools subdirectory of the project.
tools_dir = os.path.dirname(sys.argv[0])
# hence location of the src subdirectory.
src_dir = os.path.abspath(os.path.join(tools_dir, '../src'))
# hence location of parse_input.F90
file = os.path.join(src_dir,'parse_input.F90')
f = open(file, 'r')
# Get keywords set in read_input.F90
start = re.compile('^ *do ! loop over lines in input file.', re.I)
end = re.compile('^ *end do ! end reading of input.', re.I)
read = re.compile('^.*? *call read[aif]|^.*? *call get[aif]', re.I)
setvar = re.compile('^ *[a-z_]+ ?=', re.I)
parentheses = re.compile('(?<=\()(.*?)(?=\(|\))')
data_in = False
variables = set([])
for line in f:
if start.match(line):
data_in = True
if data_in:
if read.match(line):
# e.g. if (test) call readi(a)
# obtain a.
# 1. obtain 'readi(a)'
fn_call = line.split('call')[-1].strip()
# 2. obtain 'a'.
var = parentheses.search(fn_call).group(0)
variables.update([var])
if setvar.match(line):
# e.g. a = b
# obtain a.
var = line.split('=')[-2].strip()
variables.update([var])
if end.match(line):
data_in = False
break
# special case: output filenames are not needed apart from on the head node.
variables.remove('hamiltonian_file')
variables.remove('determinant_file')
# Now get variables which are distributed in distribute_input
distributed = set([])
bcast = re.compile('(?<=call mpi_bcast\()(.*?)(?=,)', re.I)
for line in f:
bcast_match = bcast.search(line)
if bcast_match:
distributed.update([bcast_match.group(0)])
# special case: option_set is used only for some book-keeping in distribute_input.
distributed.remove('option_set')
exit = 0
if distributed.difference(variables):
print 'Distributed variables that are not read from input file are:', ' '.join(distributed.difference(variables))
exit += 1
else:
print 'All distributed variables can be set in the input file.'
if variables.difference(distributed):
print 'Variables read from input file that are not distributed are:', ' '.join(variables.difference(distributed))
exit += 2
else:
print 'All variables set in the input file are distributed.'
sys.exit(exit)
|
Add a small script to check that input options are being distributed.
|
Add a small script to check that input options are being distributed.
A useful sanity check for when doing parallel coding. ;-)
|
Python
|
lgpl-2.1
|
hande-qmc/hande,hande-qmc/hande,hande-qmc/hande,hande-qmc/hande,ruthfranklin/hande,hande-qmc/hande
|
Add a small script to check that input options are being distributed.
A useful sanity check for when doing parallel coding. ;-)
|
#!/usr/bin/python
'''Examine variables set by parsing input files and determine if they are being distributed to the other nodes or not.
This is exceptionally useful for sanity-checking when debugging parallel code.'''
import os
import re
import sys
# script resides in the tools subdirectory of the project.
tools_dir = os.path.dirname(sys.argv[0])
# hence location of the src subdirectory.
src_dir = os.path.abspath(os.path.join(tools_dir, '../src'))
# hence location of parse_input.F90
file = os.path.join(src_dir,'parse_input.F90')
f = open(file, 'r')
# Get keywords set in read_input.F90
start = re.compile('^ *do ! loop over lines in input file.', re.I)
end = re.compile('^ *end do ! end reading of input.', re.I)
read = re.compile('^.*? *call read[aif]|^.*? *call get[aif]', re.I)
setvar = re.compile('^ *[a-z_]+ ?=', re.I)
parentheses = re.compile('(?<=\()(.*?)(?=\(|\))')
data_in = False
variables = set([])
for line in f:
if start.match(line):
data_in = True
if data_in:
if read.match(line):
# e.g. if (test) call readi(a)
# obtain a.
# 1. obtain 'readi(a)'
fn_call = line.split('call')[-1].strip()
# 2. obtain 'a'.
var = parentheses.search(fn_call).group(0)
variables.update([var])
if setvar.match(line):
# e.g. a = b
# obtain a.
var = line.split('=')[-2].strip()
variables.update([var])
if end.match(line):
data_in = False
break
# special case: output filenames are not needed apart from on the head node.
variables.remove('hamiltonian_file')
variables.remove('determinant_file')
# Now get variables which are distributed in distribute_input
distributed = set([])
bcast = re.compile('(?<=call mpi_bcast\()(.*?)(?=,)', re.I)
for line in f:
bcast_match = bcast.search(line)
if bcast_match:
distributed.update([bcast_match.group(0)])
# special case: option_set is used only for some book-keeping in distribute_input.
distributed.remove('option_set')
exit = 0
if distributed.difference(variables):
print 'Distributed variables that are not read from input file are:', ' '.join(distributed.difference(variables))
exit += 1
else:
print 'All distributed variables can be set in the input file.'
if variables.difference(distributed):
print 'Variables read from input file that are not distributed are:', ' '.join(variables.difference(distributed))
exit += 2
else:
print 'All variables set in the input file are distributed.'
sys.exit(exit)
|
<commit_before><commit_msg>Add a small script to check that input options are being distributed.
A useful sanity check for when doing parallel coding. ;-)<commit_after>
|
#!/usr/bin/python
'''Examine variables set by parsing input files and determine if they are being distributed to the other nodes or not.
This is exceptionally useful for sanity-checking when debugging parallel code.'''
import os
import re
import sys
# script resides in the tools subdirectory of the project.
tools_dir = os.path.dirname(sys.argv[0])
# hence location of the src subdirectory.
src_dir = os.path.abspath(os.path.join(tools_dir, '../src'))
# hence location of parse_input.F90
file = os.path.join(src_dir,'parse_input.F90')
f = open(file, 'r')
# Get keywords set in read_input.F90
start = re.compile('^ *do ! loop over lines in input file.', re.I)
end = re.compile('^ *end do ! end reading of input.', re.I)
read = re.compile('^.*? *call read[aif]|^.*? *call get[aif]', re.I)
setvar = re.compile('^ *[a-z_]+ ?=', re.I)
parentheses = re.compile('(?<=\()(.*?)(?=\(|\))')
data_in = False
variables = set([])
for line in f:
if start.match(line):
data_in = True
if data_in:
if read.match(line):
# e.g. if (test) call readi(a)
# obtain a.
# 1. obtain 'readi(a)'
fn_call = line.split('call')[-1].strip()
# 2. obtain 'a'.
var = parentheses.search(fn_call).group(0)
variables.update([var])
if setvar.match(line):
# e.g. a = b
# obtain a.
var = line.split('=')[-2].strip()
variables.update([var])
if end.match(line):
data_in = False
break
# special case: output filenames are not needed apart from on the head node.
variables.remove('hamiltonian_file')
variables.remove('determinant_file')
# Now get variables which are distributed in distribute_input
distributed = set([])
bcast = re.compile('(?<=call mpi_bcast\()(.*?)(?=,)', re.I)
for line in f:
bcast_match = bcast.search(line)
if bcast_match:
distributed.update([bcast_match.group(0)])
# special case: option_set is used only for some book-keeping in distribute_input.
distributed.remove('option_set')
exit = 0
if distributed.difference(variables):
print 'Distributed variables that are not read from input file are:', ' '.join(distributed.difference(variables))
exit += 1
else:
print 'All distributed variables can be set in the input file.'
if variables.difference(distributed):
print 'Variables read from input file that are not distributed are:', ' '.join(variables.difference(distributed))
exit += 2
else:
print 'All variables set in the input file are distributed.'
sys.exit(exit)
|
Add a small script to check that input options are being distributed.
A useful sanity check for when doing parallel coding. ;-)#!/usr/bin/python
'''Examine variables set by parsing input files and determine if they are being distributed to the other nodes or not.
This is exceptionally useful for sanity-checking when debugging parallel code.'''
import os
import re
import sys
# script resides in the tools subdirectory of the project.
tools_dir = os.path.dirname(sys.argv[0])
# hence location of the src subdirectory.
src_dir = os.path.abspath(os.path.join(tools_dir, '../src'))
# hence location of parse_input.F90
file = os.path.join(src_dir,'parse_input.F90')
f = open(file, 'r')
# Get keywords set in read_input.F90
start = re.compile('^ *do ! loop over lines in input file.', re.I)
end = re.compile('^ *end do ! end reading of input.', re.I)
read = re.compile('^.*? *call read[aif]|^.*? *call get[aif]', re.I)
setvar = re.compile('^ *[a-z_]+ ?=', re.I)
parentheses = re.compile('(?<=\()(.*?)(?=\(|\))')
data_in = False
variables = set([])
for line in f:
if start.match(line):
data_in = True
if data_in:
if read.match(line):
# e.g. if (test) call readi(a)
# obtain a.
# 1. obtain 'readi(a)'
fn_call = line.split('call')[-1].strip()
# 2. obtain 'a'.
var = parentheses.search(fn_call).group(0)
variables.update([var])
if setvar.match(line):
# e.g. a = b
# obtain a.
var = line.split('=')[-2].strip()
variables.update([var])
if end.match(line):
data_in = False
break
# special case: output filenames are not needed apart from on the head node.
variables.remove('hamiltonian_file')
variables.remove('determinant_file')
# Now get variables which are distributed in distribute_input
distributed = set([])
bcast = re.compile('(?<=call mpi_bcast\()(.*?)(?=,)', re.I)
for line in f:
bcast_match = bcast.search(line)
if bcast_match:
distributed.update([bcast_match.group(0)])
# special case: option_set is used only for some book-keeping in distribute_input.
distributed.remove('option_set')
exit = 0
if distributed.difference(variables):
print 'Distributed variables that are not read from input file are:', ' '.join(distributed.difference(variables))
exit += 1
else:
print 'All distributed variables can be set in the input file.'
if variables.difference(distributed):
print 'Variables read from input file that are not distributed are:', ' '.join(variables.difference(distributed))
exit += 2
else:
print 'All variables set in the input file are distributed.'
sys.exit(exit)
|
<commit_before><commit_msg>Add a small script to check that input options are being distributed.
A useful sanity check for when doing parallel coding. ;-)<commit_after>#!/usr/bin/python
'''Examine variables set by parsing input files and determine if they are being distributed to the other nodes or not.
This is exceptionally useful for sanity-checking when debugging parallel code.'''
import os
import re
import sys
# script resides in the tools subdirectory of the project.
tools_dir = os.path.dirname(sys.argv[0])
# hence location of the src subdirectory.
src_dir = os.path.abspath(os.path.join(tools_dir, '../src'))
# hence location of parse_input.F90
file = os.path.join(src_dir,'parse_input.F90')
f = open(file, 'r')
# Get keywords set in read_input.F90
start = re.compile('^ *do ! loop over lines in input file.', re.I)
end = re.compile('^ *end do ! end reading of input.', re.I)
read = re.compile('^.*? *call read[aif]|^.*? *call get[aif]', re.I)
setvar = re.compile('^ *[a-z_]+ ?=', re.I)
parentheses = re.compile('(?<=\()(.*?)(?=\(|\))')
data_in = False
variables = set([])
for line in f:
if start.match(line):
data_in = True
if data_in:
if read.match(line):
# e.g. if (test) call readi(a)
# obtain a.
# 1. obtain 'readi(a)'
fn_call = line.split('call')[-1].strip()
# 2. obtain 'a'.
var = parentheses.search(fn_call).group(0)
variables.update([var])
if setvar.match(line):
# e.g. a = b
# obtain a.
var = line.split('=')[-2].strip()
variables.update([var])
if end.match(line):
data_in = False
break
# special case: output filenames are not needed apart from on the head node.
variables.remove('hamiltonian_file')
variables.remove('determinant_file')
# Now get variables which are distributed in distribute_input
distributed = set([])
bcast = re.compile('(?<=call mpi_bcast\()(.*?)(?=,)', re.I)
for line in f:
bcast_match = bcast.search(line)
if bcast_match:
distributed.update([bcast_match.group(0)])
# special case: option_set is used only for some book-keeping in distribute_input.
distributed.remove('option_set')
exit = 0
if distributed.difference(variables):
print 'Distributed variables that are not read from input file are:', ' '.join(distributed.difference(variables))
exit += 1
else:
print 'All distributed variables can be set in the input file.'
if variables.difference(distributed):
print 'Variables read from input file that are not distributed are:', ' '.join(variables.difference(distributed))
exit += 2
else:
print 'All variables set in the input file are distributed.'
sys.exit(exit)
|
|
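Two details of the script above that are easy to miss: the lookaround regexes pull out bare variable names, and the exit status is a two-bit mask. The fragment below exercises the same patterns on made-up lines; the Fortran variable name is a placeholder.

import re

parentheses = re.compile(r'(?<=\()(.*?)(?=\(|\))')
bcast = re.compile(r'(?<=call mpi_bcast\()(.*?)(?=,)', re.I)

print(parentheses.search('call readi(sym_max)').group(0))                        # sym_max
print(bcast.search('call mpi_bcast(sym_max, 1, mpi_integer, 0, ...)').group(0))  # sym_max

# Exit codes: 0 = fully consistent, 1 = distributed but never read,
# 2 = read but never distributed, 3 = both problems at once.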
003738065233d9b9834fdc77a25177bd5a7b384e
|
corehq/apps/data_interfaces/migrations/0021_remove_automaticupdaterule_migrated.py
|
corehq/apps/data_interfaces/migrations/0021_remove_automaticupdaterule_migrated.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-09-14 12:21
from __future__ import unicode_literals
from __future__ import absolute_import
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('data_interfaces', '0020_make_migrated_nullable'),
]
operations = [
migrations.RemoveField(
model_name='automaticupdaterule',
name='migrated',
),
]
|
Remove 'migrated' field from AutomaticUpdateRule
|
Remove 'migrated' field from AutomaticUpdateRule
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
Remove 'migrated' field from AutomaticUpdateRule
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-09-14 12:21
from __future__ import unicode_literals
from __future__ import absolute_import
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('data_interfaces', '0020_make_migrated_nullable'),
]
operations = [
migrations.RemoveField(
model_name='automaticupdaterule',
name='migrated',
),
]
|
<commit_before><commit_msg>Remove 'migrated' field from AutomaticUpdateRule<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-09-14 12:21
from __future__ import unicode_literals
from __future__ import absolute_import
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('data_interfaces', '0020_make_migrated_nullable'),
]
operations = [
migrations.RemoveField(
model_name='automaticupdaterule',
name='migrated',
),
]
|
Remove 'migrated' field from AutomaticUpdateRule# -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-09-14 12:21
from __future__ import unicode_literals
from __future__ import absolute_import
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('data_interfaces', '0020_make_migrated_nullable'),
]
operations = [
migrations.RemoveField(
model_name='automaticupdaterule',
name='migrated',
),
]
|
<commit_before><commit_msg>Remove 'migrated' field from AutomaticUpdateRule<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-09-14 12:21
from __future__ import unicode_literals
from __future__ import absolute_import
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('data_interfaces', '0020_make_migrated_nullable'),
]
operations = [
migrations.RemoveField(
model_name='automaticupdaterule',
name='migrated',
),
]
|
|
604f299cf6398fa490acc48c55f7999e935283ac
|
spacy/lang/char_classes.py
|
spacy/lang/char_classes.py
|
# coding: utf8
from __future__ import unicode_literals
import regex as re
re.DEFAULT_VERSION = re.VERSION1
merge_char_classes = lambda classes: '[{}]'.format('||'.join(classes))
split_chars = lambda char: list(char.strip().split(' '))
merge_chars = lambda char: char.strip().replace(' ', '|')
_bengali = r'[\p{L}&&\p{Bengali}]'
_hebrew = r'[\p{L}&&\p{Hebrew}]'
_latin_lower = r'[\p{Ll}&&\p{Latin}]'
_latin_upper = r'[\p{Lu}&&\p{Latin}]'
_latin = r'[[\p{Ll}||\p{Lu}]&&\p{Latin}]'
_upper = [_latin_upper]
_lower = [_latin_lower]
_uncased = [_bengali, _hebrew]
ALPHA = merge_char_classes(_upper + _lower + _uncased)
ALPHA_LOWER = merge_char_classes(_lower + _uncased)
ALPHA_UPPER = merge_char_classes(_upper + _uncased)
_units = ('km km² km³ m m² m³ dm dm² dm³ cm cm² cm³ mm mm² mm³ ha µm nm yd in ft '
'kg g mg µg t lb oz m/s km/h kmh mph hPa Pa mbar mb MB kb KB gb GB tb '
'TB T G M K')
_currency = r'\$ £ € ¥ ฿ US\$ C\$ A\$'
_punct = r'… , : ; \! \? ¿ ¡ \( \) \[ \] \{ \} < > _ # \* &'
_quotes = r'\' \'\' " ” “ `` ` ‘ ´ ‚ , „ » «'
_hyphens = '- – — -- ---'
UNITS = merge_chars(_units)
CURRENCY = merge_chars(_currency)
QUOTES = merge_chars(_quotes)
PUNCT = merge_chars(_punct)
HYPHENS = merge_chars(_hyphens)
LIST_UNITS = split_chars(_units)
LIST_CURRENCY = split_chars(_currency)
LIST_QUOTES = split_chars(_quotes)
LIST_PUNCT = split_chars(_punct)
LIST_HYPHENS = split_chars(_hyphens)
LIST_ELLIPSES = [r'\.\.+', '…']
|
Add char classes to global language data
|
Add char classes to global language data
|
Python
|
mit
|
explosion/spaCy,recognai/spaCy,honnibal/spaCy,spacy-io/spaCy,spacy-io/spaCy,spacy-io/spaCy,aikramer2/spaCy,aikramer2/spaCy,explosion/spaCy,recognai/spaCy,honnibal/spaCy,spacy-io/spaCy,aikramer2/spaCy,spacy-io/spaCy,explosion/spaCy,recognai/spaCy,explosion/spaCy,aikramer2/spaCy,aikramer2/spaCy,aikramer2/spaCy,recognai/spaCy,honnibal/spaCy,honnibal/spaCy,recognai/spaCy,spacy-io/spaCy,explosion/spaCy,recognai/spaCy,explosion/spaCy
|
Add char classes to global language data
|
# coding: utf8
from __future__ import unicode_literals
import regex as re
re.DEFAULT_VERSION = re.VERSION1
merge_char_classes = lambda classes: '[{}]'.format('||'.join(classes))
split_chars = lambda char: list(char.strip().split(' '))
merge_chars = lambda char: char.strip().replace(' ', '|')
_bengali = r'[\p{L}&&\p{Bengali}]'
_hebrew = r'[\p{L}&&\p{Hebrew}]'
_latin_lower = r'[\p{Ll}&&\p{Latin}]'
_latin_upper = r'[\p{Lu}&&\p{Latin}]'
_latin = r'[[\p{Ll}||\p{Lu}]&&\p{Latin}]'
_upper = [_latin_upper]
_lower = [_latin_lower]
_uncased = [_bengali, _hebrew]
ALPHA = merge_char_classes(_upper + _lower + _uncased)
ALPHA_LOWER = merge_char_classes(_lower + _uncased)
ALPHA_UPPER = merge_char_classes(_upper + _uncased)
_units = ('km km² km³ m m² m³ dm dm² dm³ cm cm² cm³ mm mm² mm³ ha µm nm yd in ft '
'kg g mg µg t lb oz m/s km/h kmh mph hPa Pa mbar mb MB kb KB gb GB tb '
'TB T G M K')
_currency = r'\$ £ € ¥ ฿ US\$ C\$ A\$'
_punct = r'… , : ; \! \? ¿ ¡ \( \) \[ \] \{ \} < > _ # \* &'
_quotes = r'\' \'\' " ” “ `` ` ‘ ´ ‚ , „ » «'
_hyphens = '- – — -- ---'
UNITS = merge_chars(_units)
CURRENCY = merge_chars(_currency)
QUOTES = merge_chars(_quotes)
PUNCT = merge_chars(_punct)
HYPHENS = merge_chars(_hyphens)
LIST_UNITS = split_chars(_units)
LIST_CURRENCY = split_chars(_currency)
LIST_QUOTES = split_chars(_quotes)
LIST_PUNCT = split_chars(_punct)
LIST_HYPHENS = split_chars(_hyphens)
LIST_ELLIPSES = [r'\.\.+', '…']
|
<commit_before><commit_msg>Add char classes to global language data<commit_after>
|
# coding: utf8
from __future__ import unicode_literals
import regex as re
re.DEFAULT_VERSION = re.VERSION1
merge_char_classes = lambda classes: '[{}]'.format('||'.join(classes))
split_chars = lambda char: list(char.strip().split(' '))
merge_chars = lambda char: char.strip().replace(' ', '|')
_bengali = r'[\p{L}&&\p{Bengali}]'
_hebrew = r'[\p{L}&&\p{Hebrew}]'
_latin_lower = r'[\p{Ll}&&\p{Latin}]'
_latin_upper = r'[\p{Lu}&&\p{Latin}]'
_latin = r'[[\p{Ll}||\p{Lu}]&&\p{Latin}]'
_upper = [_latin_upper]
_lower = [_latin_lower]
_uncased = [_bengali, _hebrew]
ALPHA = merge_char_classes(_upper + _lower + _uncased)
ALPHA_LOWER = merge_char_classes(_lower + _uncased)
ALPHA_UPPER = merge_char_classes(_upper + _uncased)
_units = ('km km² km³ m m² m³ dm dm² dm³ cm cm² cm³ mm mm² mm³ ha µm nm yd in ft '
'kg g mg µg t lb oz m/s km/h kmh mph hPa Pa mbar mb MB kb KB gb GB tb '
'TB T G M K')
_currency = r'\$ £ € ¥ ฿ US\$ C\$ A\$'
_punct = r'… , : ; \! \? ¿ ¡ \( \) \[ \] \{ \} < > _ # \* &'
_quotes = r'\' \'\' " ” “ `` ` ‘ ´ ‚ , „ » «'
_hyphens = '- – — -- ---'
UNITS = merge_chars(_units)
CURRENCY = merge_chars(_currency)
QUOTES = merge_chars(_quotes)
PUNCT = merge_chars(_punct)
HYPHENS = merge_chars(_hyphens)
LIST_UNITS = split_chars(_units)
LIST_CURRENCY = split_chars(_currency)
LIST_QUOTES = split_chars(_quotes)
LIST_PUNCT = split_chars(_punct)
LIST_HYPHENS = split_chars(_hyphens)
LIST_ELLIPSES = [r'\.\.+', '…']
|
Add char classes to global language data# coding: utf8
from __future__ import unicode_literals
import regex as re
re.DEFAULT_VERSION = re.VERSION1
merge_char_classes = lambda classes: '[{}]'.format('||'.join(classes))
split_chars = lambda char: list(char.strip().split(' '))
merge_chars = lambda char: char.strip().replace(' ', '|')
_bengali = r'[\p{L}&&\p{Bengali}]'
_hebrew = r'[\p{L}&&\p{Hebrew}]'
_latin_lower = r'[\p{Ll}&&\p{Latin}]'
_latin_upper = r'[\p{Lu}&&\p{Latin}]'
_latin = r'[[\p{Ll}||\p{Lu}]&&\p{Latin}]'
_upper = [_latin_upper]
_lower = [_latin_lower]
_uncased = [_bengali, _hebrew]
ALPHA = merge_char_classes(_upper + _lower + _uncased)
ALPHA_LOWER = merge_char_classes(_lower + _uncased)
ALPHA_UPPER = merge_char_classes(_upper + _uncased)
_units = ('km km² km³ m m² m³ dm dm² dm³ cm cm² cm³ mm mm² mm³ ha µm nm yd in ft '
'kg g mg µg t lb oz m/s km/h kmh mph hPa Pa mbar mb MB kb KB gb GB tb '
'TB T G M K')
_currency = r'\$ £ € ¥ ฿ US\$ C\$ A\$'
_punct = r'… , : ; \! \? ¿ ¡ \( \) \[ \] \{ \} < > _ # \* &'
_quotes = r'\' \'\' " ” “ `` ` ‘ ´ ‚ , „ » «'
_hyphens = '- – — -- ---'
UNITS = merge_chars(_units)
CURRENCY = merge_chars(_currency)
QUOTES = merge_chars(_quotes)
PUNCT = merge_chars(_punct)
HYPHENS = merge_chars(_hyphens)
LIST_UNITS = split_chars(_units)
LIST_CURRENCY = split_chars(_currency)
LIST_QUOTES = split_chars(_quotes)
LIST_PUNCT = split_chars(_punct)
LIST_HYPHENS = split_chars(_hyphens)
LIST_ELLIPSES = [r'\.\.+', '…']
|
<commit_before><commit_msg>Add char classes to global language data<commit_after># coding: utf8
from __future__ import unicode_literals
import regex as re
re.DEFAULT_VERSION = re.VERSION1
merge_char_classes = lambda classes: '[{}]'.format('||'.join(classes))
split_chars = lambda char: list(char.strip().split(' '))
merge_chars = lambda char: char.strip().replace(' ', '|')
_bengali = r'[\p{L}&&\p{Bengali}]'
_hebrew = r'[\p{L}&&\p{Hebrew}]'
_latin_lower = r'[\p{Ll}&&\p{Latin}]'
_latin_upper = r'[\p{Lu}&&\p{Latin}]'
_latin = r'[[\p{Ll}||\p{Lu}]&&\p{Latin}]'
_upper = [_latin_upper]
_lower = [_latin_lower]
_uncased = [_bengali, _hebrew]
ALPHA = merge_char_classes(_upper + _lower + _uncased)
ALPHA_LOWER = merge_char_classes(_lower + _uncased)
ALPHA_UPPER = merge_char_classes(_upper + _uncased)
_units = ('km km² km³ m m² m³ dm dm² dm³ cm cm² cm³ mm mm² mm³ ha µm nm yd in ft '
'kg g mg µg t lb oz m/s km/h kmh mph hPa Pa mbar mb MB kb KB gb GB tb '
'TB T G M K')
_currency = r'\$ £ € ¥ ฿ US\$ C\$ A\$'
_punct = r'… , : ; \! \? ¿ ¡ \( \) \[ \] \{ \} < > _ # \* &'
_quotes = r'\' \'\' " ” “ `` ` ‘ ´ ‚ , „ » «'
_hyphens = '- – — -- ---'
UNITS = merge_chars(_units)
CURRENCY = merge_chars(_currency)
QUOTES = merge_chars(_quotes)
PUNCT = merge_chars(_punct)
HYPHENS = merge_chars(_hyphens)
LIST_UNITS = split_chars(_units)
LIST_CURRENCY = split_chars(_currency)
LIST_QUOTES = split_chars(_quotes)
LIST_PUNCT = split_chars(_punct)
LIST_HYPHENS = split_chars(_hyphens)
LIST_ELLIPSES = [r'\.\.+', '…']
|
|
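The three helper lambdas above are thin string plumbing around the space-separated definitions; redefining them locally and printing a few results makes the output formats clearer (the inputs are taken from the constants in the module).

merge_char_classes = lambda classes: '[{}]'.format('||'.join(classes))
split_chars = lambda char: list(char.strip().split(' '))
merge_chars = lambda char: char.strip().replace(' ', '|')

print(merge_chars('- – — -- ---'))   # -|–|—|--|---          (regex alternation, as in HYPHENS)
print(split_chars('… , : ;'))        # ['…', ',', ':', ';']  (list form, as in LIST_PUNCT)
print(merge_char_classes([r'[\p{Lu}&&\p{Latin}]', r'[\p{L}&&\p{Hebrew}]']))
# [[\p{Lu}&&\p{Latin}]||[\p{L}&&\p{Hebrew}]]                 (combined class, same shape as ALPHA_UPPER)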
dad00fa29fb7084ec62f299c5c4cae5fa5d35a2f
|
zephyr/management/commands/create_realm.py
|
zephyr/management/commands/create_realm.py
|
from django.core.management.base import BaseCommand
from zephyr.models import Realm, Message, UserProfile, Recipient, create_stream_if_needed, \
get_client
from zephyr.views import do_send_message
from django.utils.timezone import now
class Command(BaseCommand):
help = "Create a realm for the specified domain(s)."
def handle(self, *args, **options):
for domain in args:
realm, created = Realm.objects.get_or_create(domain=domain)
if not created:
print domain + ": Realm already exists!"
else:
message = Message()
message.sender = UserProfile.objects.get(user__email="humbug+signups@humbughq.com")
message.recipient = Recipient.objects.get(type_id=create_stream_if_needed(
message.sender.realm, "signups").id, type=Recipient.STREAM)
message.subject = domain
message.content = "Signups enabled."
message.pub_date = now()
message.sending_client = get_client("Internal")
do_send_message(message)
print domain + ": Created."
|
Add script to create new realms.
|
Add script to create new realms.
(imported from commit 764d41f28b1af10860ea4d03a6aa983f40d3ffd7)
|
Python
|
apache-2.0
|
paxapy/zulip,sonali0901/zulip,yocome/zulip,aps-sids/zulip,Drooids/zulip,vabs22/zulip,m1ssou/zulip,luyifan/zulip,mansilladev/zulip,sharmaeklavya2/zulip,gigawhitlocks/zulip,andersk/zulip,Batterfii/zulip,amyliu345/zulip,alliejones/zulip,ipernet/zulip,calvinleenyc/zulip,jphilipsen05/zulip,tiansiyuan/zulip,hustlzp/zulip,KingxBanana/zulip,firstblade/zulip,blaze225/zulip,Cheppers/zulip,tdr130/zulip,akuseru/zulip,LAndreas/zulip,hayderimran7/zulip,tdr130/zulip,stamhe/zulip,avastu/zulip,xuanhan863/zulip,fw1121/zulip,saitodisse/zulip,TigorC/zulip,kou/zulip,TigorC/zulip,Frouk/zulip,hafeez3000/zulip,udxxabp/zulip,alliejones/zulip,dnmfarrell/zulip,jimmy54/zulip,amallia/zulip,jimmy54/zulip,aakash-cr7/zulip,TigorC/zulip,qq1012803704/zulip,Juanvulcano/zulip,vakila/zulip,dnmfarrell/zulip,MayB/zulip,praveenaki/zulip,dxq-git/zulip,christi3k/zulip,dwrpayne/zulip,moria/zulip,punchagan/zulip,RobotCaleb/zulip,zachallaun/zulip,EasonYi/zulip,jimmy54/zulip,yocome/zulip,moria/zulip,shaunstanislaus/zulip,shrikrishnaholla/zulip,ahmadassaf/zulip,Galexrt/zulip,brockwhittaker/zulip,sharmaeklavya2/zulip,j831/zulip,jimmy54/zulip,blaze225/zulip,proliming/zulip,adnanh/zulip,joyhchen/zulip,tbutter/zulip,Diptanshu8/zulip,AZtheAsian/zulip,rht/zulip,aliceriot/zulip,akuseru/zulip,dawran6/zulip,sup95/zulip,Cheppers/zulip,j831/zulip,joyhchen/zulip,jrowan/zulip,codeKonami/zulip,isht3/zulip,suxinde2009/zulip,willingc/zulip,moria/zulip,zofuthan/zulip,thomasboyt/zulip,zulip/zulip,kaiyuanheshang/zulip,itnihao/zulip,kaiyuanheshang/zulip,ApsOps/zulip,udxxabp/zulip,guiquanz/zulip,m1ssou/zulip,rht/zulip,bitemyapp/zulip,ahmadassaf/zulip,ashwinirudrappa/zulip,joshisa/zulip,aakash-cr7/zulip,aakash-cr7/zulip,LeeRisk/zulip,dawran6/zulip,jrowan/zulip,LAndreas/zulip,arpitpanwar/zulip,Gabriel0402/zulip,jonesgithub/zulip,krtkmj/zulip,esander91/zulip,andersk/zulip,joyhchen/zulip,alliejones/zulip,Frouk/zulip,souravbadami/zulip,karamcnair/zulip,Galexrt/zulip,he15his/zulip,PaulPetring/zulip,vaidap/zulip,ikasumiwt/zulip,christi3k/zulip,krtkmj/zulip,huangkebo/zulip,deer-hope/zulip,jimmy54/zulip,voidException/zulip,xuanhan863/zulip,m1ssou/zulip,showell/zulip,mdavid/zulip,jeffcao/zulip,zhaoweigg/zulip,sup95/zulip,SmartPeople/zulip,wangdeshui/zulip,natanovia/zulip,Jianchun1/zulip,developerfm/zulip,Suninus/zulip,eeshangarg/zulip,jainayush975/zulip,hengqujushi/zulip,bastianh/zulip,bssrdf/zulip,themass/zulip,tommyip/zulip,dotcool/zulip,Cheppers/zulip,wdaher/zulip,samatdav/zulip,Suninus/zulip,mahim97/zulip,zhaoweigg/zulip,verma-varsha/zulip,amyliu345/zulip,DazWorrall/zulip,paxapy/zulip,babbage/zulip,blaze225/zulip,xuxiao/zulip,eeshangarg/zulip,dotcool/zulip,hj3938/zulip,DazWorrall/zulip,karamcnair/zulip,schatt/zulip,gkotian/zulip,avastu/zulip,susansls/zulip,saitodisse/zulip,noroot/zulip,zwily/zulip,alliejones/zulip,umkay/zulip,sharmaeklavya2/zulip,proliming/zulip,ericzhou2008/zulip,pradiptad/zulip,LeeRisk/zulip,JPJPJPOPOP/zulip,vakila/zulip,mansilladev/zulip,technicalpickles/zulip,dnmfarrell/zulip,lfranchi/zulip,hackerkid/zulip,wangdeshui/zulip,eeshangarg/zulip,he15his/zulip,timabbott/zulip,swinghu/zulip,vikas-parashar/zulip,gigawhitlocks/zulip,huangkebo/zulip,ashwinirudrappa/zulip,hayderimran7/zulip,karamcnair/zulip,blaze225/zulip,arpitpanwar/zulip,dotcool/zulip,RobotCaleb/zulip,swinghu/zulip,glovebx/zulip,MayB/zulip,babbage/zulip,developerfm/zulip,ApsOps/zulip,ryanbackman/zulip,noroot/zulip,amallia/zulip,brainwane/zulip,xuanhan863/zulip,ashwinirudrappa/zulip,hayderimran7/zulip,souravbadami/zulip,yuvipanda/zulip,easyfmxu/zulip,amanharitsh123/zulip,peguin40/zulip,
bssrdf/zulip,dattatreya303/zulip,jonesgithub/zulip,pradiptad/zulip,wweiradio/zulip,Gabriel0402/zulip,voidException/zulip,jerryge/zulip,shrikrishnaholla/zulip,zacps/zulip,ufosky-server/zulip,jphilipsen05/zulip,hustlzp/zulip,sonali0901/zulip,codeKonami/zulip,bssrdf/zulip,developerfm/zulip,peguin40/zulip,TigorC/zulip,verma-varsha/zulip,amanharitsh123/zulip,samatdav/zulip,nicholasbs/zulip,SmartPeople/zulip,Vallher/zulip,showell/zulip,KingxBanana/zulip,Frouk/zulip,hengqujushi/zulip,timabbott/zulip,saitodisse/zulip,krtkmj/zulip,aakash-cr7/zulip,wweiradio/zulip,jimmy54/zulip,dhcrzf/zulip,LeeRisk/zulip,tiansiyuan/zulip,RobotCaleb/zulip,tiansiyuan/zulip,Cheppers/zulip,esander91/zulip,vakila/zulip,he15his/zulip,paxapy/zulip,Gabriel0402/zulip,rishig/zulip,zwily/zulip,gkotian/zulip,peiwei/zulip,so0k/zulip,LAndreas/zulip,krtkmj/zulip,so0k/zulip,sup95/zulip,karamcnair/zulip,suxinde2009/zulip,brainwane/zulip,Gabriel0402/zulip,qq1012803704/zulip,alliejones/zulip,hustlzp/zulip,so0k/zulip,yocome/zulip,ryansnowboarder/zulip,JPJPJPOPOP/zulip,technicalpickles/zulip,dawran6/zulip,mansilladev/zulip,brainwane/zulip,ufosky-server/zulip,synicalsyntax/zulip,eastlhu/zulip,jeffcao/zulip,thomasboyt/zulip,udxxabp/zulip,moria/zulip,LeeRisk/zulip,isht3/zulip,sharmaeklavya2/zulip,guiquanz/zulip,Vallher/zulip,blaze225/zulip,technicalpickles/zulip,noroot/zulip,bitemyapp/zulip,jackrzhang/zulip,ashwinirudrappa/zulip,peiwei/zulip,JPJPJPOPOP/zulip,grave-w-grave/zulip,ufosky-server/zulip,hafeez3000/zulip,Qgap/zulip,mohsenSy/zulip,amallia/zulip,esander91/zulip,tbutter/zulip,dxq-git/zulip,guiquanz/zulip,vakila/zulip,calvinleenyc/zulip,deer-hope/zulip,levixie/zulip,hustlzp/zulip,babbage/zulip,showell/zulip,seapasulli/zulip,esander91/zulip,johnny9/zulip,mdavid/zulip,Vallher/zulip,LAndreas/zulip,easyfmxu/zulip,udxxabp/zulip,aliceriot/zulip,proliming/zulip,lfranchi/zulip,vaidap/zulip,schatt/zulip,tommyip/zulip,Vallher/zulip,hustlzp/zulip,vaidap/zulip,amyliu345/zulip,arpitpanwar/zulip,codeKonami/zulip,bluesea/zulip,firstblade/zulip,tbutter/zulip,gkotian/zulip,willingc/zulip,zacps/zulip,amanharitsh123/zulip,mahim97/zulip,bowlofstew/zulip,amallia/zulip,isht3/zulip,Frouk/zulip,KingxBanana/zulip,luyifan/zulip,aliceriot/zulip,zorojean/zulip,kaiyuanheshang/zulip,alliejones/zulip,wangdeshui/zulip,wavelets/zulip,adnanh/zulip,ApsOps/zulip,jainayush975/zulip,ufosky-server/zulip,hj3938/zulip,themass/zulip,arpith/zulip,punchagan/zulip,ipernet/zulip,tbutter/zulip,Suninus/zulip,Jianchun1/zulip,stamhe/zulip,LAndreas/zulip,JPJPJPOPOP/zulip,bastianh/zulip,ipernet/zulip,peguin40/zulip,swinghu/zulip,eastlhu/zulip,atomic-labs/zulip,hafeez3000/zulip,bitemyapp/zulip,jonesgithub/zulip,amallia/zulip,suxinde2009/zulip,dnmfarrell/zulip,itnihao/zulip,wavelets/zulip,tommyip/zulip,zofuthan/zulip,proliming/zulip,j831/zulip,verma-varsha/zulip,vakila/zulip,Frouk/zulip,dawran6/zulip,gigawhitlocks/zulip,susansls/zulip,susansls/zulip,hackerkid/zulip,niftynei/zulip,DazWorrall/zulip,saitodisse/zulip,qq1012803704/zulip,firstblade/zulip,susansls/zulip,Galexrt/zulip,RobotCaleb/zulip,dawran6/zulip,glovebx/zulip,lfranchi/zulip,ApsOps/zulip,swinghu/zulip,zwily/zulip,rishig/zulip,brockwhittaker/zulip,JanzTam/zulip,SmartPeople/zulip,tiansiyuan/zulip,Cheppers/zulip,wdaher/zulip,mohsenSy/zulip,DazWorrall/zulip,deer-hope/zulip,pradiptad/zulip,shubhamdhama/zulip,jphilipsen05/zulip,jphilipsen05/zulip,jessedhillon/zulip,akuseru/zulip,Galexrt/zulip,nicholasbs/zulip,MariaFaBella85/zulip,shubhamdhama/zulip,themass/zulip,dattatreya303/zulip,johnnygaddarr/zulip,synicalsyntax/zulip,ashwinirudr
appa/zulip,tiansiyuan/zulip,EasonYi/zulip,praveenaki/zulip,tommyip/zulip,vabs22/zulip,akuseru/zulip,praveenaki/zulip,amyliu345/zulip,brainwane/zulip,yocome/zulip,reyha/zulip,bowlofstew/zulip,Qgap/zulip,voidException/zulip,eastlhu/zulip,andersk/zulip,jrowan/zulip,hengqujushi/zulip,Batterfii/zulip,hackerkid/zulip,wweiradio/zulip,luyifan/zulip,qq1012803704/zulip,mdavid/zulip,jeffcao/zulip,EasonYi/zulip,esander91/zulip,themass/zulip,ahmadassaf/zulip,stamhe/zulip,shaunstanislaus/zulip,developerfm/zulip,mdavid/zulip,kou/zulip,johnny9/zulip,kou/zulip,seapasulli/zulip,willingc/zulip,KJin99/zulip,JanzTam/zulip,zofuthan/zulip,arpitpanwar/zulip,kokoar/zulip,yuvipanda/zulip,Juanvulcano/zulip,guiquanz/zulip,praveenaki/zulip,luyifan/zulip,j831/zulip,jimmy54/zulip,mahim97/zulip,verma-varsha/zulip,natanovia/zulip,hackerkid/zulip,bowlofstew/zulip,so0k/zulip,jphilipsen05/zulip,huangkebo/zulip,isht3/zulip,hayderimran7/zulip,vakila/zulip,natanovia/zulip,johnnygaddarr/zulip,qq1012803704/zulip,hafeez3000/zulip,peiwei/zulip,ericzhou2008/zulip,brockwhittaker/zulip,avastu/zulip,jackrzhang/zulip,grave-w-grave/zulip,yuvipanda/zulip,kokoar/zulip,ipernet/zulip,moria/zulip,Suninus/zulip,tbutter/zulip,KJin99/zulip,ApsOps/zulip,DazWorrall/zulip,glovebx/zulip,vaidap/zulip,jainayush975/zulip,ericzhou2008/zulip,hafeez3000/zulip,babbage/zulip,ApsOps/zulip,schatt/zulip,shaunstanislaus/zulip,he15his/zulip,hj3938/zulip,zachallaun/zulip,levixie/zulip,showell/zulip,bssrdf/zulip,karamcnair/zulip,he15his/zulip,avastu/zulip,aliceriot/zulip,stamhe/zulip,glovebx/zulip,Jianchun1/zulip,arpith/zulip,brainwane/zulip,atomic-labs/zulip,sup95/zulip,souravbadami/zulip,xuanhan863/zulip,qq1012803704/zulip,fw1121/zulip,wavelets/zulip,m1ssou/zulip,dwrpayne/zulip,willingc/zulip,hafeez3000/zulip,themass/zulip,swinghu/zulip,udxxabp/zulip,itnihao/zulip,PhilSk/zulip,joshisa/zulip,shubhamdhama/zulip,zhaoweigg/zulip,sharmaeklavya2/zulip,akuseru/zulip,brockwhittaker/zulip,jackrzhang/zulip,arpitpanwar/zulip,zulip/zulip,kou/zulip,udxxabp/zulip,rishig/zulip,ApsOps/zulip,arpith/zulip,armooo/zulip,jonesgithub/zulip,hustlzp/zulip,mohsenSy/zulip,shaunstanislaus/zulip,itnihao/zulip,he15his/zulip,saitodisse/zulip,zachallaun/zulip,bluesea/zulip,Vallher/zulip,PaulPetring/zulip,jphilipsen05/zulip,yuvipanda/zulip,dattatreya303/zulip,hj3938/zulip,Batterfii/zulip,kaiyuanheshang/zulip,JPJPJPOPOP/zulip,rishig/zulip,levixie/zulip,joshisa/zulip,ryansnowboarder/zulip,Suninus/zulip,peguin40/zulip,tbutter/zulip,MariaFaBella85/zulip,dnmfarrell/zulip,aps-sids/zulip,shaunstanislaus/zulip,babbage/zulip,firstblade/zulip,xuanhan863/zulip,armooo/zulip,amanharitsh123/zulip,wdaher/zulip,LeeRisk/zulip,niftynei/zulip,swinghu/zulip,bssrdf/zulip,codeKonami/zulip,bluesea/zulip,jerryge/zulip,JanzTam/zulip,niftynei/zulip,zulip/zulip,synicalsyntax/zulip,shrikrishnaholla/zulip,ahmadassaf/zulip,aliceriot/zulip,Qgap/zulip,atomic-labs/zulip,MayB/zulip,peiwei/zulip,SmartPeople/zulip,showell/zulip,dnmfarrell/zulip,wavelets/zulip,mansilladev/zulip,technicalpickles/zulip,wweiradio/zulip,zachallaun/zulip,PaulPetring/zulip,tommyip/zulip,cosmicAsymmetry/zulip,Gabriel0402/zulip,Jianchun1/zulip,j831/zulip,punchagan/zulip,dxq-git/zulip,Gabriel0402/zulip,bluesea/zulip,fw1121/zulip,hayderimran7/zulip,dhcrzf/zulip,jackrzhang/zulip,dwrpayne/zulip,willingc/zulip,christi3k/zulip,Drooids/zulip,peiwei/zulip,susansls/zulip,glovebx/zulip,amallia/zulip,jerryge/zulip,ikasumiwt/zulip,thomasboyt/zulip,shaunstanislaus/zulip,willingc/zulip,Qgap/zulip,shrikrishnaholla/zulip,hengqujushi/zulip,luyifan/zulip,krtkmj/zulip,levix
ie/zulip,deer-hope/zulip,thomasboyt/zulip,andersk/zulip,codeKonami/zulip,fw1121/zulip,paxapy/zulip,timabbott/zulip,Juanvulcano/zulip,xuanhan863/zulip,zhaoweigg/zulip,wavelets/zulip,kaiyuanheshang/zulip,m1ssou/zulip,easyfmxu/zulip,AZtheAsian/zulip,ericzhou2008/zulip,zorojean/zulip,natanovia/zulip,bowlofstew/zulip,gigawhitlocks/zulip,avastu/zulip,reyha/zulip,niftynei/zulip,EasonYi/zulip,pradiptad/zulip,hengqujushi/zulip,stamhe/zulip,guiquanz/zulip,atomic-labs/zulip,esander91/zulip,krtkmj/zulip,umkay/zulip,SmartPeople/zulip,brockwhittaker/zulip,MayB/zulip,vikas-parashar/zulip,ryanbackman/zulip,jessedhillon/zulip,seapasulli/zulip,KJin99/zulip,easyfmxu/zulip,zofuthan/zulip,dotcool/zulip,littledogboy/zulip,nicholasbs/zulip,AZtheAsian/zulip,thomasboyt/zulip,joyhchen/zulip,kokoar/zulip,hustlzp/zulip,Batterfii/zulip,cosmicAsymmetry/zulip,ryansnowboarder/zulip,umkay/zulip,ipernet/zulip,xuxiao/zulip,eastlhu/zulip,sonali0901/zulip,dhcrzf/zulip,technicalpickles/zulip,bssrdf/zulip,dhcrzf/zulip,zacps/zulip,DazWorrall/zulip,glovebx/zulip,souravbadami/zulip,atomic-labs/zulip,eeshangarg/zulip,AZtheAsian/zulip,cosmicAsymmetry/zulip,saitodisse/zulip,avastu/zulip,reyha/zulip,natanovia/zulip,arpith/zulip,amyliu345/zulip,RobotCaleb/zulip,verma-varsha/zulip,littledogboy/zulip,Galexrt/zulip,easyfmxu/zulip,isht3/zulip,rht/zulip,tommyip/zulip,tdr130/zulip,deer-hope/zulip,kou/zulip,mahim97/zulip,tbutter/zulip,moria/zulip,vabs22/zulip,JanzTam/zulip,calvinleenyc/zulip,aps-sids/zulip,sharmaeklavya2/zulip,PhilSk/zulip,huangkebo/zulip,mdavid/zulip,jainayush975/zulip,PhilSk/zulip,amallia/zulip,zulip/zulip,mansilladev/zulip,mohsenSy/zulip,MariaFaBella85/zulip,JPJPJPOPOP/zulip,jessedhillon/zulip,jerryge/zulip,jackrzhang/zulip,KingxBanana/zulip,wdaher/zulip,wweiradio/zulip,hackerkid/zulip,guiquanz/zulip,paxapy/zulip,stamhe/zulip,udxxabp/zulip,synicalsyntax/zulip,littledogboy/zulip,souravbadami/zulip,jessedhillon/zulip,zhaoweigg/zulip,Galexrt/zulip,KJin99/zulip,shrikrishnaholla/zulip,jerryge/zulip,zhaoweigg/zulip,jessedhillon/zulip,bowlofstew/zulip,swinghu/zulip,grave-w-grave/zulip,hj3938/zulip,eastlhu/zulip,johnnygaddarr/zulip,KingxBanana/zulip,willingc/zulip,EasonYi/zulip,praveenaki/zulip,vabs22/zulip,arpith/zulip,LeeRisk/zulip,ikasumiwt/zulip,AZtheAsian/zulip,proliming/zulip,yocome/zulip,ahmadassaf/zulip,saitodisse/zulip,xuanhan863/zulip,fw1121/zulip,johnny9/zulip,jerryge/zulip,shubhamdhama/zulip,fw1121/zulip,dattatreya303/zulip,noroot/zulip,mansilladev/zulip,Frouk/zulip,zwily/zulip,xuxiao/zulip,PaulPetring/zulip,ryanbackman/zulip,technicalpickles/zulip,tiansiyuan/zulip,bluesea/zulip,jonesgithub/zulip,christi3k/zulip,vabs22/zulip,DazWorrall/zulip,noroot/zulip,joshisa/zulip,littledogboy/zulip,JanzTam/zulip,esander91/zulip,suxinde2009/zulip,suxinde2009/zulip,yocome/zulip,shaunstanislaus/zulip,pradiptad/zulip,rht/zulip,zofuthan/zulip,synicalsyntax/zulip,aakash-cr7/zulip,Jianchun1/zulip,firstblade/zulip,MayB/zulip,hj3938/zulip,xuxiao/zulip,joyhchen/zulip,andersk/zulip,KJin99/zulip,umkay/zulip,Drooids/zulip,wangdeshui/zulip,hayderimran7/zulip,jessedhillon/zulip,niftynei/zulip,hengqujushi/zulip,dattatreya303/zulip,seapasulli/zulip,brainwane/zulip,itnihao/zulip,seapasulli/zulip,reyha/zulip,hackerkid/zulip,ikasumiwt/zulip,PaulPetring/zulip,levixie/zulip,timabbott/zulip,sup95/zulip,voidException/zulip,Diptanshu8/zulip,aakash-cr7/zulip,adnanh/zulip,dwrpayne/zulip,samatdav/zulip,mohsenSy/zulip,PaulPetring/zulip,MariaFaBella85/zulip,zulip/zulip,codeKonami/zulip,christi3k/zulip,dhcrzf/zulip,joshisa/zulip,MayB/zulip,tommyip/zulip,nicholas
bs/zulip,eastlhu/zulip,Cheppers/zulip,adnanh/zulip,hafeez3000/zulip,arpitpanwar/zulip,yuvipanda/zulip,jeffcao/zulip,praveenaki/zulip,KingxBanana/zulip,jeffcao/zulip,wweiradio/zulip,schatt/zulip,voidException/zulip,codeKonami/zulip,ryansnowboarder/zulip,aliceriot/zulip,zachallaun/zulip,bssrdf/zulip,timabbott/zulip,shubhamdhama/zulip,aps-sids/zulip,gigawhitlocks/zulip,Galexrt/zulip,jrowan/zulip,johnnygaddarr/zulip,ipernet/zulip,xuxiao/zulip,calvinleenyc/zulip,mdavid/zulip,LAndreas/zulip,bluesea/zulip,christi3k/zulip,EasonYi/zulip,huangkebo/zulip,so0k/zulip,brockwhittaker/zulip,ryansnowboarder/zulip,mansilladev/zulip,developerfm/zulip,mahim97/zulip,ryanbackman/zulip,timabbott/zulip,armooo/zulip,so0k/zulip,zwily/zulip,Drooids/zulip,johnny9/zulip,wavelets/zulip,seapasulli/zulip,ufosky-server/zulip,ashwinirudrappa/zulip,thomasboyt/zulip,dotcool/zulip,paxapy/zulip,zorojean/zulip,hj3938/zulip,gkotian/zulip,KJin99/zulip,TigorC/zulip,littledogboy/zulip,schatt/zulip,he15his/zulip,levixie/zulip,punchagan/zulip,tiansiyuan/zulip,thomasboyt/zulip,zorojean/zulip,jonesgithub/zulip,jainayush975/zulip,dxq-git/zulip,technicalpickles/zulip,ikasumiwt/zulip,RobotCaleb/zulip,calvinleenyc/zulip,voidException/zulip,seapasulli/zulip,zofuthan/zulip,ericzhou2008/zulip,so0k/zulip,wangdeshui/zulip,kokoar/zulip,eastlhu/zulip,niftynei/zulip,shrikrishnaholla/zulip,Vallher/zulip,zulip/zulip,xuxiao/zulip,Gabriel0402/zulip,Drooids/zulip,dawran6/zulip,karamcnair/zulip,vakila/zulip,johnny9/zulip,Qgap/zulip,deer-hope/zulip,zachallaun/zulip,hackerkid/zulip,wdaher/zulip,punchagan/zulip,easyfmxu/zulip,ryansnowboarder/zulip,jessedhillon/zulip,suxinde2009/zulip,proliming/zulip,cosmicAsymmetry/zulip,vabs22/zulip,wdaher/zulip,bowlofstew/zulip,atomic-labs/zulip,MayB/zulip,m1ssou/zulip,qq1012803704/zulip,natanovia/zulip,arpith/zulip,zhaoweigg/zulip,grave-w-grave/zulip,Qgap/zulip,amanharitsh123/zulip,vaidap/zulip,mdavid/zulip,cosmicAsymmetry/zulip,vikas-parashar/zulip,stamhe/zulip,gkotian/zulip,shrikrishnaholla/zulip,dwrpayne/zulip,armooo/zulip,Juanvulcano/zulip,jeffcao/zulip,Diptanshu8/zulip,jerryge/zulip,showell/zulip,themass/zulip,ikasumiwt/zulip,samatdav/zulip,johnnygaddarr/zulip,samatdav/zulip,deer-hope/zulip,bastianh/zulip,kaiyuanheshang/zulip,m1ssou/zulip,bowlofstew/zulip,noroot/zulip,AZtheAsian/zulip,amyliu345/zulip,atomic-labs/zulip,nicholasbs/zulip,samatdav/zulip,peiwei/zulip,alliejones/zulip,adnanh/zulip,lfranchi/zulip,susansls/zulip,wdaher/zulip,zacps/zulip,joshisa/zulip,mohsenSy/zulip,yuvipanda/zulip,littledogboy/zulip,natanovia/zulip,rishig/zulip,Juanvulcano/zulip,kokoar/zulip,andersk/zulip,armooo/zulip,vaidap/zulip,kou/zulip,huangkebo/zulip,Juanvulcano/zulip,luyifan/zulip,ipernet/zulip,TigorC/zulip,rht/zulip,themass/zulip,johnnygaddarr/zulip,firstblade/zulip,fw1121/zulip,adnanh/zulip,blaze225/zulip,KJin99/zulip,synicalsyntax/zulip,grave-w-grave/zulip,reyha/zulip,bitemyapp/zulip,Qgap/zulip,guiquanz/zulip,eeshangarg/zulip,levixie/zulip,sonali0901/zulip,ericzhou2008/zulip,rht/zulip,peiwei/zulip,LAndreas/zulip,aliceriot/zulip,Suninus/zulip,Suninus/zulip,Batterfii/zulip,punchagan/zulip,isht3/zulip,dhcrzf/zulip,zofuthan/zulip,sup95/zulip,dwrpayne/zulip,jackrzhang/zulip,verma-varsha/zulip,ahmadassaf/zulip,jrowan/zulip,showell/zulip,zulip/zulip,zacps/zulip,vikas-parashar/zulip,itnihao/zulip,PaulPetring/zulip,Cheppers/zulip,jainayush975/zulip,easyfmxu/zulip,gkotian/zulip,moria/zulip,dwrpayne/zulip,avastu/zulip,SmartPeople/zulip,eeshangarg/zulip,shubhamdhama/zulip,Vallher/zulip,Drooids/zulip,umkay/zulip,dotcool/zulip,PhilSk/zulip,bite
myapp/zulip,arpitpanwar/zulip,zorojean/zulip,schatt/zulip,bitemyapp/zulip,jeffcao/zulip,bastianh/zulip,adnanh/zulip,kokoar/zulip,zachallaun/zulip,timabbott/zulip,proliming/zulip,ikasumiwt/zulip,punchagan/zulip,firstblade/zulip,zwily/zulip,Frouk/zulip,PhilSk/zulip,Diptanshu8/zulip,babbage/zulip,aps-sids/zulip,akuseru/zulip,ufosky-server/zulip,nicholasbs/zulip,dotcool/zulip,Jianchun1/zulip,itnihao/zulip,PhilSk/zulip,akuseru/zulip,schatt/zulip,kokoar/zulip,vikas-parashar/zulip,jackrzhang/zulip,joyhchen/zulip,xuxiao/zulip,brainwane/zulip,synicalsyntax/zulip,grave-w-grave/zulip,bastianh/zulip,lfranchi/zulip,shubhamdhama/zulip,wweiradio/zulip,tdr130/zulip,EasonYi/zulip,pradiptad/zulip,JanzTam/zulip,rishig/zulip,Diptanshu8/zulip,yuvipanda/zulip,wavelets/zulip,armooo/zulip,aps-sids/zulip,MariaFaBella85/zulip,suxinde2009/zulip,rht/zulip,sonali0901/zulip,kou/zulip,cosmicAsymmetry/zulip,amanharitsh123/zulip,RobotCaleb/zulip,Drooids/zulip,souravbadami/zulip,joshisa/zulip,bitemyapp/zulip,umkay/zulip,zorojean/zulip,rishig/zulip,zorojean/zulip,tdr130/zulip,ufosky-server/zulip,ryanbackman/zulip,eeshangarg/zulip,babbage/zulip,glovebx/zulip,littledogboy/zulip,hengqujushi/zulip,dxq-git/zulip,gigawhitlocks/zulip,ashwinirudrappa/zulip,Diptanshu8/zulip,vikas-parashar/zulip,jrowan/zulip,wangdeshui/zulip,umkay/zulip,dxq-git/zulip,dhcrzf/zulip,armooo/zulip,bastianh/zulip,noroot/zulip,voidException/zulip,gigawhitlocks/zulip,dnmfarrell/zulip,lfranchi/zulip,dattatreya303/zulip,calvinleenyc/zulip,dxq-git/zulip,karamcnair/zulip,lfranchi/zulip,praveenaki/zulip,reyha/zulip,yocome/zulip,andersk/zulip,Batterfii/zulip,developerfm/zulip,krtkmj/zulip,gkotian/zulip,pradiptad/zulip,wangdeshui/zulip,JanzTam/zulip,developerfm/zulip,tdr130/zulip,johnny9/zulip,sonali0901/zulip,hayderimran7/zulip,j831/zulip,ryanbackman/zulip,aps-sids/zulip,MariaFaBella85/zulip,johnny9/zulip,MariaFaBella85/zulip,nicholasbs/zulip,mahim97/zulip,jonesgithub/zulip,ahmadassaf/zulip,LeeRisk/zulip,kaiyuanheshang/zulip,peguin40/zulip,zwily/zulip,peguin40/zulip,zacps/zulip,tdr130/zulip,ryansnowboarder/zulip,bluesea/zulip,bastianh/zulip,luyifan/zulip,Batterfii/zulip,ericzhou2008/zulip,johnnygaddarr/zulip,huangkebo/zulip
|
Add script to create new realms.
(imported from commit 764d41f28b1af10860ea4d03a6aa983f40d3ffd7)
|
from django.core.management.base import BaseCommand
from zephyr.models import Realm, Message, UserProfile, Recipient, create_stream_if_needed, \
get_client
from zephyr.views import do_send_message
from django.utils.timezone import now
class Command(BaseCommand):
help = "Create a realm for the specified domain(s)."
def handle(self, *args, **options):
for domain in args:
realm, created = Realm.objects.get_or_create(domain=domain)
if not created:
print domain + ": Realm already exists!"
else:
message = Message()
message.sender = UserProfile.objects.get(user__email="humbug+signups@humbughq.com")
message.recipient = Recipient.objects.get(type_id=create_stream_if_needed(
message.sender.realm, "signups").id, type=Recipient.STREAM)
message.subject = domain
message.content = "Signups enabled."
message.pub_date = now()
message.sending_client = get_client("Internal")
do_send_message(message)
print domain + ": Created."
|
<commit_before><commit_msg>Add script to create new realms.
(imported from commit 764d41f28b1af10860ea4d03a6aa983f40d3ffd7)<commit_after>
|
from django.core.management.base import BaseCommand
from zephyr.models import Realm, Message, UserProfile, Recipient, create_stream_if_needed, \
get_client
from zephyr.views import do_send_message
from django.utils.timezone import now
class Command(BaseCommand):
help = "Create a realm for the specified domain(s)."
def handle(self, *args, **options):
for domain in args:
realm, created = Realm.objects.get_or_create(domain=domain)
if not created:
print domain + ": Realm already exists!"
else:
message = Message()
message.sender = UserProfile.objects.get(user__email="humbug+signups@humbughq.com")
message.recipient = Recipient.objects.get(type_id=create_stream_if_needed(
message.sender.realm, "signups").id, type=Recipient.STREAM)
message.subject = domain
message.content = "Signups enabled."
message.pub_date = now()
message.sending_client = get_client("Internal")
do_send_message(message)
print domain + ": Created."
|
Add script to create new realms.
(imported from commit 764d41f28b1af10860ea4d03a6aa983f40d3ffd7)from django.core.management.base import BaseCommand
from zephyr.models import Realm, Message, UserProfile, Recipient, create_stream_if_needed, \
get_client
from zephyr.views import do_send_message
from django.utils.timezone import now
class Command(BaseCommand):
help = "Create a realm for the specified domain(s)."
def handle(self, *args, **options):
for domain in args:
realm, created = Realm.objects.get_or_create(domain=domain)
if not created:
print domain + ": Realm already exists!"
else:
message = Message()
message.sender = UserProfile.objects.get(user__email="humbug+signups@humbughq.com")
message.recipient = Recipient.objects.get(type_id=create_stream_if_needed(
message.sender.realm, "signups").id, type=Recipient.STREAM)
message.subject = domain
message.content = "Signups enabled."
message.pub_date = now()
message.sending_client = get_client("Internal")
do_send_message(message)
print domain + ": Created."
|
<commit_before><commit_msg>Add script to create new realms.
(imported from commit 764d41f28b1af10860ea4d03a6aa983f40d3ffd7)<commit_after>from django.core.management.base import BaseCommand
from zephyr.models import Realm, Message, UserProfile, Recipient, create_stream_if_needed, \
get_client
from zephyr.views import do_send_message
from django.utils.timezone import now
class Command(BaseCommand):
help = "Create a realm for the specified domain(s)."
def handle(self, *args, **options):
for domain in args:
realm, created = Realm.objects.get_or_create(domain=domain)
if not created:
print domain + ": Realm already exists!"
else:
message = Message()
message.sender = UserProfile.objects.get(user__email="humbug+signups@humbughq.com")
message.recipient = Recipient.objects.get(type_id=create_stream_if_needed(
message.sender.realm, "signups").id, type=Recipient.STREAM)
message.subject = domain
message.content = "Signups enabled."
message.pub_date = now()
message.sending_client = get_client("Internal")
do_send_message(message)
print domain + ": Created."
|
|
6a5a6203258be862e3b47ed62f61ca0dfef24628
|
common/djangoapps/student/migrations/0029_add_data_researcher.py
|
common/djangoapps/student/migrations/0029_add_data_researcher.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-01-27 19:02
from __future__ import unicode_literals
from django.db import migrations
from student.models import CourseAccessRole
def add_data_researcher(apps, schema_editor):
"""
Add a `data_researcher` role for everyone who is currently `staff` or `instructor`.
"""
for role in CourseAccessRole.objects.filter(role__in=('staff', 'instructor')):
new_role, created = CourseAccessRole.objects.get_or_create(
user=role.user,
org=role.org,
course_id=role.course_id,
role='data_researcher'
)
class Migration(migrations.Migration):
dependencies = [
('student', '0028_historicalmanualenrollmentaudit'),
]
operations = [
migrations.RunPython(add_data_researcher, reverse_code=migrations.RunPython.noop),
]
|
Add a data researcher role for everyone who is currently staff or instructor.
|
Add a data researcher role for everyone who is currently staff or instructor.
|
Python
|
agpl-3.0
|
edx-solutions/edx-platform,cpennington/edx-platform,msegado/edx-platform,eduNEXT/edx-platform,angelapper/edx-platform,mitocw/edx-platform,EDUlib/edx-platform,stvstnfrd/edx-platform,edx-solutions/edx-platform,cpennington/edx-platform,arbrandes/edx-platform,arbrandes/edx-platform,appsembler/edx-platform,arbrandes/edx-platform,msegado/edx-platform,angelapper/edx-platform,mitocw/edx-platform,eduNEXT/edx-platform,edx-solutions/edx-platform,edx-solutions/edx-platform,appsembler/edx-platform,EDUlib/edx-platform,stvstnfrd/edx-platform,EDUlib/edx-platform,msegado/edx-platform,stvstnfrd/edx-platform,arbrandes/edx-platform,msegado/edx-platform,eduNEXT/edunext-platform,edx/edx-platform,eduNEXT/edx-platform,cpennington/edx-platform,edx/edx-platform,angelapper/edx-platform,appsembler/edx-platform,msegado/edx-platform,eduNEXT/edunext-platform,angelapper/edx-platform,eduNEXT/edunext-platform,eduNEXT/edunext-platform,eduNEXT/edx-platform,mitocw/edx-platform,stvstnfrd/edx-platform,EDUlib/edx-platform,edx/edx-platform,appsembler/edx-platform,edx/edx-platform,cpennington/edx-platform,mitocw/edx-platform
|
Add a data researcher role for everyone who is currently staff or instructor.
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-01-27 19:02
from __future__ import unicode_literals
from django.db import migrations
from student.models import CourseAccessRole
def add_data_researcher(apps, schema_editor):
"""
Add a `data_researcher` role for everyone who is currently `staff` or `instructor`.
"""
for role in CourseAccessRole.objects.filter(role__in=('staff', 'instructor')):
new_role, created = CourseAccessRole.objects.get_or_create(
user=role.user,
org=role.org,
course_id=role.course_id,
role='data_researcher'
)
class Migration(migrations.Migration):
dependencies = [
('student', '0028_historicalmanualenrollmentaudit'),
]
operations = [
migrations.RunPython(add_data_researcher, reverse_code=migrations.RunPython.noop),
]
|
<commit_before><commit_msg>Add a data researcher role for everyone who is currently staff or instructor.<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-01-27 19:02
from __future__ import unicode_literals
from django.db import migrations
from student.models import CourseAccessRole
def add_data_researcher(apps, schema_editor):
"""
Add a `data_researcher` role for everyone who is currently `staff` or `instructor`.
"""
for role in CourseAccessRole.objects.filter(role__in=('staff', 'instructor')):
new_role, created = CourseAccessRole.objects.get_or_create(
user=role.user,
org=role.org,
course_id=role.course_id,
role='data_researcher'
)
class Migration(migrations.Migration):
dependencies = [
('student', '0028_historicalmanualenrollmentaudit'),
]
operations = [
migrations.RunPython(add_data_researcher, reverse_code=migrations.RunPython.noop),
]
|
Add a data researcher role for everyone who is currently staff or instructor.# -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-01-27 19:02
from __future__ import unicode_literals
from django.db import migrations
from student.models import CourseAccessRole
def add_data_researcher(apps, schema_editor):
"""
Add a `data_researcher` role for everyone who is currently `staff` or `instructor`.
"""
for role in CourseAccessRole.objects.filter(role__in=('staff', 'instructor')):
new_role, created = CourseAccessRole.objects.get_or_create(
user=role.user,
org=role.org,
course_id=role.course_id,
role='data_researcher'
)
class Migration(migrations.Migration):
dependencies = [
('student', '0028_historicalmanualenrollmentaudit'),
]
operations = [
migrations.RunPython(add_data_researcher, reverse_code=migrations.RunPython.noop),
]
|
<commit_before><commit_msg>Add a data researcher role for everyone who is currently staff or instructor.<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-01-27 19:02
from __future__ import unicode_literals
from django.db import migrations
from student.models import CourseAccessRole
def add_data_researcher(apps, schema_editor):
"""
Add a `data_researcher` role for everyone who is currently `staff` or `instructor`.
"""
for role in CourseAccessRole.objects.filter(role__in=('staff', 'instructor')):
new_role, created = CourseAccessRole.objects.get_or_create(
user=role.user,
org=role.org,
course_id=role.course_id,
role='data_researcher'
)
class Migration(migrations.Migration):
dependencies = [
('student', '0028_historicalmanualenrollmentaudit'),
]
operations = [
migrations.RunPython(add_data_researcher, reverse_code=migrations.RunPython.noop),
]
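As a rough after-the-fact sanity check (not part of the migration itself), a Django shell query along these lines could confirm that data_researcher rows were created. The counts are only loosely comparable, since a user holding both staff and instructor yields a single data_researcher row.

from student.models import CourseAccessRole

# rows that should have triggered the backfill vs. rows the backfill created
sources = CourseAccessRole.objects.filter(role__in=('staff', 'instructor')).count()
researchers = CourseAccessRole.objects.filter(role='data_researcher').count()
print(sources, researchers)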
|
|
2fedb73b2c83fc7bb1b354d8b1ebd8dfe8497995
|
dataportal/tests/test_examples.py
|
dataportal/tests/test_examples.py
|
import unittest
from ..examples.sample_data import (temperature_ramp, multisource_event,
image_and_scalar)
from metadatastore.api import Document
class CommonSampleDataTests(object):
def setUp(self):
pass
def test_basic_usage(self):
events = self.example.run()
# check expected types
self.assertTrue(isinstance(events, list))
self.assertTrue(isinstance(events[0], Document))
class TestTemperatureRamp(CommonSampleDataTests, unittest.TestCase):
def setUp(self):
self.example = temperature_ramp
class TestMultisourceEvent(CommonSampleDataTests, unittest.TestCase):
def setUp(self):
self.example = multisource_event
class TestImageAndScalar(CommonSampleDataTests, unittest.TestCase):
def setUp(self):
self.example = image_and_scalar
|
from nose.tools import assert_true
from ..examples.sample_data import (temperature_ramp, multisource_event,
image_and_scalar)
from metadatastore.api import Document
def run_example(example):
events = example.run()
assert_true(isinstance(events, list))
assert_true(isinstance(events[0], Document))
def test_examples():
for example in [temperature_ramp, multisource_event, image_and_scalar]:
yield run_example, example
|
Use generator test for examples.
|
REF: Use generator test for examples.
|
Python
|
bsd-3-clause
|
ericdill/datamuxer,danielballan/datamuxer,NSLS-II/dataportal,tacaswell/dataportal,danielballan/dataportal,ericdill/databroker,NSLS-II/datamuxer,danielballan/datamuxer,NSLS-II/dataportal,danielballan/dataportal,ericdill/databroker,tacaswell/dataportal,ericdill/datamuxer
|
import unittest
from ..examples.sample_data import (temperature_ramp, multisource_event,
image_and_scalar)
from metadatastore.api import Document
class CommonSampleDataTests(object):
def setUp(self):
pass
def test_basic_usage(self):
events = self.example.run()
# check expected types
self.assertTrue(isinstance(events, list))
self.assertTrue(isinstance(events[0], Document))
class TestTemperatureRamp(CommonSampleDataTests, unittest.TestCase):
def setUp(self):
self.example = temperature_ramp
class TestMultisourceEvent(CommonSampleDataTests, unittest.TestCase):
def setUp(self):
self.example = multisource_event
class TestImageAndScalar(CommonSampleDataTests, unittest.TestCase):
def setUp(self):
self.example = image_and_scalar
REF: Use generator test for examples.
|
from nose.tools import assert_true
from ..examples.sample_data import (temperature_ramp, multisource_event,
image_and_scalar)
from metadatastore.api import Document
def run_example(example):
events = example.run()
assert_true(isinstance(events, list))
assert_true(isinstance(events[0], Document))
def test_examples():
for example in [temperature_ramp, multisource_event, image_and_scalar]:
yield run_example, example
|
<commit_before>import unittest
from ..examples.sample_data import (temperature_ramp, multisource_event,
image_and_scalar)
from metadatastore.api import Document
class CommonSampleDataTests(object):
def setUp(self):
pass
def test_basic_usage(self):
events = self.example.run()
# check expected types
self.assertTrue(isinstance(events, list))
self.assertTrue(isinstance(events[0], Document))
class TestTemperatureRamp(CommonSampleDataTests, unittest.TestCase):
def setUp(self):
self.example = temperature_ramp
class TestMultisourceEvent(CommonSampleDataTests, unittest.TestCase):
def setUp(self):
self.example = multisource_event
class TestImageAndScalar(CommonSampleDataTests, unittest.TestCase):
def setUp(self):
self.example = image_and_scalar
<commit_msg>REF: Use generator test for examples.<commit_after>
|
from nose.tools import assert_true
from ..examples.sample_data import (temperature_ramp, multisource_event,
image_and_scalar)
from metadatastore.api import Document
def run_example(example):
events = example.run()
assert_true(isinstance(events, list))
assert_true(isinstance(events[0], Document))
def test_examples():
for example in [temperature_ramp, multisource_event, image_and_scalar]:
yield run_example, example
|
import unittest
from ..examples.sample_data import (temperature_ramp, multisource_event,
image_and_scalar)
from metadatastore.api import Document
class CommonSampleDataTests(object):
def setUp(self):
pass
def test_basic_usage(self):
events = self.example.run()
# check expected types
self.assertTrue(isinstance(events, list))
self.assertTrue(isinstance(events[0], Document))
class TestTemperatureRamp(CommonSampleDataTests, unittest.TestCase):
def setUp(self):
self.example = temperature_ramp
class TestMultisourceEvent(CommonSampleDataTests, unittest.TestCase):
def setUp(self):
self.example = multisource_event
class TestImageAndScalar(CommonSampleDataTests, unittest.TestCase):
def setUp(self):
self.example = image_and_scalar
REF: Use generator test for examples.from nose.tools import assert_true
from ..examples.sample_data import (temperature_ramp, multisource_event,
image_and_scalar)
from metadatastore.api import Document
def run_example(example):
events = example.run()
assert_true(isinstance(events, list))
assert_true(isinstance(events[0], Document))
def test_examples():
for example in [temperature_ramp, multisource_event, image_and_scalar]:
yield run_example, example
|
<commit_before>import unittest
from ..examples.sample_data import (temperature_ramp, multisource_event,
image_and_scalar)
from metadatastore.api import Document
class CommonSampleDataTests(object):
def setUp(self):
pass
def test_basic_usage(self):
events = self.example.run()
# check expected types
self.assertTrue(isinstance(events, list))
self.assertTrue(isinstance(events[0], Document))
class TestTemperatureRamp(CommonSampleDataTests, unittest.TestCase):
def setUp(self):
self.example = temperature_ramp
class TestMultisourceEvent(CommonSampleDataTests, unittest.TestCase):
def setUp(self):
self.example = multisource_event
class TestImageAndScalar(CommonSampleDataTests, unittest.TestCase):
def setUp(self):
self.example = image_and_scalar
<commit_msg>REF: Use generator test for examples.<commit_after>from nose.tools import assert_true
from ..examples.sample_data import (temperature_ramp, multisource_event,
image_and_scalar)
from metadatastore.api import Document
def run_example(example):
events = example.run()
assert_true(isinstance(events, list))
assert_true(isinstance(events[0], Document))
def test_examples():
for example in [temperature_ramp, multisource_event, image_and_scalar]:
yield run_example, example
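Yield-based generator tests are a nose idiom; under pytest, which later dropped yield tests, the same coverage would typically be written with parametrize. The sketch below assumes pytest is available and is not part of the original refactor.

import pytest
from ..examples.sample_data import (temperature_ramp, multisource_event,
                                    image_and_scalar)
from metadatastore.api import Document

@pytest.mark.parametrize('example',
                         [temperature_ramp, multisource_event, image_and_scalar])
def test_example(example):
    events = example.run()
    assert isinstance(events, list)
    assert isinstance(events[0], Document)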
|
f87b10b6a6639843b68777e5346109acb44c948a
|
profile_compressible_solver/gaussian.py
|
profile_compressible_solver/gaussian.py
|
from firedrake import (SpatialCoordinate, dot, cross, sqrt, atan_2,
exp, as_vector, Constant, acos)
import numpy as np
class Gaussian(object):
def __init__(self,
mesh,
dir_from_center,
radial_dist,
sigma_theta,
sigma_r,
amplitude=1):
self._mesh = mesh
self._n0 = dir_from_center
self._r0 = radial_dist
self._sigma_theta = sigma_theta
self._sigma_r = sigma_r
self._amp = amplitude
self.x = SpatialCoordinate(mesh)
@property
def r(self):
x = self.x
return sqrt(x[0]**2 + x[1]**2 + x[2]**2)
@property
def theta(self):
x = self.x
n0 = self._n0
return acos(dot(x, n0) / abs(dot(x, n0)))
@property
def r_expr(self):
r = self.r
r0 = self._r0
return r - r0
@property
def expression(self):
A = self._amp
theta = self.theta
R = self.r_expr
sigma_theta = self._sigma_theta
sigma_r = self._sigma_r
return A*exp(-0.5*((theta/sigma_theta)**2 + (R/sigma_r)**2))
class MultipleGaussians(object):
def __init__(self, n_gaussians, r_earth, thickness, seed=2097152):
self._N = n_gaussians
self._R = r_earth
self._H = thickness
self._seed = seed
self._generate_random_vars()
def _generate_random_vars(self):
        np.random.seed(self._seed)
ns = []
rs = []
for i in range(self._N):
nrm = 0.0
while (nrm < 0.5) or (nrm > 1.0):
n = 2*np.random.rand(3) - 1.0
nrm = np.linalg.norm(n)
ns.append(as_vector(list(n)))
rs.append(Constant(self._R + self._H * np.random.rand()))
self._random_Ns = ns
self._random_Rs = rs
def expression(self, mesh):
gs = []
for i, (n, r0) in enumerate(zip(self._random_Ns, self._random_Rs)):
sigma_theta = 1.0 - 0.5 * (i / self._N)
sigma_r = (1.0 - 0.5 * (i / self._N)) * self._H
amplitude = 1.0
g = Gaussian(mesh, n, r0, sigma_theta, sigma_r, amplitude)
gs.append(g.expression)
return sum(gs)
|
Set up object to create random pressure field
|
Set up object to create random pressure field
|
Python
|
mit
|
thomasgibson/firedrake-hybridization
|
Set up object to create random pressure field
|
from firedrake import (SpatialCoordinate, dot, cross, sqrt, atan_2,
exp, as_vector, Constant, acos)
import numpy as np
class Gaussian(object):
def __init__(self,
mesh,
dir_from_center,
radial_dist,
sigma_theta,
sigma_r,
amplitude=1):
self._mesh = mesh
self._n0 = dir_from_center
self._r0 = radial_dist
self._sigma_theta = sigma_theta
self._sigma_r = sigma_r
self._amp = amplitude
self.x = SpatialCoordinate(mesh)
@property
def r(self):
x = self.x
return sqrt(x[0]**2 + x[1]**2 + x[2]**2)
@property
def theta(self):
x = self.x
n0 = self._n0
return acos(dot(x, n0) / abs(dot(x, n0)))
@property
def r_expr(self):
r = self.r
r0 = self._r0
return r - r0
@property
def expression(self):
A = self._amp
theta = self.theta
R = self.r_expr
sigma_theta = self._sigma_theta
sigma_r = self._sigma_r
return A*exp(-0.5*((theta/sigma_theta)**2 + (R/sigma_r)**2))
class MultipleGaussians(object):
def __init__(self, n_gaussians, r_earth, thickness, seed=2097152):
self._N = n_gaussians
self._R = r_earth
self._H = thickness
self._seed = seed
self._generate_random_vars()
def _generate_random_vars(self):
        np.random.seed(self._seed)
ns = []
rs = []
for i in range(self._N):
nrm = 0.0
while (nrm < 0.5) or (nrm > 1.0):
n = 2*np.random.rand(3) - 1.0
nrm = np.linalg.norm(n)
ns.append(as_vector(list(n)))
rs.append(Constant(self._R + self._H * np.random.rand()))
self._random_Ns = ns
self._random_Rs = rs
def expression(self, mesh):
gs = []
for i, (n, r0) in enumerate(zip(self._random_Ns, self._random_Rs)):
sigma_theta = 1.0 - 0.5 * (i / self._N)
sigma_r = (1.0 - 0.5 * (i / self._N)) * self._H
amplitude = 1.0
g = Gaussian(mesh, n, r0, sigma_theta, sigma_r, amplitude)
gs.append(g.expression)
return sum(gs)
|
<commit_before><commit_msg>Set up object to create random pressure field<commit_after>
|
from firedrake import (SpatialCoordinate, dot, cross, sqrt, atan_2,
exp, as_vector, Constant, acos)
import numpy as np
class Gaussian(object):
def __init__(self,
mesh,
dir_from_center,
radial_dist,
sigma_theta,
sigma_r,
amplitude=1):
self._mesh = mesh
self._n0 = dir_from_center
self._r0 = radial_dist
self._sigma_theta = sigma_theta
self._sigma_r = sigma_r
self._amp = amplitude
self.x = SpatialCoordinate(mesh)
@property
def r(self):
x = self.x
return sqrt(x[0]**2 + x[1]**2 + x[2]**2)
@property
def theta(self):
x = self.x
n0 = self._n0
return acos(dot(x, n0) / abs(dot(x, n0)))
@property
def r_expr(self):
r = self.r
r0 = self._r0
return r - r0
@property
def expression(self):
A = self._amp
theta = self.theta
R = self.r_expr
sigma_theta = self._sigma_theta
sigma_r = self._sigma_r
return A*exp(-0.5*((theta/sigma_theta)**2 + (R/sigma_r)**2))
class MultipleGaussians(object):
def __init__(self, n_gaussians, r_earth, thickness, seed=2097152):
self._N = n_gaussians
self._R = r_earth
self._H = thickness
self._seed = seed
self._generate_random_vars()
def _generate_random_vars(self):
        np.random.seed(self._seed)
ns = []
rs = []
for i in range(self._N):
nrm = 0.0
while (nrm < 0.5) or (nrm > 1.0):
n = 2*np.random.rand(3) - 1.0
nrm = np.linalg.norm(n)
ns.append(as_vector(list(n)))
rs.append(Constant(self._R + self._H * np.random.rand()))
self._random_Ns = ns
self._random_Rs = rs
def expression(self, mesh):
gs = []
for i, (n, r0) in enumerate(zip(self._random_Ns, self._random_Rs)):
sigma_theta = 1.0 - 0.5 * (i / self._N)
sigma_r = (1.0 - 0.5 * (i / self._N)) * self._H
amplitude = 1.0
g = Gaussian(mesh, n, r0, sigma_theta, sigma_r, amplitude)
gs.append(g.expression)
return sum(gs)
|
Set up object to create random pressure fieldfrom firedrake import (SpatialCoordinate, dot, cross, sqrt, atan_2,
exp, as_vector, Constant, acos)
import numpy as np
class Gaussian(object):
def __init__(self,
mesh,
dir_from_center,
radial_dist,
sigma_theta,
sigma_r,
amplitude=1):
self._mesh = mesh
self._n0 = dir_from_center
self._r0 = radial_dist
self._sigma_theta = sigma_theta
self._sigma_r = sigma_r
self._amp = amplitude
self.x = SpatialCoordinate(mesh)
@property
def r(self):
x = self.x
return sqrt(x[0]**2 + x[1]**2 + x[2]**2)
@property
def theta(self):
x = self.x
n0 = self._n0
return acos(dot(x, n0) / abs(dot(x, n0)))
@property
def r_expr(self):
r = self.r
r0 = self._r0
return r - r0
@property
def expression(self):
A = self._amp
theta = self.theta
R = self.r_expr
sigma_theta = self._sigma_theta
sigma_r = self._sigma_r
return A*exp(-0.5*((theta/sigma_theta)**2 + (R/sigma_r)**2))
class MultipleGaussians(object):
def __init__(self, n_gaussians, r_earth, thickness, seed=2097152):
self._N = n_gaussians
self._R = r_earth
self._H = thickness
self._seed = seed
self._generate_random_vars()
def _generate_random_vars(self):
        np.random.seed(self._seed)
ns = []
rs = []
for i in range(self._N):
nrm = 0.0
while (nrm < 0.5) or (nrm > 1.0):
n = 2*np.random.rand(3) - 1.0
nrm = np.linalg.norm(n)
ns.append(as_vector(list(n)))
rs.append(Constant(self._R + self._H * np.random.rand()))
self._random_Ns = ns
self._random_Rs = rs
def expression(self, mesh):
gs = []
for i, (n, r0) in enumerate(zip(self._random_Ns, self._random_Rs)):
sigma_theta = 1.0 - 0.5 * (i / self._N)
sigma_r = (1.0 - 0.5 * (i / self._N)) * self._H
amplitude = 1.0
g = Gaussian(mesh, n, r0, sigma_theta, sigma_r, amplitude)
gs.append(g.expression)
return sum(gs)
|
<commit_before><commit_msg>Set up object to create random pressure field<commit_after>from firedrake import (SpatialCoordinate, dot, cross, sqrt, atan_2,
exp, as_vector, Constant, acos)
import numpy as np
class Gaussian(object):
def __init__(self,
mesh,
dir_from_center,
radial_dist,
sigma_theta,
sigma_r,
amplitude=1):
self._mesh = mesh
self._n0 = dir_from_center
self._r0 = radial_dist
self._sigma_theta = sigma_theta
self._sigma_r = sigma_r
self._amp = amplitude
self.x = SpatialCoordinate(mesh)
@property
def r(self):
x = self.x
return sqrt(x[0]**2 + x[1]**2 + x[2]**2)
@property
def theta(self):
x = self.x
n0 = self._n0
return acos(dot(x, n0) / abs(dot(x, n0)))
@property
def r_expr(self):
r = self.r
r0 = self._r0
return r - r0
@property
def expression(self):
A = self._amp
theta = self.theta
R = self.r_expr
sigma_theta = self._sigma_theta
sigma_r = self._sigma_r
return A*exp(-0.5*((theta/sigma_theta)**2 + (R/sigma_r)**2))
class MultipleGaussians(object):
def __init__(self, n_gaussians, r_earth, thickness, seed=2097152):
self._N = n_gaussians
self._R = r_earth
self._H = thickness
self._seed = seed
self._generate_random_vars()
def _generate_random_vars(self):
        np.random.seed(self._seed)
ns = []
rs = []
for i in range(self._N):
nrm = 0.0
while (nrm < 0.5) or (nrm > 1.0):
n = 2*np.random.rand(3) - 1.0
nrm = np.linalg.norm(n)
ns.append(as_vector(list(n)))
rs.append(Constant(self._R + self._H * np.random.rand()))
self._random_Ns = ns
self._random_Rs = rs
def expression(self, mesh):
gs = []
for i, (n, r0) in enumerate(zip(self._random_Ns, self._random_Rs)):
sigma_theta = 1.0 - 0.5 * (i / self._N)
sigma_r = (1.0 - 0.5 * (i / self._N)) * self._H
amplitude = 1.0
g = Gaussian(mesh, n, r0, sigma_theta, sigma_r, amplitude)
gs.append(g.expression)
return sum(gs)
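A minimal usage sketch, assuming a spherical Firedrake mesh is available; the mesh constructor, radius, shell thickness and function space below are placeholders rather than values taken from the original profiling setup.

from firedrake import CubedSphereMesh, FunctionSpace, Function

R_earth = 6.371e6   # placeholder radius in metres
H = 1.0e4           # placeholder shell thickness in metres

mesh = CubedSphereMesh(R_earth, refinement_level=3)
V = FunctionSpace(mesh, "CG", 1)

mg = MultipleGaussians(n_gaussians=8, r_earth=R_earth, thickness=H)
p_pert = Function(V)
p_pert.interpolate(mg.expression(mesh))   # random pressure perturbation field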
|
|
1f7bad5fc9c5aa1606cb194c7cb31729e914f042
|
smap-nepse/preprocessing/csvtohdf.py
|
smap-nepse/preprocessing/csvtohdf.py
|
import pandas as pd
import numpy as np
import os
import glob
def csvtohdf(source, destination):
"""Takes a csv file as input and storest it as a hdf5 file in the
destnation path.
The hdf5 file is stored in table format.
"""
try:
data = pd.read_csv(source,index_col = 0,parse_dates=True)
except(FileNotFoundError, IOError):
print('Wrong file or file path.')
return
if data.empty:
return
data.to_hdf(destination, 'data', mode='w', format='table')
return
def alltohdf(source, destination='../hdf/'):
"""Performs storing of all .csv file present on source directory in a hdf5
data format and save in destination folder."""
if not os.path.exists(destination):
os.makedirs(destination)
os.chdir(source)
for file in glob.glob("*.csv"):
filename = os.path.basename(file)
print('Saving {}...\n'.format(filename))
csvtohdf(file, destination+filename)
return
def build_hdfstore(source_dir, destination='../hdf/store.h5'):
""" Creates a hdf5 store of all the csv files present in the source directory.
The hdf5 store is placed in the destination path.
param:
source_dir: The source directory containing the csv files.
destination: The path for the hdf5 store.
returns:
destination: The path for the hdf5 store.
"""
# Delete destination file if it exists. If destination is not deleted the
# hdf contents are appended to the file which causes data consistency problems.
try:
os.remove(destination)
except OSError:
pass
os.chdir(source_dir)
for file in glob.glob("*.csv"):
print('Appending {}.csv to hdfstore...\n'.format(file))
try:
data = pd.read_csv(file,index_col = 0,parse_dates=True)
except(FileNotFoundError, IOError):
print('Wrong file or file path.')
return
data.to_hdf(destination, file.strip('.csv') , mode='a', format='fixed')
return destination
|
Add function to save .csv data as HDF files.
|
Add function to save .csv data as HDF files.
The module provides functionality to save single .csv files as single
HDF file. It also provides mechanism to store the whole data into a
single HDFStore indexed using their filenames.
|
Python
|
mit
|
samshara/Stock-Market-Analysis-and-Prediction
|
Add function to save .csv data as HDF files.
The module provides functionality to save single .csv files as single
HDF file. It also provides mechanism to store the whole data into a
single HDFStore indexed using their filenames.
|
import pandas as pd
import numpy as np
import os
import glob
def csvtohdf(source, destination):
"""Takes a csv file as input and storest it as a hdf5 file in the
destnation path.
The hdf5 file is stored in table format.
"""
try:
data = pd.read_csv(source,index_col = 0,parse_dates=True)
except(FileNotFoundError, IOError):
print('Wrong file or file path.')
return
if data.empty:
return
data.to_hdf(destination, 'data', mode='w', format='table')
return
def alltohdf(source, destination='../hdf/'):
"""Performs storing of all .csv file present on source directory in a hdf5
data format and save in destination folder."""
if not os.path.exists(destination):
os.makedirs(destination)
os.chdir(source)
for file in glob.glob("*.csv"):
filename = os.path.basename(file)
print('Saving {}...\n'.format(filename))
csvtohdf(file, destination+filename)
return
def build_hdfstore(source_dir, destination='../hdf/store.h5'):
""" Creates a hdf5 store of all the csv files present in the source directory.
The hdf5 store is placed in the destination path.
param:
source_dir: The source directory containing the csv files.
destination: The path for the hdf5 store.
returns:
destination: The path for the hdf5 store.
"""
# Delete destination file if it exists. If destination is not deleted the
# hdf contents are appended to the file which causes data consistency problems.
try:
os.remove(destination)
except OSError:
pass
os.chdir(source_dir)
for file in glob.glob("*.csv"):
print('Appending {}.csv to hdfstore...\n'.format(file))
try:
data = pd.read_csv(file,index_col = 0,parse_dates=True)
except(FileNotFoundError, IOError):
print('Wrong file or file path.')
return
data.to_hdf(destination, file.strip('.csv') , mode='a', format='fixed')
return destination
|
<commit_before><commit_msg>Add function to save .csv data as HDF files.
The module provides functionality to save single .csv files as single
HDF file. It also provides mechanism to store the whole data into a
single HDFStore indexed using their filenames.<commit_after>
|
import pandas as pd
import numpy as np
import os
import glob
def csvtohdf(source, destination):
"""Takes a csv file as input and storest it as a hdf5 file in the
destnation path.
The hdf5 file is stored in table format.
"""
try:
data = pd.read_csv(source,index_col = 0,parse_dates=True)
except(FileNotFoundError, IOError):
print('Wrong file or file path.')
return
if data.empty:
return
data.to_hdf(destination, 'data', mode='w', format='table')
return
def alltohdf(source, destination='../hdf/'):
"""Performs storing of all .csv file present on source directory in a hdf5
data format and save in destination folder."""
if not os.path.exists(destination):
os.makedirs(destination)
os.chdir(source)
for file in glob.glob("*.csv"):
filename = os.path.basename(file)
print('Saving {}...\n'.format(filename))
csvtohdf(file, destination+filename)
return
def build_hdfstore(source_dir, destination='../hdf/store.h5'):
""" Creates a hdf5 store of all the csv files present in the source directory.
The hdf5 store is placed in the destination path.
param:
source_dir: The source directory containing the csv files.
destination: The path for the hdf5 store.
returns:
destination: The path for the hdf5 store.
"""
# Delete destination file if it exists. If destination is not deleted the
# hdf contents are appended to the file which causes data consistency problems.
try:
os.remove(destination)
except OSError:
pass
os.chdir(source_dir)
for file in glob.glob("*.csv"):
print('Appending {}.csv to hdfstore...\n'.format(file))
try:
data = pd.read_csv(file,index_col = 0,parse_dates=True)
except(FileNotFoundError, IOError):
print('Wrong file or file path.')
return
data.to_hdf(destination, file.strip('.csv') , mode='a', format='fixed')
return destination
|
Add function to save .csv data as HDF files.
The module provides functionality to save single .csv files as single
HDF file. It also provides mechanism to store the whole data into a
single HDFStore indexed using their filenames.import pandas as pd
import numpy as np
import os
import glob
def csvtohdf(source, destination):
"""Takes a csv file as input and storest it as a hdf5 file in the
destnation path.
The hdf5 file is stored in table format.
"""
try:
data = pd.read_csv(source,index_col = 0,parse_dates=True)
except(FileNotFoundError, IOError):
print('Wrong file or file path.')
return
if data.empty:
return
data.to_hdf(destination, 'data', mode='w', format='table')
return
def alltohdf(source, destination='../hdf/'):
"""Performs storing of all .csv file present on source directory in a hdf5
data format and save in destination folder."""
if not os.path.exists(destination):
os.makedirs(destination)
os.chdir(source)
for file in glob.glob("*.csv"):
filename = os.path.basename(file)
print('Saving {}...\n'.format(filename))
csvtohdf(file, destination+filename)
return
def build_hdfstore(source_dir, destination='../hdf/store.h5'):
""" Creates a hdf5 store of all the csv files present in the source directory.
The hdf5 store is placed in the destination path.
param:
source_dir: The source directory containing the csv files.
destination: The path for the hdf5 store.
returns:
destination: The path for the hdf5 store.
"""
# Delete destination file if it exists. If destination is not deleted the
# hdf contents are appended to the file which causes data consistency problems.
try:
os.remove(destination)
except OSError:
pass
os.chdir(source_dir)
for file in glob.glob("*.csv"):
print('Appending {}.csv to hdfstore...\n'.format(file))
try:
data = pd.read_csv(file,index_col = 0,parse_dates=True)
except(FileNotFoundError, IOError):
print('Wrong file or file path.')
return
data.to_hdf(destination, file.strip('.csv') , mode='a', format='fixed')
return destination
|
<commit_before><commit_msg>Add function to save .csv data as HDF files.
The module provides functionality to save single .csv files as single
HDF file. It also provides mechanism to store the whole data into a
single HDFStore indexed using their filenames.<commit_after>import pandas as pd
import numpy as np
import os
import glob
def csvtohdf(source, destination):
"""Takes a csv file as input and storest it as a hdf5 file in the
destnation path.
The hdf5 file is stored in table format.
"""
try:
data = pd.read_csv(source,index_col = 0,parse_dates=True)
except(FileNotFoundError, IOError):
print('Wrong file or file path.')
return
if data.empty:
return
data.to_hdf(destination, 'data', mode='w', format='table')
return
def alltohdf(source, destination='../hdf/'):
"""Performs storing of all .csv file present on source directory in a hdf5
data format and save in destination folder."""
if not os.path.exists(destination):
os.makedirs(destination)
os.chdir(source)
for file in glob.glob("*.csv"):
filename = os.path.basename(file)
print('Saving {}...\n'.format(filename))
csvtohdf(file, destination+filename)
return
def build_hdfstore(source_dir, destination='../hdf/store.h5'):
""" Creates a hdf5 store of all the csv files present in the source directory.
The hdf5 store is placed in the destination path.
param:
source_dir: The source directory containing the csv files.
destination: The path for the hdf5 store.
returns:
destination: The path for the hdf5 store.
"""
# Delete destination file if it exists. If destination is not deleted the
# hdf contents are appended to the file which causes data consistency problems.
try:
os.remove(destination)
except OSError:
pass
os.chdir(source_dir)
for file in glob.glob("*.csv"):
print('Appending {}.csv to hdfstore...\n'.format(file))
try:
data = pd.read_csv(file,index_col = 0,parse_dates=True)
except(FileNotFoundError, IOError):
print('Wrong file or file path.')
return
data.to_hdf(destination, file.strip('.csv') , mode='a', format='fixed')
return destination
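A short usage sketch, assuming a folder of per-symbol OHLC csv files; the paths and the 'NABIL' key are placeholders. One caveat worth knowing: file.strip('.csv') strips a character set rather than a suffix, so store keys stay clean only when the base filename does not begin or end with any of the characters '.', 'c', 's' or 'v' (a file named vcs.csv, for example, would be mangled).

import pandas as pd

# build the combined store from a directory of csv files (paths are placeholders)
store_path = build_hdfstore('../csv/', destination='../hdf/store.h5')

# read one symbol back; 'NABIL' is a hypothetical key derived from NABIL.csv
df = pd.read_hdf(store_path, 'NABIL')
print(df.head())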
|
|
801da273838654e7233a8110a252940c26aca208
|
src/conditionals/exercise3.py
|
src/conditionals/exercise3.py
|
# Rewrite fahr cel converter with try and except
inp = raw_input('Enter Fahrenheit Temperature: ')
try:
fahr = float(inp)
cel = (fahr - 32.0) * 5.0 / 9.0
print cel
except:
print 'Please enter a number'
exit(0)
|
Rewrite fahr cel converter with try and except
|
Rewrite fahr cel converter with try and except
|
Python
|
mit
|
let42/python-course
|
Rewrite fahr cel converter with try and except
|
# Rewrite fahr cel converter with try and except
inp = raw_input('Enter Fahrenheit Temperature: ')
try:
fahr = float(inp)
cel = (fahr - 32.0) * 5.0 / 9.0
print cel
except:
print 'Please enter a number'
exit(0)
|
<commit_before><commit_msg>Rewrite fahr cel converter with try and except<commit_after>
|
# Rewrite fahr cel converter with try and except
inp = raw_input('Enter Fahrenheit Temperature: ')
try:
fahr = float(inp)
cel = (fahr - 32.0) * 5.0 / 9.0
print cel
except:
print 'Please enter a number'
exit(0)
|
Rewrite fahr cel converter with try and except# Rewrite fahr cel converter with try and except
inp = raw_input('Enter Fahrenheit Temperature: ')
try:
fahr = float(inp)
cel = (fahr - 32.0) * 5.0 / 9.0
print cel
except:
print 'Please enter a number'
exit(0)
|
<commit_before><commit_msg>Rewrite fahr cel converter with try and except<commit_after># Rewrite fahr cel converter with try and except
inp = raw_input('Enter Fahrenheit Temperature: ')
try:
fahr = float(inp)
cel = (fahr - 32.0) * 5.0 / 9.0
print cel
except:
print 'Please enter a number'
exit(0)
|
|
847682bfe21eeb9475f96cdbacc5bd873af095d3
|
src/locations/migrations/0005_auto_20161024_2257.py
|
src/locations/migrations/0005_auto_20161024_2257.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-10-24 19:57
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('locations', '0004_auto_20160828_2114'),
]
operations = [
migrations.AlterModelOptions(
name='district',
options={'ordering': ['name'], 'verbose_name': 'District', 'verbose_name_plural': 'Districts'},
),
migrations.AlterModelOptions(
name='location',
options={'ordering': ['name'], 'verbose_name': 'Location', 'verbose_name_plural': 'Locations'},
),
]
|
Add migrations for location and district ordering
|
Add migrations for location and district ordering
|
Python
|
mit
|
mrts/foodbank-campaign,mrts/foodbank-campaign,mrts/foodbank-campaign,mrts/foodbank-campaign
|
Add migrations for location and district ordering
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-10-24 19:57
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('locations', '0004_auto_20160828_2114'),
]
operations = [
migrations.AlterModelOptions(
name='district',
options={'ordering': ['name'], 'verbose_name': 'District', 'verbose_name_plural': 'Districts'},
),
migrations.AlterModelOptions(
name='location',
options={'ordering': ['name'], 'verbose_name': 'Location', 'verbose_name_plural': 'Locations'},
),
]
|
<commit_before><commit_msg>Add migrations for location and district ordering<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-10-24 19:57
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('locations', '0004_auto_20160828_2114'),
]
operations = [
migrations.AlterModelOptions(
name='district',
options={'ordering': ['name'], 'verbose_name': 'District', 'verbose_name_plural': 'Districts'},
),
migrations.AlterModelOptions(
name='location',
options={'ordering': ['name'], 'verbose_name': 'Location', 'verbose_name_plural': 'Locations'},
),
]
|
Add migrations for location and district ordering# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-10-24 19:57
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('locations', '0004_auto_20160828_2114'),
]
operations = [
migrations.AlterModelOptions(
name='district',
options={'ordering': ['name'], 'verbose_name': 'District', 'verbose_name_plural': 'Districts'},
),
migrations.AlterModelOptions(
name='location',
options={'ordering': ['name'], 'verbose_name': 'Location', 'verbose_name_plural': 'Locations'},
),
]
|
<commit_before><commit_msg>Add migrations for location and district ordering<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-10-24 19:57
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('locations', '0004_auto_20160828_2114'),
]
operations = [
migrations.AlterModelOptions(
name='district',
options={'ordering': ['name'], 'verbose_name': 'District', 'verbose_name_plural': 'Districts'},
),
migrations.AlterModelOptions(
name='location',
options={'ordering': ['name'], 'verbose_name': 'Location', 'verbose_name_plural': 'Locations'},
),
]
|
|
f6e5d7134e1510211b7cd4cc5d87f69b7db98d5d
|
telemetry/telemetry/page/actions/action_runner_unittest.py
|
telemetry/telemetry/page/actions/action_runner_unittest.py
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.core.backends.chrome import tracing_backend
from telemetry.core.timeline import model
from telemetry.page.actions import action_runner as action_runner_module
# pylint: disable=W0401,W0614
from telemetry.page.actions.all_page_actions import *
from telemetry.unittest import tab_test_case
from telemetry.web_perf import timeline_interaction_record as tir_module
class ActionRunnerTest(tab_test_case.TabTestCase):
def testIssuingInteractionRecord(self):
action_runner = action_runner_module.ActionRunner(self._tab)
self.Navigate('interaction_enabled_page.html')
action_runner.RunAction(WaitAction({'seconds': 1}))
self._browser.StartTracing(tracing_backend.DEFAULT_TRACE_CATEGORIES)
action_runner.BeginInteraction('TestInteraction', [tir_module.IS_SMOOTH])
action_runner.EndInteraction('TestInteraction', [tir_module.IS_SMOOTH])
trace_data = self._browser.StopTracing()
timeline_model = model.TimelineModel(trace_data)
records = []
renderer_thread = timeline_model.GetRendererThreadFromTab(self._tab)
for event in renderer_thread.async_slices:
if not tir_module.IsTimelineInteractionRecord(event.name):
continue
records.append(tir_module.TimelineInteractionRecord.FromEvent(event))
self.assertEqual(1, len(records),
'Fail to issue the interaction record on tracing timeline.'
' Trace data:\n%s' % repr(trace_data.EventData()))
self.assertEqual('TestInteraction', records[0].logical_name)
self.assertTrue(records[0].is_smooth)
|
Add unittests for action_runner.BeginInteraction and action_runner.EndInteraction.
|
Add unittests for action_runner.BeginInteraction and action_runner.EndInteraction.
This is a reland of https://codereview.chromium.org/294943006 after it's reverted in
https://codereview.chromium.org/284183014/.
BUG=368767
Review URL: https://codereview.chromium.org/299443017
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@272782 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
benschmaus/catapult,catapult-project/catapult,sahiljain/catapult,SummerLW/Perf-Insight-Report,sahiljain/catapult,benschmaus/catapult,benschmaus/catapult,catapult-project/catapult-csm,sahiljain/catapult,catapult-project/catapult-csm,catapult-project/catapult,benschmaus/catapult,SummerLW/Perf-Insight-Report,benschmaus/catapult,benschmaus/catapult,sahiljain/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult,SummerLW/Perf-Insight-Report,SummerLW/Perf-Insight-Report,catapult-project/catapult,benschmaus/catapult,catapult-project/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult-csm,catapult-project/catapult-csm,catapult-project/catapult-csm,sahiljain/catapult,catapult-project/catapult-csm,catapult-project/catapult-csm,catapult-project/catapult,catapult-project/catapult,sahiljain/catapult
|
Add unittests for action_runner.BeginInteraction and action_runner.EndInteraction.
This is a reland of https://codereview.chromium.org/294943006 after it's reverted in
https://codereview.chromium.org/284183014/.
BUG=368767
Review URL: https://codereview.chromium.org/299443017
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@272782 0039d316-1c4b-4281-b951-d872f2087c98
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.core.backends.chrome import tracing_backend
from telemetry.core.timeline import model
from telemetry.page.actions import action_runner as action_runner_module
# pylint: disable=W0401,W0614
from telemetry.page.actions.all_page_actions import *
from telemetry.unittest import tab_test_case
from telemetry.web_perf import timeline_interaction_record as tir_module
class ActionRunnerTest(tab_test_case.TabTestCase):
def testIssuingInteractionRecord(self):
action_runner = action_runner_module.ActionRunner(self._tab)
self.Navigate('interaction_enabled_page.html')
action_runner.RunAction(WaitAction({'seconds': 1}))
self._browser.StartTracing(tracing_backend.DEFAULT_TRACE_CATEGORIES)
action_runner.BeginInteraction('TestInteraction', [tir_module.IS_SMOOTH])
action_runner.EndInteraction('TestInteraction', [tir_module.IS_SMOOTH])
trace_data = self._browser.StopTracing()
timeline_model = model.TimelineModel(trace_data)
records = []
renderer_thread = timeline_model.GetRendererThreadFromTab(self._tab)
for event in renderer_thread.async_slices:
if not tir_module.IsTimelineInteractionRecord(event.name):
continue
records.append(tir_module.TimelineInteractionRecord.FromEvent(event))
self.assertEqual(1, len(records),
'Fail to issue the interaction record on tracing timeline.'
' Trace data:\n%s' % repr(trace_data.EventData()))
self.assertEqual('TestInteraction', records[0].logical_name)
self.assertTrue(records[0].is_smooth)
|
<commit_before><commit_msg>Add unittests for action_runner.BeginInteraction and action_runner.EndInteraction.
This is a reland of https://codereview.chromium.org/294943006 after it's reverted in
https://codereview.chromium.org/284183014/.
BUG=368767
Review URL: https://codereview.chromium.org/299443017
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@272782 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.core.backends.chrome import tracing_backend
from telemetry.core.timeline import model
from telemetry.page.actions import action_runner as action_runner_module
# pylint: disable=W0401,W0614
from telemetry.page.actions.all_page_actions import *
from telemetry.unittest import tab_test_case
from telemetry.web_perf import timeline_interaction_record as tir_module
class ActionRunnerTest(tab_test_case.TabTestCase):
def testIssuingInteractionRecord(self):
action_runner = action_runner_module.ActionRunner(self._tab)
self.Navigate('interaction_enabled_page.html')
action_runner.RunAction(WaitAction({'seconds': 1}))
self._browser.StartTracing(tracing_backend.DEFAULT_TRACE_CATEGORIES)
action_runner.BeginInteraction('TestInteraction', [tir_module.IS_SMOOTH])
action_runner.EndInteraction('TestInteraction', [tir_module.IS_SMOOTH])
trace_data = self._browser.StopTracing()
timeline_model = model.TimelineModel(trace_data)
records = []
renderer_thread = timeline_model.GetRendererThreadFromTab(self._tab)
for event in renderer_thread.async_slices:
if not tir_module.IsTimelineInteractionRecord(event.name):
continue
records.append(tir_module.TimelineInteractionRecord.FromEvent(event))
self.assertEqual(1, len(records),
'Fail to issue the interaction record on tracing timeline.'
' Trace data:\n%s' % repr(trace_data.EventData()))
self.assertEqual('TestInteraction', records[0].logical_name)
self.assertTrue(records[0].is_smooth)
|
Add unittests for action_runner.BeginInteraction and action_runner.EndInteraction.
This is a reland of https://codereview.chromium.org/294943006 after it's reverted in
https://codereview.chromium.org/284183014/.
BUG=368767
Review URL: https://codereview.chromium.org/299443017
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@272782 0039d316-1c4b-4281-b951-d872f2087c98# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.core.backends.chrome import tracing_backend
from telemetry.core.timeline import model
from telemetry.page.actions import action_runner as action_runner_module
# pylint: disable=W0401,W0614
from telemetry.page.actions.all_page_actions import *
from telemetry.unittest import tab_test_case
from telemetry.web_perf import timeline_interaction_record as tir_module
class ActionRunnerTest(tab_test_case.TabTestCase):
def testIssuingInteractionRecord(self):
action_runner = action_runner_module.ActionRunner(self._tab)
self.Navigate('interaction_enabled_page.html')
action_runner.RunAction(WaitAction({'seconds': 1}))
self._browser.StartTracing(tracing_backend.DEFAULT_TRACE_CATEGORIES)
action_runner.BeginInteraction('TestInteraction', [tir_module.IS_SMOOTH])
action_runner.EndInteraction('TestInteraction', [tir_module.IS_SMOOTH])
trace_data = self._browser.StopTracing()
timeline_model = model.TimelineModel(trace_data)
records = []
renderer_thread = timeline_model.GetRendererThreadFromTab(self._tab)
for event in renderer_thread.async_slices:
if not tir_module.IsTimelineInteractionRecord(event.name):
continue
records.append(tir_module.TimelineInteractionRecord.FromEvent(event))
self.assertEqual(1, len(records),
'Fail to issue the interaction record on tracing timeline.'
' Trace data:\n%s' % repr(trace_data.EventData()))
self.assertEqual('TestInteraction', records[0].logical_name)
self.assertTrue(records[0].is_smooth)
|
<commit_before><commit_msg>Add unittests for action_runner.BeginInteraction and action_runner.EndInteraction.
This is a reland of https://codereview.chromium.org/294943006 after it's reverted in
https://codereview.chromium.org/284183014/.
BUG=368767
Review URL: https://codereview.chromium.org/299443017
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@272782 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.core.backends.chrome import tracing_backend
from telemetry.core.timeline import model
from telemetry.page.actions import action_runner as action_runner_module
# pylint: disable=W0401,W0614
from telemetry.page.actions.all_page_actions import *
from telemetry.unittest import tab_test_case
from telemetry.web_perf import timeline_interaction_record as tir_module
class ActionRunnerTest(tab_test_case.TabTestCase):
def testIssuingInteractionRecord(self):
action_runner = action_runner_module.ActionRunner(self._tab)
self.Navigate('interaction_enabled_page.html')
action_runner.RunAction(WaitAction({'seconds': 1}))
self._browser.StartTracing(tracing_backend.DEFAULT_TRACE_CATEGORIES)
action_runner.BeginInteraction('TestInteraction', [tir_module.IS_SMOOTH])
action_runner.EndInteraction('TestInteraction', [tir_module.IS_SMOOTH])
trace_data = self._browser.StopTracing()
timeline_model = model.TimelineModel(trace_data)
records = []
renderer_thread = timeline_model.GetRendererThreadFromTab(self._tab)
for event in renderer_thread.async_slices:
if not tir_module.IsTimelineInteractionRecord(event.name):
continue
records.append(tir_module.TimelineInteractionRecord.FromEvent(event))
self.assertEqual(1, len(records),
'Fail to issue the interaction record on tracing timeline.'
' Trace data:\n%s' % repr(trace_data.EventData()))
self.assertEqual('TestInteraction', records[0].logical_name)
self.assertTrue(records[0].is_smooth)
|
|
cd9315c288aec2e0d50402b5ce8fb830cf5c8957
|
src/ipf/getblockclasses.py
|
src/ipf/getblockclasses.py
|
# -*- coding: utf-8 -*-
import pkgutil
import inspect
import os
from ipfblock.ipfblock import IPFBlock
from ipftype.ipftype import IPFType
def get_classes_from_module(base_class,
is_accepted=lambda x: True):
""" Create dict {"class_name" : class object } for all classes
based on given base_class
is_accepted function checks if a given class needs to be added to the dict
"""
parent_folder, f = os.path.split(os.path.dirname(os.path.abspath(__file__)))
modules = [ cls for iter, cls, ispkg in \
pkgutil.walk_packages([parent_folder,]) ]
classes = dict()
for module_name in modules:
mod = __import__(module_name, fromlist = ["Whatever need for import"])
for name, obj in inspect.getmembers(mod):
if inspect.isclass(obj):
# Don't add base_class to dict
if issubclass(obj, base_class) and obj != base_class:
if is_accepted(obj):
classes[name] = obj
return classes
def get_ipfblock_classes():
""" Create dict {"block_name" : IPFBlock class } for all IPFBlock subclasses
This dict will be used in file loading process
"""
return get_classes_from_module(IPFBlock, lambda x: not x.is_abstract_block)
def get_type_classes():
""" Create dict {"typename" : Type class } for all IPFType subclasses
This dict will be used in file loading process
"""
return get_classes_from_module(IPFType)
|
Move get_classes_from_module reflection functions to separate file
|
Move get_classes_from_module reflection functions to separate file
|
Python
|
lgpl-2.1
|
anton-golubkov/Garland,anton-golubkov/Garland
|
Move get_classes_from_module reflection functions to separate file
|
# -*- coding: utf-8 -*-
import pkgutil
import inspect
import os
from ipfblock.ipfblock import IPFBlock
from ipftype.ipftype import IPFType
def get_classes_from_module(base_class,
is_accepted=lambda x: True):
""" Create dict {"class_name" : class object } for all classes
based on given base_class
is_accepted function checks if a given class needs to be added to the dict
"""
parent_folder, f = os.path.split(os.path.dirname(os.path.abspath(__file__)))
modules = [ cls for iter, cls, ispkg in \
pkgutil.walk_packages([parent_folder,]) ]
classes = dict()
for module_name in modules:
mod = __import__(module_name, fromlist = ["Whatever need for import"])
for name, obj in inspect.getmembers(mod):
if inspect.isclass(obj):
# Don't add base_class to dict
if issubclass(obj, base_class) and obj != base_class:
if is_accepted(obj):
classes[name] = obj
return classes
def get_ipfblock_classes():
""" Create dict {"block_name" : IPFBlock class } for all IPFBlock subclasses
This dict will be used in file loading process
"""
return get_classes_from_module(IPFBlock, lambda x: not x.is_abstract_block)
def get_type_classes():
""" Create dict {"typename" : Type class } for all IPFType subclasses
This dict will be used in file loading process
"""
return get_classes_from_module(IPFType)
|
<commit_before><commit_msg>Move get_classes_from_module reflection functions to separate file<commit_after>
|
# -*- coding: utf-8 -*-
import pkgutil
import inspect
import os
from ipfblock.ipfblock import IPFBlock
from ipftype.ipftype import IPFType
def get_classes_from_module(base_class,
is_accepted=lambda x: True):
""" Create dict {"class_name" : class object } for all classes
based on given base_class
is_accepted function checks if a given class needs to be added to the dict
"""
parent_folder, f = os.path.split(os.path.dirname(os.path.abspath(__file__)))
modules = [ cls for iter, cls, ispkg in \
pkgutil.walk_packages([parent_folder,]) ]
classes = dict()
for module_name in modules:
mod = __import__(module_name, fromlist = ["Whatever need for import"])
for name, obj in inspect.getmembers(mod):
if inspect.isclass(obj):
# Don't add base_class to dict
if issubclass(obj, base_class) and obj != base_class:
if is_accepted(obj):
classes[name] = obj
return classes
def get_ipfblock_classes():
""" Create dict {"block_name" : IPFBlock class } for all IPFBlock subclasses
This dict will be used in file loading process
"""
return get_classes_from_module(IPFBlock, lambda x: not x.is_abstract_block)
def get_type_classes():
""" Create dict {"typename" : Type class } for all IPFType subclasses
This dict will be used in file loading process
"""
return get_classes_from_module(IPFType)
|
Move get_classes_from_module reflection functions to separate file# -*- coding: utf-8 -*-
import pkgutil
import inspect
import os
from ipfblock.ipfblock import IPFBlock
from ipftype.ipftype import IPFType
def get_classes_from_module(base_class,
is_accepted=lambda x: True):
""" Create dict {"class_name" : class object } for all classes
based on given base_class
is_accepted function checks if a given class needs to be added to the dict
"""
parent_folder, f = os.path.split(os.path.dirname(os.path.abspath(__file__)))
modules = [ cls for iter, cls, ispkg in \
pkgutil.walk_packages([parent_folder,]) ]
classes = dict()
for module_name in modules:
mod = __import__(module_name, fromlist = ["Whatever need for import"])
for name, obj in inspect.getmembers(mod):
if inspect.isclass(obj):
# Don't add base_class to dict
if issubclass(obj, base_class) and obj != base_class:
if is_accepted(obj):
classes[name] = obj
return classes
def get_ipfblock_classes():
""" Create dict {"block_name" : IPFBlock class } for all IPFBlock subclasses
This dict will be used in file loading process
"""
return get_classes_from_module(IPFBlock, lambda x: not x.is_abstract_block)
def get_type_classes():
""" Create dict {"typename" : Type class } for all IPFType subclasses
This dict will be used in file loading process
"""
return get_classes_from_module(IPFType)
|
<commit_before><commit_msg>Move get_classes_from_module reflection functions to separate file<commit_after># -*- coding: utf-8 -*-
import pkgutil
import inspect
import os
from ipfblock.ipfblock import IPFBlock
from ipftype.ipftype import IPFType
def get_classes_from_module(base_class,
is_accepted=lambda x: True):
""" Create dict {"class_name" : class object } for all classes
based on given base_class
is_accepted function checks if a given class needs to be added to the dict
"""
parent_folder, f = os.path.split(os.path.dirname(os.path.abspath(__file__)))
modules = [ cls for iter, cls, ispkg in \
pkgutil.walk_packages([parent_folder,]) ]
classes = dict()
for module_name in modules:
mod = __import__(module_name, fromlist = ["Whatever need for import"])
for name, obj in inspect.getmembers(mod):
if inspect.isclass(obj):
# Don't add base_class to dict
if issubclass(obj, base_class) and obj != base_class:
if is_accepted(obj):
classes[name] = obj
return classes
def get_ipfblock_classes():
""" Create dict {"block_name" : IPFBlock class } for all IPFBlock subclasses
This dict will be used in file loading process
"""
return get_classes_from_module(IPFBlock, lambda x: not x.is_abstract_block)
def get_type_classes():
""" Create dict {"typename" : Type class } for all IPFType subclasses
This dict will be used in file loading process
"""
return get_classes_from_module(IPFType)
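A short sketch of how these lookup tables might be consumed when loading a saved file; the block name 'Add' and the argument-free constructor are assumptions for illustration, not taken from the actual codebase:
# Illustrative only -- 'Add' and the no-argument constructor are assumed.
ipfblock_classes = get_ipfblock_classes()
type_classes = get_type_classes()
block_cls = ipfblock_classes.get('Add')   # class name as it would appear in a saved file
if block_cls is None:
    raise ValueError('Unknown block type: Add')
block = block_cls()                       # assumes blocks can be built without arguments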
|
|
1b7f6ca19e395f2285556ae43c97eb761dc9e565
|
candidates/management/commands/candidates_strip_ip_addresses.py
|
candidates/management/commands/candidates_strip_ip_addresses.py
|
from candidates.popit import PopItApiMixin, popit_unwrap_pagination
from django.core.management.base import BaseCommand
class Command(PopItApiMixin, BaseCommand):
def handle(self, **options):
for person in popit_unwrap_pagination(
self.api.persons,
per_page=100
):
print "Stripping IP addresses from {name} ({id})".format(
**person
)
for version in person.get('versions', []):
version.pop('ip', None)
self.api.persons(person['id']).put(person)
|
Add a command to strip the user IP addresses from PopIt
|
Add a command to strip the user IP addresses from PopIt
|
Python
|
agpl-3.0
|
openstate/yournextrepresentative,mysociety/yournextmp-popit,datamade/yournextmp-popit,DemocracyClub/yournextrepresentative,datamade/yournextmp-popit,neavouli/yournextrepresentative,mysociety/yournextrepresentative,openstate/yournextrepresentative,mhl/yournextmp-popit,datamade/yournextmp-popit,mysociety/yournextmp-popit,mysociety/yournextmp-popit,mysociety/yournextrepresentative,mysociety/yournextmp-popit,DemocracyClub/yournextrepresentative,datamade/yournextmp-popit,neavouli/yournextrepresentative,DemocracyClub/yournextrepresentative,YoQuieroSaber/yournextrepresentative,YoQuieroSaber/yournextrepresentative,openstate/yournextrepresentative,mysociety/yournextrepresentative,YoQuieroSaber/yournextrepresentative,mhl/yournextmp-popit,mysociety/yournextmp-popit,mysociety/yournextrepresentative,datamade/yournextmp-popit,openstate/yournextrepresentative,YoQuieroSaber/yournextrepresentative,mhl/yournextmp-popit,neavouli/yournextrepresentative,YoQuieroSaber/yournextrepresentative,mysociety/yournextrepresentative,neavouli/yournextrepresentative,neavouli/yournextrepresentative,openstate/yournextrepresentative
|
Add a command to strip the user IP addresses from PopIt
|
from candidates.popit import PopItApiMixin, popit_unwrap_pagination
from django.core.management.base import BaseCommand
class Command(PopItApiMixin, BaseCommand):
def handle(self, **options):
for person in popit_unwrap_pagination(
self.api.persons,
per_page=100
):
print "Stripping IP addresses from {name} ({id})".format(
**person
)
for version in person.get('versions', []):
version.pop('ip', None)
self.api.persons(person['id']).put(person)
|
<commit_before><commit_msg>Add a command to strip the user IP addresses from PopIt<commit_after>
|
from candidates.popit import PopItApiMixin, popit_unwrap_pagination
from django.core.management.base import BaseCommand
class Command(PopItApiMixin, BaseCommand):
def handle(self, **options):
for person in popit_unwrap_pagination(
self.api.persons,
per_page=100
):
print "Stripping IP addresses from {name} ({id})".format(
**person
)
for version in person.get('versions', []):
version.pop('ip', None)
self.api.persons(person['id']).put(person)
|
Add a command to strip the user IP addresses from PopItfrom candidates.popit import PopItApiMixin, popit_unwrap_pagination
from django.core.management.base import BaseCommand
class Command(PopItApiMixin, BaseCommand):
def handle(self, **options):
for person in popit_unwrap_pagination(
self.api.persons,
per_page=100
):
print "Stripping IP addresses from {name} ({id})".format(
**person
)
for version in person.get('versions', []):
version.pop('ip', None)
self.api.persons(person['id']).put(person)
|
<commit_before><commit_msg>Add a command to strip the user IP addresses from PopIt<commit_after>from candidates.popit import PopItApiMixin, popit_unwrap_pagination
from django.core.management.base import BaseCommand
class Command(PopItApiMixin, BaseCommand):
def handle(self, **options):
for person in popit_unwrap_pagination(
self.api.persons,
per_page=100
):
print "Stripping IP addresses from {name} ({id})".format(
**person
)
for version in person.get('versions', []):
version.pop('ip', None)
self.api.persons(person['id']).put(person)
|
|
97dbd021800f323aea6e1e84d464a77b5832fd75
|
tests/test_sorting_and_searching/test_find_kth_smallest.py
|
tests/test_sorting_and_searching/test_find_kth_smallest.py
|
import unittest
from aids.sorting_and_searching.find_kth_smallest import find_kth_smallest
class FindKthSmallestTestCase(unittest.TestCase):
'''
Unit tests for find_kth_smallest
'''
def setUp(self):
self.example_1 = [54,26,93,17,77,31,44,55,20]
def test_find_kth_smallest(self):
self.assertEqual(find_kth_smallest(self.example_1, 0, len(self.example_1) - 1, 3), 26)
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
Add unit tests for find_kth_smallest
|
Add unit tests for find_kth_smallest
|
Python
|
mit
|
ueg1990/aids
|
Add unit tests for find_kth_smallest
|
import unittest
from aids.sorting_and_searching.find_kth_smallest import find_kth_smallest
class FindKthSmallestTestCase(unittest.TestCase):
'''
Unit tests for find_kth_smallest
'''
def setUp(self):
self.example_1 = [54,26,93,17,77,31,44,55,20]
def test_find_kth_smallest(self):
self.assertEqual(find_kth_smallest(self.example_1, 0, len(self.example_1) - 1, 3), 26)
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add unit tests for find_kth_smallest<commit_after>
|
import unittest
from aids.sorting_and_searching.find_kth_smallest import find_kth_smallest
class FindKthSmallestTestCase(unittest.TestCase):
'''
Unit tests for find_kth_smallest
'''
def setUp(self):
self.example_1 = [54,26,93,17,77,31,44,55,20]
def test_find_kth_smallest(self):
self.assertEqual(find_kth_smallest(self.example_1, 0, len(self.example_1) - 1, 3), 26)
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
Add unit tests for find_kth_smallestimport unittest
from aids.sorting_and_searching.find_kth_smallest import find_kth_smallest
class FindKthSmallestTestCase(unittest.TestCase):
'''
Unit tests for find_kth_smallest
'''
def setUp(self):
self.example_1 = [54,26,93,17,77,31,44,55,20]
def test_find_kth_smallest(self):
self.assertEqual(find_kth_smallest(self.example_1, 0, len(self.example_1) - 1, 3), 26)
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add unit tests for find_kth_smallest<commit_after>import unittest
from aids.sorting_and_searching.find_kth_smallest import find_kth_smallest
class FindKthSmallestTestCase(unittest.TestCase):
'''
Unit tests for find_kth_smallest
'''
def setUp(self):
self.example_1 = [54,26,93,17,77,31,44,55,20]
def test_find_kth_smallest(self):
self.assertEqual(find_kth_smallest(self.example_1, 0, len(self.example_1) - 1, 3), 26)
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
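The function under test is not shown in this commit; a quickselect-style sketch that would satisfy the assertion above (k is 1-indexed over arr[first..last]) might look like this (hypothetical, not the actual aids implementation):
# Hypothetical quickselect sketch consistent with the test above (1-indexed k).
def find_kth_smallest(arr, first, last, k):
    pivot = arr[last]
    store = first
    for i in range(first, last):
        if arr[i] < pivot:
            arr[i], arr[store] = arr[store], arr[i]
            store += 1
    arr[store], arr[last] = arr[last], arr[store]
    rank = store - first + 1      # pivot's rank within arr[first..last]
    if k == rank:
        return arr[store]
    if k < rank:
        return find_kth_smallest(arr, first, store - 1, k)
    return find_kth_smallest(arr, store + 1, last, k - rank)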
|
|
6cd0dc04b4389fd5e0829acd65a332382d4db635
|
fabric_colors/postgis/__init__.py
|
fabric_colors/postgis/__init__.py
|
from fabric.api import run, local
from fabric_colors.deployment import _env_get
def create_postgis_template(target):
_env_get(target)
command_string = """
POSTGIS_SQL_PATH=`pg_config --sharedir`/contrib/postgis-2.0
# Creating the template spatial database.
createdb -U postgres -E UTF8 template_postgis
createlang -U postgres -d template_postgis plpgsql # Adding PLPGSQL language support.
# Allows non-superusers the ability to create from this template
psql -U postgres -d postgres -c "UPDATE pg_database SET datistemplate='true' WHERE datname='template_postgis';"
# Loading the PostGIS SQL routines
psql -U postgres -d template_postgis -f $POSTGIS_SQL_PATH/postgis.sql
psql -U postgres -d template_postgis -f $POSTGIS_SQL_PATH/spatial_ref_sys.sql
# Enabling users to alter spatial tables.
psql -U postgres -d template_postgis -c "GRANT ALL ON geometry_columns TO PUBLIC;"
psql -U postgres -d template_postgis -c "GRANT ALL ON geography_columns TO PUBLIC;"
psql -U postgres -d template_postgis -c "GRANT ALL ON spatial_ref_sys TO PUBLIC;"
"""
if target == "localhost":
local(command_string)
else:
run(command_string)
|
Add in template_postgis creation script
|
Add in template_postgis creation script
|
Python
|
bsd-2-clause
|
fabric-colors/fabric-colors,fabric-colors/fabric-colors
|
Add in template_postgis creation script
|
from fabric.api import run, local
from fabric_colors.deployment import _env_get
def create_postgis_template(target):
_env_get(target)
command_string = """
POSTGIS_SQL_PATH=`pg_config --sharedir`/contrib/postgis-2.0
# Creating the template spatial database.
createdb -U postgres -E UTF8 template_postgis
createlang -U postgres -d template_postgis plpgsql # Adding PLPGSQL language support.
# Allows non-superusers the ability to create from this template
psql -U postgres -d postgres -c "UPDATE pg_database SET datistemplate='true' WHERE datname='template_postgis';"
# Loading the PostGIS SQL routines
psql -U postgres -d template_postgis -f $POSTGIS_SQL_PATH/postgis.sql
psql -U postgres -d template_postgis -f $POSTGIS_SQL_PATH/spatial_ref_sys.sql
# Enabling users to alter spatial tables.
psql -U postgres -d template_postgis -c "GRANT ALL ON geometry_columns TO PUBLIC;"
psql -U postgres -d template_postgis -c "GRANT ALL ON geography_columns TO PUBLIC;"
psql -U postgres -d template_postgis -c "GRANT ALL ON spatial_ref_sys TO PUBLIC;"
"""
if target == "localhost":
local(command_string)
else:
run(command_string)
|
<commit_before><commit_msg>Add in template_postgis creation script<commit_after>
|
from fabric.api import run, local
from fabric_colors.deployment import _env_get
def create_postgis_template(target):
_env_get(target)
command_string = """
POSTGIS_SQL_PATH=`pg_config --sharedir`/contrib/postgis-2.0
# Creating the template spatial database.
createdb -U postgres -E UTF8 template_postgis
createlang -U postgres -d template_postgis plpgsql # Adding PLPGSQL language support.
# Allows non-superusers the ability to create from this template
psql -U postgres -d postgres -c "UPDATE pg_database SET datistemplate='true' WHERE datname='template_postgis';"
# Loading the PostGIS SQL routines
psql -U postgres -d template_postgis -f $POSTGIS_SQL_PATH/postgis.sql
psql -U postgres -d template_postgis -f $POSTGIS_SQL_PATH/spatial_ref_sys.sql
# Enabling users to alter spatial tables.
psql -U postgres -d template_postgis -c "GRANT ALL ON geometry_columns TO PUBLIC;"
psql -U postgres -d template_postgis -c "GRANT ALL ON geography_columns TO PUBLIC;"
psql -U postgres -d template_postgis -c "GRANT ALL ON spatial_ref_sys TO PUBLIC;"
"""
if target == "localhost":
local(command_string)
else:
run(command_string)
|
Add in template_postgis creation scriptfrom fabric.api import run, local
from fabric_colors.deployment import _env_get
def create_postgis_template(target):
_env_get(target)
command_string = """
POSTGIS_SQL_PATH=`pg_config --sharedir`/contrib/postgis-2.0
# Creating the template spatial database.
createdb -U postgres -E UTF8 template_postgis
createlang -U postgres -d template_postgis plpgsql # Adding PLPGSQL language support.
# Allows non-superusers the ability to create from this template
psql -U postgres -d postgres -c "UPDATE pg_database SET datistemplate='true' WHERE datname='template_postgis';"
# Loading the PostGIS SQL routines
psql -U postgres -d template_postgis -f $POSTGIS_SQL_PATH/postgis.sql
psql -U postgres -d template_postgis -f $POSTGIS_SQL_PATH/spatial_ref_sys.sql
# Enabling users to alter spatial tables.
psql -U postgres -d template_postgis -c "GRANT ALL ON geometry_columns TO PUBLIC;"
psql -U postgres -d template_postgis -c "GRANT ALL ON geography_columns TO PUBLIC;"
psql -U postgres -d template_postgis -c "GRANT ALL ON spatial_ref_sys TO PUBLIC;"
"""
if target == "localhost":
local(command_string)
else:
run(command_string)
|
<commit_before><commit_msg>Add in template_postgis creation script<commit_after>from fabric.api import run, local
from fabric_colors.deployment import _env_get
def create_postgis_template(target):
_env_get(target)
command_string = """
POSTGIS_SQL_PATH=`pg_config --sharedir`/contrib/postgis-2.0
# Creating the template spatial database.
createdb -U postgres -E UTF8 template_postgis
createlang -U postgres -d template_postgis plpgsql # Adding PLPGSQL language support.
# Allows non-superusers the ability to create from this template
psql -U postgres -d postgres -c "UPDATE pg_database SET datistemplate='true' WHERE datname='template_postgis';"
# Loading the PostGIS SQL routines
psql -U postgres -d template_postgis -f $POSTGIS_SQL_PATH/postgis.sql
psql -U postgres -d template_postgis -f $POSTGIS_SQL_PATH/spatial_ref_sys.sql
# Enabling users to alter spatial tables.
psql -U postgres -d template_postgis -c "GRANT ALL ON geometry_columns TO PUBLIC;"
psql -U postgres -d template_postgis -c "GRANT ALL ON geography_columns TO PUBLIC;"
psql -U postgres -d template_postgis -c "GRANT ALL ON spatial_ref_sys TO PUBLIC;"
"""
if target == "localhost":
local(command_string)
else:
run(command_string)
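An invocation sketch for the task above; the 'staging' target name is made up and assumes _env_get can resolve it:
# Invocation sketch (target names are examples only).
from fabric_colors.postgis import create_postgis_template
create_postgis_template('localhost')   # runs the SQL block locally via local()
create_postgis_template('staging')     # runs it on the configured remote host via run()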
|
|
bac22c6486cbd1bd909db44f003e778ab0380761
|
custom/bihar/management/commands/bihar_cleanup_case.py
|
custom/bihar/management/commands/bihar_cleanup_case.py
|
import logging
import csv
from django.core.management import BaseCommand
from casexml.apps.case.models import CommCareCase
from dimagi.utils.couch.database import iter_docs
logger = logging.getLogger('case_cleanup')
logger.setLevel('DEBUG')
MOTECH_ID = "fb6e0b19cbe3ef683a10c4c4766a1ef3"
class Command(BaseCommand):
"""
One time command for cleaning up care-bihar data
"""
def handle(self, *args, **options):
csv_file = csv.writer(open('bihar_case_cleanup.csv', 'wb'))
csv_file.writerow(['case_id', 'old_case_type', 'new_case_type',
'old_owner_id', 'new_owner_id'])
blank_case_ids = [c['id'] for c in
CommCareCase.get_all_cases("care-bihar", case_type="")]
task_case_ids = [c['id'] for c in
CommCareCase.get_all_cases("care-bihar", case_type="task")]
case_ids = set(blank_case_ids) | set(task_case_ids)
to_save = []
for i, doc in enumerate(iter_docs(CommCareCase.get_db(), case_ids)):
should_save = False
case = CommCareCase.wrap(doc)
parent = case.parent
csv_row = [case._id, case.type, case.type, case.owner_id, case.owner_id]
if case.type != 'task':
if case.user_id != MOTECH_ID:
logger.info("{case} was not last submitted by motech".format(case=case._id))
continue
case.type = 'task'
csv_row[2] = 'task'
should_save = True
if parent and parent.owner_id != case.owner_id:
case.owner_id = parent.owner_id
csv_row[4] = case.owner_id
should_save = True
if should_save:
to_save.append(case)
csv_file.writerow(csv_row)
if len(to_save) > 25:
CommCareCase.get_db().bulk_save(to_save)
to_save = []
if i % 100 == 0:
logger.info("{current}/{count} cases completed".format(current=i, count=len(case_ids)))
if to_save:
CommCareCase.get_db().bulk_save(to_save)
csv_file.writerow(csv_row)
|
Clean up cases in bihar domain
|
Clean up cases in bihar domain
|
Python
|
bsd-3-clause
|
qedsoftware/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq
|
Clean up cases in bihar domain
|
import logging
import csv
from django.core.management import BaseCommand
from casexml.apps.case.models import CommCareCase
from dimagi.utils.couch.database import iter_docs
logger = logging.getLogger('case_cleanup')
logger.setLevel('DEBUG')
MOTECH_ID = "fb6e0b19cbe3ef683a10c4c4766a1ef3"
class Command(BaseCommand):
"""
One time command for cleaning up care-bihar data
"""
def handle(self, *args, **options):
csv_file = csv.writer(open('bihar_case_cleanup.csv', 'wb'))
csv_file.writerow(['case_id', 'old_case_type', 'new_case_type',
'old_owner_id', 'new_owner_id'])
blank_case_ids = [c['id'] for c in
CommCareCase.get_all_cases("care-bihar", case_type="")]
task_case_ids = [c['id'] for c in
CommCareCase.get_all_cases("care-bihar", case_type="task")]
case_ids = set(blank_case_ids) | set(task_case_ids)
to_save = []
for i, doc in enumerate(iter_docs(CommCareCase.get_db(), case_ids)):
should_save = False
case = CommCareCase.wrap(doc)
parent = case.parent
csv_row = [case._id, case.type, case.type, case.owner_id, case.owner_id]
if case.type != 'task':
if case.user_id != MOTECH_ID:
logger.info("{case} was not last submitted by motech".format(case=case._id))
continue
case.type = 'task'
csv_row[2] = 'task'
should_save = True
if parent and parent.owner_id != case.owner_id:
case.owner_id = parent.owner_id
csv_row[4] = case.owner_id
should_save = True
if should_save:
to_save.append(case)
csv_file.writerow(csv_row)
if len(to_save) > 25:
CommCareCase.get_db().bulk_save(to_save)
to_save = []
if i % 100 == 0:
logger.info("{current}/{count} cases completed".format(current=i, count=len(case_ids)))
if to_save:
CommCareCase.get_db().bulk_save(to_save)
csv_file.writerow(csv_row)
|
<commit_before><commit_msg>Clean up cases in bihar domain<commit_after>
|
import logging
import csv
from django.core.management import BaseCommand
from casexml.apps.case.models import CommCareCase
from dimagi.utils.couch.database import iter_docs
logger = logging.getLogger('case_cleanup')
logger.setLevel('DEBUG')
MOTECH_ID = "fb6e0b19cbe3ef683a10c4c4766a1ef3"
class Command(BaseCommand):
"""
One time command for cleaning up care-bihar data
"""
def handle(self, *args, **options):
csv_file = csv.writer(open('bihar_case_cleanup.csv', 'wb'))
csv_file.writerow(['case_id', 'old_case_type', 'new_case_type',
'old_owner_id', 'new_owner_id'])
blank_case_ids = [c['id'] for c in
CommCareCase.get_all_cases("care-bihar", case_type="")]
task_case_ids = [c['id'] for c in
CommCareCase.get_all_cases("care-bihar", case_type="task")]
case_ids = set(blank_case_ids) | set(task_case_ids)
to_save = []
for i, doc in enumerate(iter_docs(CommCareCase.get_db(), case_ids)):
should_save = False
case = CommCareCase.wrap(doc)
parent = case.parent
csv_row = [case._id, case.type, case.type, case.owner_id, case.owner_id]
if case.type != 'task':
if case.user_id != MOTECH_ID:
logger.info("{case} was not last submitted by motech".format(case=case._id))
continue
case.type = 'task'
csv_row[2] = 'task'
should_save = True
if parent and parent.owner_id != case.owner_id:
case.owner_id = parent.owner_id
csv_row[4] = case.owner_id
should_save = True
if should_save:
to_save.append(case)
csv_file.writerow(csv_row)
if len(to_save) > 25:
CommCareCase.get_db().bulk_save(to_save)
to_save = []
if i % 100 == 0:
logger.info("{current}/{count} cases completed".format(current=i, count=len(case_ids)))
if to_save:
CommCareCase.get_db().bulk_save(to_save)
csv_file.writerow(csv_row)
|
Clean up cases in bihar domainimport logging
import csv
from django.core.management import BaseCommand
from casexml.apps.case.models import CommCareCase
from dimagi.utils.couch.database import iter_docs
logger = logging.getLogger('case_cleanup')
logger.setLevel('DEBUG')
MOTECH_ID = "fb6e0b19cbe3ef683a10c4c4766a1ef3"
class Command(BaseCommand):
"""
One time command for cleaning up care-bihar data
"""
def handle(self, *args, **options):
csv_file = csv.writer(open('bihar_case_cleanup.csv', 'wb'))
csv_file.writerow(['case_id', 'old_case_type', 'new_case_type',
'old_owner_id', 'new_owner_id'])
blank_case_ids = [c['id'] for c in
CommCareCase.get_all_cases("care-bihar", case_type="")]
task_case_ids = [c['id'] for c in
CommCareCase.get_all_cases("care-bihar", case_type="task")]
case_ids = set(blank_case_ids) | set(task_case_ids)
to_save = []
for i, doc in enumerate(iter_docs(CommCareCase.get_db(), case_ids)):
should_save = False
case = CommCareCase.wrap(doc)
parent = case.parent
csv_row = [case._id, case.type, case.type, case.owner_id, case.owner_id]
if case.type != 'task':
if case.user_id != MOTECH_ID:
logger.info("{case} was not last submitted by motech".format(case=case._id))
continue
case.type = 'task'
csv_row[2] = 'task'
should_save = True
if parent and parent.owner_id != case.owner_id:
case.owner_id = parent.owner_id
csv_row[4] = case.owner_id
should_save = True
if should_save:
to_save.append(case)
csv_file.writerow(csv_row)
if len(to_save) > 25:
CommCareCase.get_db().bulk_save(to_save)
to_save = []
if i % 100 == 0:
logger.info("{current}/{count} cases completed".format(current=i, count=len(case_ids)))
if to_save:
CommCareCase.get_db().bulk_save(to_save)
csv_file.writerow(csv_row)
|
<commit_before><commit_msg>Clean up cases in bihar domain<commit_after>import logging
import csv
from django.core.management import BaseCommand
from casexml.apps.case.models import CommCareCase
from dimagi.utils.couch.database import iter_docs
logger = logging.getLogger('case_cleanup')
logger.setLevel('DEBUG')
MOTECH_ID = "fb6e0b19cbe3ef683a10c4c4766a1ef3"
class Command(BaseCommand):
"""
One time command for cleaning up care-bihar data
"""
def handle(self, *args, **options):
csv_file = csv.writer(open('bihar_case_cleanup.csv', 'wb'))
csv_file.writerow(['case_id', 'old_case_type', 'new_case_type',
'old_owner_id', 'new_owner_id'])
blank_case_ids = [c['id'] for c in
CommCareCase.get_all_cases("care-bihar", case_type="")]
task_case_ids = [c['id'] for c in
CommCareCase.get_all_cases("care-bihar", case_type="task")]
case_ids = set(blank_case_ids) | set(task_case_ids)
to_save = []
for i, doc in enumerate(iter_docs(CommCareCase.get_db(), case_ids)):
should_save = False
case = CommCareCase.wrap(doc)
parent = case.parent
csv_row = [case._id, case.type, case.type, case.owner_id, case.owner_id]
if case.type != 'task':
if case.user_id != MOTECH_ID:
logger.info("{case} was not last submitted by motech".format(case=case._id))
continue
case.type = 'task'
csv_row[2] = 'task'
should_save = True
if parent and parent.owner_id != case.owner_id:
case.owner_id = parent.owner_id
csv_row[4] = case.owner_id
should_save = True
if should_save:
to_save.append(case)
csv_file.writerow(csv_row)
if len(to_save) > 25:
CommCareCase.get_db().bulk_save(to_save)
to_save = []
if i % 100 == 0:
logger.info("{current}/{count} cases completed".format(current=i, count=len(case_ids)))
if to_save:
CommCareCase.get_db().bulk_save(to_save)
csv_file.writerow(csv_row)
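The iterate, mutate and bulk-save pattern above generalises; a minimal sketch with hypothetical names, assuming the same iter_docs import, not part of the original command:
# Generic sketch of the batched bulk-save pattern (hypothetical helper).
def batched_bulk_save(db, doc_ids, mutate, batch_size=25):
    pending = []
    for doc in iter_docs(db, doc_ids):
        if mutate(doc):                # mutate returns True when the doc changed
            pending.append(doc)
        if len(pending) >= batch_size:
            db.bulk_save(pending)
            pending = []
    if pending:
        db.bulk_save(pending)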
|
|
e1ceb24517db8710c11a94c589ad117aa46ac8a4
|
kamatis/util.py
|
kamatis/util.py
|
import logging
import os
import sys
def makedirs(path):
isdir = True
exc_info = None
try:
os.makedirs(path)
except OSError as err:
if err.errno == 17: # Path already exists.
if not os.path.isdir(path):
message = '{} already exists and is not a dir.'.format(path)
logging.warning(message)
isdir = False
else:
exc_info = sys.exc_info()
isdir = False
except:
exc_info = sys.exc_info()
isdir = False
if exc_info is not None:
logging.warning('Cannot create {}.'.format(path), exc_info=exc_info)
return isdir
|
Add method for creating a directory
|
Add method for creating a directory
|
Python
|
bsd-3-clause
|
luipugs/kamatis
|
Add method for creating a directory
|
import logging
import os
import sys
def makedirs(path):
isdir = True
exc_info = None
try:
os.makedirs(path)
except OSError as err:
if err.errno == 17: # Path already exists.
if not os.path.isdir(path):
message = '{} already exists and is not a dir.'.format(path)
logging.warning(message)
isdir = False
else:
exc_info = sys.exc_info()
isdir = False
except:
exc_info = sys.exc_info()
isdir = False
if exc_info is not None:
logging.warning('Cannot create {}.'.format(path), exc_info=exc_info)
return isdir
|
<commit_before><commit_msg>Add method for creating a directory<commit_after>
|
import logging
import os
import sys
def makedirs(path):
isdir = True
exc_info = None
try:
os.makedirs(path)
except OSError as err:
if err.errno == 17: # Path already exists.
if not os.path.isdir(path):
message = '{} already exists and is not a dir.'.format(path)
logging.warning(message)
isdir = False
else:
exc_info = sys.exc_info()
isdir = False
except:
exc_info = sys.exc_info()
isdir = False
if exc_info is not None:
logging.warning('Cannot create {}.'.format(path), exc_info=exc_info)
return isdir
|
Add method for creating a directoryimport logging
import os
import sys
def makedirs(path):
isdir = True
exc_info = None
try:
os.makedirs(path)
except OSError as err:
if err.errno == 17: # Path already exists.
if not os.path.isdir(path):
message = '{} already exists and is not a dir.'.format(path)
logging.warning(message)
isdir = False
else:
exc_info = sys.exc_info()
isdir = False
except:
exc_info = sys.exc_info()
isdir = False
if exc_info is not None:
logging.warning('Cannot create {}.'.format(path), exc_info=exc_info)
return isdir
|
<commit_before><commit_msg>Add method for creating a directory<commit_after>import logging
import os
import sys
def makedirs(path):
isdir = True
exc_info = None
try:
os.makedirs(path)
except OSError as err:
if err.errno == 17: # Path already exists.
if not os.path.isdir(path):
message = '{} already exists and is not a dir.'.format(path)
logging.warning(message)
isdir = False
else:
exc_info = sys.exc_info()
isdir = False
except:
exc_info = sys.exc_info()
isdir = False
if exc_info is not None:
logging.warning('Cannot create {}.'.format(path), exc_info=exc_info)
return isdir
|
|
2226980daf685c1ecb7df99b5e1bb646b6cfbd3d
|
geotrek/feedback/migrations/0008_auto_20200526_1419.py
|
geotrek/feedback/migrations/0008_auto_20200526_1419.py
|
# Generated by Django 2.2.12 on 2020-05-26 14:19
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('feedback', '0007_auto_20200407_0728'),
]
operations = [
migrations.AlterField(
model_name='report',
name='activity',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='feedback.ReportActivity', verbose_name='Activity'),
),
migrations.AlterField(
model_name='report',
name='category',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='feedback.ReportCategory', verbose_name='Category'),
),
migrations.AlterField(
model_name='report',
name='problem_magnitude',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='feedback.ReportProblemMagnitude', verbose_name='Problem magnitude'),
),
migrations.AlterField(
model_name='report',
name='status',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='feedback.ReportStatus', verbose_name='Status'),
),
]
|
Add migration test for CASCADE
|
Add migration test for CASCADE
|
Python
|
bsd-2-clause
|
makinacorpus/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin
|
Add migration test for CASCADE
|
# Generated by Django 2.2.12 on 2020-05-26 14:19
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('feedback', '0007_auto_20200407_0728'),
]
operations = [
migrations.AlterField(
model_name='report',
name='activity',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='feedback.ReportActivity', verbose_name='Activity'),
),
migrations.AlterField(
model_name='report',
name='category',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='feedback.ReportCategory', verbose_name='Category'),
),
migrations.AlterField(
model_name='report',
name='problem_magnitude',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='feedback.ReportProblemMagnitude', verbose_name='Problem magnitude'),
),
migrations.AlterField(
model_name='report',
name='status',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='feedback.ReportStatus', verbose_name='Status'),
),
]
|
<commit_before><commit_msg>Add migration test for CASCADE<commit_after>
|
# Generated by Django 2.2.12 on 2020-05-26 14:19
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('feedback', '0007_auto_20200407_0728'),
]
operations = [
migrations.AlterField(
model_name='report',
name='activity',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='feedback.ReportActivity', verbose_name='Activity'),
),
migrations.AlterField(
model_name='report',
name='category',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='feedback.ReportCategory', verbose_name='Category'),
),
migrations.AlterField(
model_name='report',
name='problem_magnitude',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='feedback.ReportProblemMagnitude', verbose_name='Problem magnitude'),
),
migrations.AlterField(
model_name='report',
name='status',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='feedback.ReportStatus', verbose_name='Status'),
),
]
|
Add migration test for CASCADE# Generated by Django 2.2.12 on 2020-05-26 14:19
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('feedback', '0007_auto_20200407_0728'),
]
operations = [
migrations.AlterField(
model_name='report',
name='activity',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='feedback.ReportActivity', verbose_name='Activity'),
),
migrations.AlterField(
model_name='report',
name='category',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='feedback.ReportCategory', verbose_name='Category'),
),
migrations.AlterField(
model_name='report',
name='problem_magnitude',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='feedback.ReportProblemMagnitude', verbose_name='Problem magnitude'),
),
migrations.AlterField(
model_name='report',
name='status',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='feedback.ReportStatus', verbose_name='Status'),
),
]
|
<commit_before><commit_msg>Add migration test for CASCADE<commit_after># Generated by Django 2.2.12 on 2020-05-26 14:19
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('feedback', '0007_auto_20200407_0728'),
]
operations = [
migrations.AlterField(
model_name='report',
name='activity',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='feedback.ReportActivity', verbose_name='Activity'),
),
migrations.AlterField(
model_name='report',
name='category',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='feedback.ReportCategory', verbose_name='Category'),
),
migrations.AlterField(
model_name='report',
name='problem_magnitude',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='feedback.ReportProblemMagnitude', verbose_name='Problem magnitude'),
),
migrations.AlterField(
model_name='report',
name='status',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='feedback.ReportStatus', verbose_name='Status'),
),
]
|
|
14d1defaa72d3bfd23b36bcfdf8c786818f328fb
|
problem_48.py
|
problem_48.py
|
from time import time
def main():
max = 1000
print str(sum([i**i for i in range(1, max+1)]))[-10:]
if __name__ == '__main__':
t = time()
main()
print 'Time:', time() - t
|
Add problem 48, self powers series
|
Add problem 48, self powers series
|
Python
|
mit
|
dimkarakostas/project-euler
|
Add problem 48, self powers series
|
from time import time
def main():
max = 1000
print str(sum([i**i for i in range(1, max+1)]))[-10:]
if __name__ == '__main__':
t = time()
main()
print 'Time:', time() - t
|
<commit_before><commit_msg>Add problem 48, self powers series<commit_after>
|
from time import time
def main():
max = 1000
print str(sum([i**i for i in range(1, max+1)]))[-10:]
if __name__ == '__main__':
t = time()
main()
print 'Time:', time() - t
|
Add problem 48, self powers seriesfrom time import time
def main():
max = 1000
print str(sum([i**i for i in range(1, max+1)]))[-10:]
if __name__ == '__main__':
t = time()
main()
print 'Time:', time() - t
|
<commit_before><commit_msg>Add problem 48, self powers series<commit_after>from time import time
def main():
max = 1000
print str(sum([i**i for i in range(1, max+1)]))[-10:]
if __name__ == '__main__':
t = time()
main()
print 'Time:', time() - t
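An equivalent sketch that avoids building the enormous full sum by keeping only the last ten digits with modular exponentiation; zfill guards the (unlikely) case of leading zeros:
# Same answer via pow(i, i, mod), keeping only the last ten digits.
MOD = 10 ** 10
last_ten = sum(pow(i, i, MOD) for i in range(1, 1001)) % MOD
print(str(last_ten).zfill(10))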
|
|
f0fa84e35e2af96844812cdd876dcfd60d841290
|
caffe2/python/operator_test/sparse_lengths_sum_benchmark.py
|
caffe2/python/operator_test/sparse_lengths_sum_benchmark.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import numpy as np
import datetime
from caffe2.python import core, workspace
def benchmark_sparse_lengths_sum(
dtype_str,
categorical_limit,
embedding_size,
average_len,
batch_size,
iterations):
print('Preparing lookup table. ' + str(datetime.datetime.now()))
# We will use a constant, but non-trivial value so we save initialization
# time.
arr = np.ones([categorical_limit, embedding_size], dtype=np.float32)
arr *= 17.01
dtype_table = {
'float': np.float32,
'float16': np.float16
}
workspace.FeedBlob("X", arr.astype(dtype_table[dtype_str]))
# In order to produce truly random lengths and indices, we will embed a
# Python operator in the net to generate them.
def f(_, outputs):
lengths = np.random.randint(
int(average_len * 0.75),
int(average_len * 1.25),
batch_size).astype(np.int32)
indices = np.random.randint(
0, categorical_limit, np.sum(lengths)).astype(np.int64)
outputs[0].feed(indices)
outputs[1].feed(lengths)
net = core.Net("mynet")
net.Python(f)([], ["indices", "lengths"])
net.SparseLengthsSum(["X", "indices", "lengths"], "Y")
workspace.CreateNet(net)
# Set random seed, so that repeated runs will keep the same sequence of
# random indices.
np.random.seed(1701)
print('Preparation finished. ' + str(datetime.datetime.now()))
workspace.BenchmarkNet(net.Name(), 1, iterations, True)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="minimal benchmark for sparse lengths sum.")
parser.add_argument(
"--dtype", type=str, default="float",
help="The data type for the input lookup table.")
parser.add_argument(
"--embedding_size", type=int, default=6000000,
help="Lookup table size.")
parser.add_argument(
"--embedding_dim", type=int, default=128,
help="Embedding dimension.")
parser.add_argument(
"--average_len", type=int, default=27,
help="Sparse feature average lengths, default is 27")
parser.add_argument(
"--batch_size", type=int, default=100,
help="The batch size.")
parser.add_argument(
"--iteration", type=int, default=100000,
help="The number of iterations.")
args, extra_args = parser.parse_known_args()
core.GlobalInit(['python'] + extra_args)
benchmark_sparse_lengths_sum(
args.dtype,
args.embedding_size,
args.embedding_dim,
args.average_len,
args.batch_size,
args.iteration)
|
Add a quick SparseLengthsSum benchmark.
|
Add a quick SparseLengthsSum benchmark.
Summary: TSIA - this makes it a bit easy to benchmark sparse lengths sum.
Reviewed By: dzhulgakov
Differential Revision: D5477844
fbshipit-source-id: 89e25c5e0dbf3538877ba1a9abc75a10abfa2757
|
Python
|
apache-2.0
|
pietern/caffe2,xzturn/caffe2,sf-wind/caffe2,xzturn/caffe2,davinwang/caffe2,pietern/caffe2,pietern/caffe2,Yangqing/caffe2,xzturn/caffe2,davinwang/caffe2,sf-wind/caffe2,sf-wind/caffe2,Yangqing/caffe2,xzturn/caffe2,xzturn/caffe2,sf-wind/caffe2,davinwang/caffe2,Yangqing/caffe2,Yangqing/caffe2,davinwang/caffe2,Yangqing/caffe2,sf-wind/caffe2,pietern/caffe2,davinwang/caffe2,pietern/caffe2,caffe2/caffe2
|
Add a quick SparseLengthsSum benchmark.
Summary: TSIA - this makes it a bit easy to benchmark sparse lengths sum.
Reviewed By: dzhulgakov
Differential Revision: D5477844
fbshipit-source-id: 89e25c5e0dbf3538877ba1a9abc75a10abfa2757
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import numpy as np
import datetime
from caffe2.python import core, workspace
def benchmark_sparse_lengths_sum(
dtype_str,
categorical_limit,
embedding_size,
average_len,
batch_size,
iterations):
print('Preparing lookup table. ' + str(datetime.datetime.now()))
# We will use a constant, but non-trivial value so we save initialization
# time.
arr = np.ones([categorical_limit, embedding_size], dtype=np.float32)
arr *= 17.01
dtype_table = {
'float': np.float32,
'float16': np.float16
}
workspace.FeedBlob("X", arr.astype(dtype_table[dtype_str]))
# In order to produce truly random lengths and indices, we will embed a
# Python operator in the net to generate them.
def f(_, outputs):
lengths = np.random.randint(
int(average_len * 0.75),
int(average_len * 1.25),
batch_size).astype(np.int32)
indices = np.random.randint(
0, categorical_limit, np.sum(lengths)).astype(np.int64)
outputs[0].feed(indices)
outputs[1].feed(lengths)
net = core.Net("mynet")
net.Python(f)([], ["indices", "lengths"])
net.SparseLengthsSum(["X", "indices", "lengths"], "Y")
workspace.CreateNet(net)
# Set random seed, so that repeated runs will keep the same sequence of
# random indices.
np.random.seed(1701)
print('Preparation finished. ' + str(datetime.datetime.now()))
workspace.BenchmarkNet(net.Name(), 1, iterations, True)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="minimal benchmark for sparse lengths sum.")
parser.add_argument(
"--dtype", type=str, default="float",
help="The data type for the input lookup table.")
parser.add_argument(
"--embedding_size", type=int, default=6000000,
help="Lookup table size.")
parser.add_argument(
"--embedding_dim", type=int, default=128,
help="Embedding dimension.")
parser.add_argument(
"--average_len", type=int, default=27,
help="Sparse feature average lengths, default is 27")
parser.add_argument(
"--batch_size", type=int, default=100,
help="The batch size.")
parser.add_argument(
"--iteration", type=int, default=100000,
help="The number of iterations.")
args, extra_args = parser.parse_known_args()
core.GlobalInit(['python'] + extra_args)
benchmark_sparse_lengths_sum(
args.dtype,
args.embedding_size,
args.embedding_dim,
args.average_len,
args.batch_size,
args.iteration)
|
<commit_before><commit_msg>Add a quick SparseLengthsSum benchmark.
Summary: TSIA - this makes it a bit easy to benchmark sparse lengths sum.
Reviewed By: dzhulgakov
Differential Revision: D5477844
fbshipit-source-id: 89e25c5e0dbf3538877ba1a9abc75a10abfa2757<commit_after>
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import numpy as np
import datetime
from caffe2.python import core, workspace
def benchmark_sparse_lengths_sum(
dtype_str,
categorical_limit,
embedding_size,
average_len,
batch_size,
iterations):
print('Preparing lookup table. ' + str(datetime.datetime.now()))
# We will use a constant, but non-trivial value so we save initialization
# time.
arr = np.ones([categorical_limit, embedding_size], dtype=np.float32)
arr *= 17.01
dtype_table = {
'float': np.float32,
'float16': np.float16
}
workspace.FeedBlob("X", arr.astype(dtype_table[dtype_str]))
# In order to produce truly random lengths and indices, we will embed a
# Python operator in the net to generate them.
def f(_, outputs):
lengths = np.random.randint(
int(average_len * 0.75),
int(average_len * 1.25),
batch_size).astype(np.int32)
indices = np.random.randint(
0, categorical_limit, np.sum(lengths)).astype(np.int64)
outputs[0].feed(indices)
outputs[1].feed(lengths)
net = core.Net("mynet")
net.Python(f)([], ["indices", "lengths"])
net.SparseLengthsSum(["X", "indices", "lengths"], "Y")
workspace.CreateNet(net)
# Set random seed, so that repeated runs will keep the same sequence of
# random indices.
np.random.seed(1701)
print('Preparation finished. ' + str(datetime.datetime.now()))
workspace.BenchmarkNet(net.Name(), 1, iterations, True)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="minimal benchmark for sparse lengths sum.")
parser.add_argument(
"--dtype", type=str, default="float",
help="The data type for the input lookup table.")
parser.add_argument(
"--embedding_size", type=int, default=6000000,
help="Lookup table size.")
parser.add_argument(
"--embedding_dim", type=int, default=128,
help="Embedding dimension.")
parser.add_argument(
"--average_len", type=int, default=27,
help="Sparse feature average lengths, default is 27")
parser.add_argument(
"--batch_size", type=int, default=100,
help="The batch size.")
parser.add_argument(
"--iteration", type=int, default=100000,
help="The number of iterations.")
args, extra_args = parser.parse_known_args()
core.GlobalInit(['python'] + extra_args)
benchmark_sparse_lengths_sum(
args.dtype,
args.embedding_size,
args.embedding_dim,
args.average_len,
args.batch_size,
args.iteration)
|
Add a quick SparseLengthsSum benchmark.
Summary: TSIA - this makes it a bit easy to benchmark sparse lengths sum.
Reviewed By: dzhulgakov
Differential Revision: D5477844
fbshipit-source-id: 89e25c5e0dbf3538877ba1a9abc75a10abfa2757from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import numpy as np
import datetime
from caffe2.python import core, workspace
def benchmark_sparse_lengths_sum(
dtype_str,
categorical_limit,
embedding_size,
average_len,
batch_size,
iterations):
print('Preparing lookup table. ' + str(datetime.datetime.now()))
# We will use a constant, but non-trivial value so we save initialization
# time.
arr = np.ones([categorical_limit, embedding_size], dtype=np.float32)
arr *= 17.01
dtype_table = {
'float': np.float32,
'float16': np.float16
}
workspace.FeedBlob("X", arr.astype(dtype_table[dtype_str]))
# In order to produce truly random lengths and indices, we will embed a
# Python operator in the net to generate them.
def f(_, outputs):
lengths = np.random.randint(
int(average_len * 0.75),
int(average_len * 1.25),
batch_size).astype(np.int32)
indices = np.random.randint(
0, categorical_limit, np.sum(lengths)).astype(np.int64)
outputs[0].feed(indices)
outputs[1].feed(lengths)
net = core.Net("mynet")
net.Python(f)([], ["indices", "lengths"])
net.SparseLengthsSum(["X", "indices", "lengths"], "Y")
workspace.CreateNet(net)
# Set random seed, so that repeated runs will keep the same sequence of
# random indices.
np.random.seed(1701)
print('Preparation finished. ' + str(datetime.datetime.now()))
workspace.BenchmarkNet(net.Name(), 1, iterations, True)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="minimal benchmark for sparse lengths sum.")
parser.add_argument(
"--dtype", type=str, default="float",
help="The data type for the input lookup table.")
parser.add_argument(
"--embedding_size", type=int, default=6000000,
help="Lookup table size.")
parser.add_argument(
"--embedding_dim", type=int, default=128,
help="Embedding dimension.")
parser.add_argument(
"--average_len", type=int, default=27,
help="Sparse feature average lengths, default is 27")
parser.add_argument(
"--batch_size", type=int, default=100,
help="The batch size.")
parser.add_argument(
"--iteration", type=int, default=100000,
help="The number of iterations.")
args, extra_args = parser.parse_known_args()
core.GlobalInit(['python'] + extra_args)
benchmark_sparse_lengths_sum(
args.dtype,
args.embedding_size,
args.embedding_dim,
args.average_len,
args.batch_size,
args.iteration)
|
<commit_before><commit_msg>Add a quick SparseLengthsSum benchmark.
Summary: TSIA - this makes it a bit easy to benchmark sparse lengths sum.
Reviewed By: dzhulgakov
Differential Revision: D5477844
fbshipit-source-id: 89e25c5e0dbf3538877ba1a9abc75a10abfa2757<commit_after>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import numpy as np
import datetime
from caffe2.python import core, workspace
def benchmark_sparse_lengths_sum(
dtype_str,
categorical_limit,
embedding_size,
average_len,
batch_size,
iterations):
print('Preparing lookup table. ' + str(datetime.datetime.now()))
# We will use a constant, but non-trivial value so we save initialization
# time.
arr = np.ones([categorical_limit, embedding_size], dtype=np.float32)
arr *= 17.01
dtype_table = {
'float': np.float32,
'float16': np.float16
}
workspace.FeedBlob("X", arr.astype(dtype_table[dtype_str]))
# In order to produce truly random lengths and indices, we will embed a
# Python operator in the net to generate them.
def f(_, outputs):
lengths = np.random.randint(
int(average_len * 0.75),
int(average_len * 1.25),
batch_size).astype(np.int32)
indices = np.random.randint(
0, categorical_limit, np.sum(lengths)).astype(np.int64)
outputs[0].feed(indices)
outputs[1].feed(lengths)
net = core.Net("mynet")
net.Python(f)([], ["indices", "lengths"])
net.SparseLengthsSum(["X", "indices", "lengths"], "Y")
workspace.CreateNet(net)
# Set random seed, so that repeated runs will keep the same sequence of
# random indices.
np.random.seed(1701)
print('Preparation finished. ' + str(datetime.datetime.now()))
workspace.BenchmarkNet(net.Name(), 1, iterations, True)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="minimal benchmark for sparse lengths sum.")
parser.add_argument(
"--dtype", type=str, default="float",
help="The data type for the input lookup table.")
parser.add_argument(
"--embedding_size", type=int, default=6000000,
help="Lookup table size.")
parser.add_argument(
"--embedding_dim", type=int, default=128,
help="Embedding dimension.")
parser.add_argument(
"--average_len", type=int, default=27,
help="Sparse feature average lengths, default is 27")
parser.add_argument(
"--batch_size", type=int, default=100,
help="The batch size.")
parser.add_argument(
"--iteration", type=int, default=100000,
help="The number of iterations.")
args, extra_args = parser.parse_known_args()
core.GlobalInit(['python'] + extra_args)
benchmark_sparse_lengths_sum(
args.dtype,
args.embedding_size,
args.embedding_dim,
args.average_len,
args.batch_size,
args.iteration)
|
|
0a0d449bfee13895db2ef5d494fb67bc34e631ee
|
mysite/scripts/delete-emo-projectexps.py
|
mysite/scripts/delete-emo-projectexps.py
|
from mysite.profile.models import ProjectExp, Person
project_exps = ProjectExp.objects.filter(
person=Person.objects.get(user__username='emo'))[:19]
# Gonna limit to 20; damage mitigation just in case this query isn't right.
for exp in project_exps:
exp.delete()
|
Add script to delete emo's projectexps. We use this script when showing people how the importer works.
|
Add script to delete emo's projectexps. We use this script when showing people how the importer works.
|
Python
|
agpl-3.0
|
mzdaniel/oh-mainline,openhatch/oh-mainline,ehashman/oh-mainline,heeraj123/oh-mainline,nirmeshk/oh-mainline,SnappleCap/oh-mainline,campbe13/openhatch,campbe13/openhatch,heeraj123/oh-mainline,jledbetter/openhatch,eeshangarg/oh-mainline,willingc/oh-mainline,moijes12/oh-mainline,mzdaniel/oh-mainline,ojengwa/oh-mainline,nirmeshk/oh-mainline,mzdaniel/oh-mainline,openhatch/oh-mainline,nirmeshk/oh-mainline,onceuponatimeforever/oh-mainline,willingc/oh-mainline,vipul-sharma20/oh-mainline,Changaco/oh-mainline,jledbetter/openhatch,ehashman/oh-mainline,ojengwa/oh-mainline,Changaco/oh-mainline,vipul-sharma20/oh-mainline,heeraj123/oh-mainline,eeshangarg/oh-mainline,heeraj123/oh-mainline,onceuponatimeforever/oh-mainline,waseem18/oh-mainline,willingc/oh-mainline,SnappleCap/oh-mainline,campbe13/openhatch,SnappleCap/oh-mainline,jledbetter/openhatch,willingc/oh-mainline,Changaco/oh-mainline,mzdaniel/oh-mainline,SnappleCap/oh-mainline,ehashman/oh-mainline,SnappleCap/oh-mainline,ojengwa/oh-mainline,ojengwa/oh-mainline,Changaco/oh-mainline,sudheesh001/oh-mainline,mzdaniel/oh-mainline,vipul-sharma20/oh-mainline,mzdaniel/oh-mainline,onceuponatimeforever/oh-mainline,jledbetter/openhatch,moijes12/oh-mainline,nirmeshk/oh-mainline,waseem18/oh-mainline,openhatch/oh-mainline,moijes12/oh-mainline,waseem18/oh-mainline,vipul-sharma20/oh-mainline,ehashman/oh-mainline,moijes12/oh-mainline,campbe13/openhatch,openhatch/oh-mainline,mzdaniel/oh-mainline,ehashman/oh-mainline,eeshangarg/oh-mainline,sudheesh001/oh-mainline,vipul-sharma20/oh-mainline,onceuponatimeforever/oh-mainline,sudheesh001/oh-mainline,willingc/oh-mainline,eeshangarg/oh-mainline,waseem18/oh-mainline,onceuponatimeforever/oh-mainline,eeshangarg/oh-mainline,nirmeshk/oh-mainline,openhatch/oh-mainline,heeraj123/oh-mainline,campbe13/openhatch,sudheesh001/oh-mainline,jledbetter/openhatch,ojengwa/oh-mainline,waseem18/oh-mainline,sudheesh001/oh-mainline,Changaco/oh-mainline,moijes12/oh-mainline
|
Add script to delete emo's projectexps. We use this script when showing people how the importer works.
|
from mysite.profile.models import ProjectExp, Person
project_exps = ProjectExp.objects.filter(
person=Person.objects.get(user__username='emo'))[:19]
# Gonna limit to 20; damage mitigation just in case this query isn't right.
for exp in project_exps:
exp.delete()
|
<commit_before><commit_msg>Add script to delete emo's projectexps. We use this script when showing people how the importer works.<commit_after>
|
from mysite.profile.models import ProjectExp, Person
project_exps = ProjectExp.objects.filter(
person=Person.objects.get(user__username='emo'))[:19]
# Gonna limit to 20; damage mitigation just in case this query isn't right.
for exp in project_exps:
exp.delete()
|
Add script to delete emo's projectexps. We use this script when showing people how the importer works.from mysite.profile.models import ProjectExp, Person
project_exps = ProjectExp.objects.filter(
person=Person.objects.get(user__username='emo'))[:19]
# Gonna limit to 20; damage mitigation just in case this query isn't right.
for exp in project_exps:
exp.delete()
|
<commit_before><commit_msg>Add script to delete emo's projectexps. We use this script when showing people how the importer works.<commit_after>from mysite.profile.models import ProjectExp, Person
project_exps = ProjectExp.objects.filter(
person=Person.objects.get(user__username='emo'))[:19]
# Gonna limit to 20; damage mitigation just in case this query isn't right.
for exp in project_exps:
exp.delete()
|
|
07d2742da2b75d1c23451b76447acf5ec03f41b0
|
osf/management/commands/update_preprint_share_dates.py
|
osf/management/commands/update_preprint_share_dates.py
|
from __future__ import unicode_literals
import logging
from django.core.management.base import BaseCommand
from scripts import utils as script_utils
from osf.models import PreprintService
from website.preprints.tasks import on_preprint_updated
logger = logging.getLogger(__name__)
def update_share_preprint_modified_dates(dry_run=False):
dates_updated = 0
for preprint in PreprintService.objects.filter():
if preprint.node.date_modified > preprint.date_modified:
if not dry_run:
on_preprint_updated(preprint._id)
dates_updated += 1
return dates_updated
class Command(BaseCommand):
"""
Send more accurate preprint modified dates to Share (max of node.date_modified and preprint.date_modified)
"""
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument(
'--dry',
action='store_true',
dest='dry_run',
help='Say how many preprint updates would be sent to share',
)
def handle(self, *args, **options):
dry_run = options.get('dry_run', False)
if not dry_run:
script_utils.add_file_logger(logger, __file__)
dates_updated = update_share_preprint_modified_dates()
logger.info('Sent %d new preprint modified dates to Share' % dates_updated)
else:
dates_updated = update_share_preprint_modified_dates(dry_run=True)
logger.info('Would have sent %d new preprint modified dates to Share' % dates_updated)
|
Add management command to update preprint share dates
|
Add management command to update preprint share dates
|
Python
|
apache-2.0
|
HalcyonChimera/osf.io,laurenrevere/osf.io,crcresearch/osf.io,icereval/osf.io,crcresearch/osf.io,Johnetordoff/osf.io,sloria/osf.io,HalcyonChimera/osf.io,TomBaxter/osf.io,CenterForOpenScience/osf.io,CenterForOpenScience/osf.io,binoculars/osf.io,sloria/osf.io,baylee-d/osf.io,TomBaxter/osf.io,chrisseto/osf.io,binoculars/osf.io,Johnetordoff/osf.io,Johnetordoff/osf.io,leb2dg/osf.io,cslzchen/osf.io,mattclark/osf.io,laurenrevere/osf.io,leb2dg/osf.io,erinspace/osf.io,leb2dg/osf.io,laurenrevere/osf.io,erinspace/osf.io,felliott/osf.io,cslzchen/osf.io,crcresearch/osf.io,sloria/osf.io,aaxelb/osf.io,adlius/osf.io,brianjgeiger/osf.io,chrisseto/osf.io,mfraezz/osf.io,brianjgeiger/osf.io,felliott/osf.io,TomBaxter/osf.io,adlius/osf.io,icereval/osf.io,icereval/osf.io,caseyrollins/osf.io,chennan47/osf.io,erinspace/osf.io,pattisdr/osf.io,pattisdr/osf.io,chrisseto/osf.io,cslzchen/osf.io,baylee-d/osf.io,mfraezz/osf.io,cslzchen/osf.io,baylee-d/osf.io,caseyrollins/osf.io,brianjgeiger/osf.io,adlius/osf.io,chrisseto/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,felliott/osf.io,aaxelb/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,pattisdr/osf.io,leb2dg/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,saradbowman/osf.io,saradbowman/osf.io,chennan47/osf.io,caseyrollins/osf.io,felliott/osf.io,mfraezz/osf.io,binoculars/osf.io,mattclark/osf.io,mattclark/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,chennan47/osf.io
|
Add management command to update preprint share dates
|
from __future__ import unicode_literals
import logging
from django.core.management.base import BaseCommand
from scripts import utils as script_utils
from osf.models import PreprintService
from website.preprints.tasks import on_preprint_updated
logger = logging.getLogger(__name__)
def update_share_preprint_modified_dates(dry_run=False):
dates_updated = 0
for preprint in PreprintService.objects.filter():
if preprint.node.date_modified > preprint.date_modified:
if not dry_run:
on_preprint_updated(preprint._id)
dates_updated += 1
return dates_updated
class Command(BaseCommand):
"""
Send more accurate preprint modified dates to Share (max of node.date_modified and preprint.date_modified)
"""
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument(
'--dry',
action='store_true',
dest='dry_run',
help='Say how many preprint updates would be sent to share',
)
def handle(self, *args, **options):
dry_run = options.get('dry_run', False)
if not dry_run:
script_utils.add_file_logger(logger, __file__)
dates_updated = update_share_preprint_modified_dates()
logger.info('Sent %d new preprint modified dates to Share' % dates_updated)
else:
dates_updated = update_share_preprint_modified_dates(dry_run=True)
logger.info('Would have sent %d new preprint modified dates to Share' % dates_updated)
|
<commit_before><commit_msg>Add management command to update preprint share dates<commit_after>
|
from __future__ import unicode_literals
import logging
from django.core.management.base import BaseCommand
from scripts import utils as script_utils
from osf.models import PreprintService
from website.preprints.tasks import on_preprint_updated
logger = logging.getLogger(__name__)
def update_share_preprint_modified_dates(dry_run=False):
dates_updated = 0
for preprint in PreprintService.objects.filter():
if preprint.node.date_modified > preprint.date_modified:
if not dry_run:
on_preprint_updated(preprint._id)
dates_updated += 1
return dates_updated
class Command(BaseCommand):
"""
Send more accurate preprint modified dates to Share (max of node.date_modified and preprint.date_modified)
"""
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument(
'--dry',
action='store_true',
dest='dry_run',
help='Say how many preprint updates would be sent to share',
)
def handle(self, *args, **options):
dry_run = options.get('dry_run', False)
if not dry_run:
script_utils.add_file_logger(logger, __file__)
dates_updated = update_share_preprint_modified_dates()
logger.info('Sent %d new preprint modified dates to Share' % dates_updated)
else:
dates_updated = update_share_preprint_modified_dates(dry_run=True)
logger.info('Would have sent %d new preprint modified dates to Share' % dates_updated)
|
Add management command to update preprint share datesfrom __future__ import unicode_literals
import logging
from django.core.management.base import BaseCommand
from scripts import utils as script_utils
from osf.models import PreprintService
from website.preprints.tasks import on_preprint_updated
logger = logging.getLogger(__name__)
def update_share_preprint_modified_dates(dry_run=False):
dates_updated = 0
for preprint in PreprintService.objects.filter():
if preprint.node.date_modified > preprint.date_modified:
if not dry_run:
on_preprint_updated(preprint._id)
dates_updated += 1
return dates_updated
class Command(BaseCommand):
"""
Send more accurate preprint modified dates to Share (max of node.date_modified and preprint.date_modified)
"""
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument(
'--dry',
action='store_true',
dest='dry_run',
help='Say how many preprint updates would be sent to share',
)
def handle(self, *args, **options):
dry_run = options.get('dry_run', False)
if not dry_run:
script_utils.add_file_logger(logger, __file__)
dates_updated = update_share_preprint_modified_dates()
logger.info('Sent %d new preprint modified dates to Share' % dates_updated)
else:
dates_updated = update_share_preprint_modified_dates(dry_run=True)
logger.info('Would have sent %d new preprint modified dates to Share' % dates_updated)
|
<commit_before><commit_msg>Add management command to update preprint share dates<commit_after>from __future__ import unicode_literals
import logging
from django.core.management.base import BaseCommand
from scripts import utils as script_utils
from osf.models import PreprintService
from website.preprints.tasks import on_preprint_updated
logger = logging.getLogger(__name__)
def update_share_preprint_modified_dates(dry_run=False):
dates_updated = 0
for preprint in PreprintService.objects.filter():
if preprint.node.date_modified > preprint.date_modified:
if not dry_run:
on_preprint_updated(preprint._id)
dates_updated += 1
return dates_updated
class Command(BaseCommand):
"""
Send more accurate preprint modified dates to Share (max of node.date_modified and preprint.date_modified)
"""
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument(
'--dry',
action='store_true',
dest='dry_run',
help='Say how many preprint updates would be sent to share',
)
def handle(self, *args, **options):
dry_run = options.get('dry_run', False)
if not dry_run:
script_utils.add_file_logger(logger, __file__)
dates_updated = update_share_preprint_modified_dates()
logger.info('Sent %d new preprint modified dates to Share' % dates_updated)
else:
dates_updated = update_share_preprint_modified_dates(dry_run=True)
logger.info('Would have sent %d new preprint modified dates to Share' % dates_updated)
|
|
936650dd8bc320731e7a703d6b1c9ab4092c3f0c
|
swap_nodes_in_pairs.py
|
swap_nodes_in_pairs.py
|
'''
Given a linked list, swap every two adjacent nodes and return its head.
For example,
Given 1->2->3->4, you should return the list as 2->1->4->3.
Your algorithm should use only constant space. You may not modify the values in the list, only nodes itself can be changed.
'''
'''
Use a dummy head.
'''
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
from listnode import ListNode
class Solution:
# @param a ListNode
# @return a ListNode
def swapPairs(self, head):
dummyHead = ListNode(0)
dummyHead.next = head
cur = dummyHead
post = head
while post and post.next:
cur.next = post.next
post.next = post.next.next
cur.next.next = post
cur = post
post = post.next
return dummyHead.next
if __name__ == '__main__':
s = Solution()
n1 = ListNode(1)
n2 = ListNode(2)
n3 = ListNode(3)
n4 = ListNode(4)
n1.next = n2
n2.next = n3
n3.next = n4
head = n1
head.printList()
s.swapPairs(head).printList()
|
Swap Nodes in Pairs problem
|
Swap Nodes in Pairs problem
|
Python
|
apache-2.0
|
zsmountain/leetcode,zsmountain/leetcode,zsmountain/leetcode
|
Swap Nodes in Pairs problem
|
'''
Given a linked list, swap every two adjacent nodes and return its head.
For example,
Given 1->2->3->4, you should return the list as 2->1->4->3.
Your algorithm should use only constant space. You may not modify the values in the list, only nodes itself can be changed.
'''
'''
Use a dummy head.
'''
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
from listnode import ListNode
class Solution:
# @param a ListNode
# @return a ListNode
def swapPairs(self, head):
dummyHead = ListNode(0)
dummyHead.next = head
cur = dummyHead
post = head
while post and post.next:
cur.next = post.next
post.next = post.next.next
cur.next.next = post
cur = post
post = post.next
return dummyHead.next
if __name__ == '__main__':
s = Solution()
n1 = ListNode(1)
n2 = ListNode(2)
n3 = ListNode(3)
n4 = ListNode(4)
n1.next = n2
n2.next = n3
n3.next = n4
head = n1
head.printList()
s.swapPairs(head).printList()
|
<commit_before><commit_msg>Swap Nodes in Pairs problem<commit_after>
|
'''
Given a linked list, swap every two adjacent nodes and return its head.
For example,
Given 1->2->3->4, you should return the list as 2->1->4->3.
Your algorithm should use only constant space. You may not modify the values in the list, only nodes itself can be changed.
'''
'''
Use a dummy head.
'''
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
from listnode import ListNode
class Solution:
# @param a ListNode
# @return a ListNode
def swapPairs(self, head):
dummyHead = ListNode(0)
dummyHead.next = head
cur = dummyHead
post = head
while post and post.next:
cur.next = post.next
post.next = post.next.next
cur.next.next = post
cur = post
post = post.next
return dummyHead.next
if __name__ == '__main__':
s = Solution()
n1 = ListNode(1)
n2 = ListNode(2)
n3 = ListNode(3)
n4 = ListNode(4)
n1.next = n2
n2.next = n3
n3.next = n4
head = n1
head.printList()
s.swapPairs(head).printList()
|
Swap Nodes in Pairs problem'''
Given a linked list, swap every two adjacent nodes and return its head.
For example,
Given 1->2->3->4, you should return the list as 2->1->4->3.
Your algorithm should use only constant space. You may not modify the values in the list, only nodes itself can be changed.
'''
'''
Use a dummy head.
'''
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
from listnode import ListNode
class Solution:
# @param a ListNode
# @return a ListNode
def swapPairs(self, head):
dummyHead = ListNode(0)
dummyHead.next = head
cur = dummyHead
post = head
while post and post.next:
cur.next = post.next
post.next = post.next.next
cur.next.next = post
cur = post
post = post.next
return dummyHead.next
if __name__ == '__main__':
s = Solution()
n1 = ListNode(1)
n2 = ListNode(2)
n3 = ListNode(3)
n4 = ListNode(4)
n1.next = n2
n2.next = n3
n3.next = n4
head = n1
head.printList()
s.swapPairs(head).printList()
|
<commit_before><commit_msg>Swap Nodes in Pairs problem<commit_after>'''
Given a linked list, swap every two adjacent nodes and return its head.
For example,
Given 1->2->3->4, you should return the list as 2->1->4->3.
Your algorithm should use only constant space. You may not modify the values in the list, only nodes itself can be changed.
'''
'''
Use a dummy head.
'''
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
from listnode import ListNode
class Solution:
# @param a ListNode
# @return a ListNode
def swapPairs(self, head):
dummyHead = ListNode(0)
dummyHead.next = head
cur = dummyHead
post = head
while post and post.next:
cur.next = post.next
post.next = post.next.next
cur.next.next = post
cur = post
post = post.next
return dummyHead.next
if __name__ == '__main__':
s = Solution()
n1 = ListNode(1)
n2 = ListNode(2)
n3 = ListNode(3)
n4 = ListNode(4)
n1.next = n2
n2.next = n3
n3.next = n4
head = n1
head.printList()
s.swapPairs(head).printList()
|
|
44035c166ffde209a47d7739af0d56acb4ec0422
|
notebooks/test_notebooks.py
|
notebooks/test_notebooks.py
|
# -*- coding: utf-8 -*-
'''
Checks notebook execution result.
Equal to this command + error management:
jupyter nbconvert --to notebook --execute --ExecutePreprocessor.timeout=60 --output executed_notebook.ipynb demo.ipynb
For jupyter configuration information, run: jupyter --path
'''
# Dependencies: nbformat, nbconvert, jupyter-client, ipykernel
import io
import nbformat
from nbconvert.preprocessors import ExecutePreprocessor
from nbconvert.preprocessors import CellExecutionError
notebook_filename = 'demo.ipynb'
run_path = '.'
notebook_filename_out = 'executed_notebook.ipynb'
with io.open(notebook_filename) as f:
nb = nbformat.read(f, as_version=4)
ep = ExecutePreprocessor(timeout=600, kernel_name='python')
try:
out = ep.preprocess(nb, {'metadata': {'path': run_path}})
except CellExecutionError:
out = None
msg = 'Error executing the notebook "%s".\n\n' % notebook_filename
msg += 'See notebook "%s" for the traceback.' % notebook_filename_out
print(msg)
raise
finally:
with io.open(notebook_filename_out, mode='wt') as f: # io.open avoids UnicodeEncodeError
nbformat.write(nb, f)
|
Add script to automate notebooks testing
|
Add script to automate notebooks testing
|
Python
|
agpl-3.0
|
openfisca/openfisca-tunisia,openfisca/openfisca-tunisia
|
Add script to automate notebooks testing
|
# -*- coding: utf-8 -*-
'''
Checks notebook execution result.
Equal to this command + error management:
jupyter nbconvert --to notebook --execute --ExecutePreprocessor.timeout=60 --output executed_notebook.ipynb demo.ipynb
For jupyter configuration information, run: jupyter --path
'''
# Dependencies: nbformat, nbconvert, jupyter-client, ipykernel
import io
import nbformat
from nbconvert.preprocessors import ExecutePreprocessor
from nbconvert.preprocessors import CellExecutionError
notebook_filename = 'demo.ipynb'
run_path = '.'
notebook_filename_out = 'executed_notebook.ipynb'
with io.open(notebook_filename) as f:
nb = nbformat.read(f, as_version=4)
ep = ExecutePreprocessor(timeout=600, kernel_name='python')
try:
out = ep.preprocess(nb, {'metadata': {'path': run_path}})
except CellExecutionError:
out = None
msg = 'Error executing the notebook "%s".\n\n' % notebook_filename
msg += 'See notebook "%s" for the traceback.' % notebook_filename_out
print(msg)
raise
finally:
with io.open(notebook_filename_out, mode='wt') as f: # io.open avoids UnicodeEncodeError
nbformat.write(nb, f)
|
<commit_before><commit_msg>Add script to automate notebooks testing<commit_after>
|
# -*- coding: utf-8 -*-
'''
Checks notebook execution result.
Equal to this command + error management:
jupyter nbconvert --to notebook --execute --ExecutePreprocessor.timeout=60 --output executed_notebook.ipynb demo.ipynb
For jupyter configuration information, run: jupyter --path
'''
# Dependencies: nbformat, nbconvert, jupyter-client, ipykernel
import io
import nbformat
from nbconvert.preprocessors import ExecutePreprocessor
from nbconvert.preprocessors import CellExecutionError
notebook_filename = 'demo.ipynb'
run_path = '.'
notebook_filename_out = 'executed_notebook.ipynb'
with io.open(notebook_filename) as f:
nb = nbformat.read(f, as_version=4)
ep = ExecutePreprocessor(timeout=600, kernel_name='python')
try:
out = ep.preprocess(nb, {'metadata': {'path': run_path}})
except CellExecutionError:
out = None
msg = 'Error executing the notebook "%s".\n\n' % notebook_filename
msg += 'See notebook "%s" for the traceback.' % notebook_filename_out
print(msg)
raise
finally:
with io.open(notebook_filename_out, mode='wt') as f: # io.open avoids UnicodeEncodeError
nbformat.write(nb, f)
|
Add script to automate notebooks testing# -*- coding: utf-8 -*-
'''
Checks notebook execution result.
Equal to this command + error management:
jupyter nbconvert --to notebook --execute --ExecutePreprocessor.timeout=60 --output executed_notebook.ipynb demo.ipynb
For jupyter configuration information, run: jupyter --path
'''
# Dependencies: nbformat, nbconvert, jupyter-client, ipykernel
import io
import nbformat
from nbconvert.preprocessors import ExecutePreprocessor
from nbconvert.preprocessors import CellExecutionError
notebook_filename = 'demo.ipynb'
run_path = '.'
notebook_filename_out = 'executed_notebook.ipynb'
with io.open(notebook_filename) as f:
nb = nbformat.read(f, as_version=4)
ep = ExecutePreprocessor(timeout=600, kernel_name='python')
try:
out = ep.preprocess(nb, {'metadata': {'path': run_path}})
except CellExecutionError:
out = None
msg = 'Error executing the notebook "%s".\n\n' % notebook_filename
msg += 'See notebook "%s" for the traceback.' % notebook_filename_out
print(msg)
raise
finally:
with io.open(notebook_filename_out, mode='wt') as f: # io.open avoids UnicodeEncodeError
nbformat.write(nb, f)
|
<commit_before><commit_msg>Add script to automate notebooks testing<commit_after># -*- coding: utf-8 -*-
'''
Checks notebook execution result.
Equal to this command + error management:
jupyter nbconvert --to notebook --execute --ExecutePreprocessor.timeout=60 --output executed_notebook.ipynb demo.ipynb
For jupyter configuration information, run: jupyter --path
'''
# Dependencies: nbformat, nbconvert, jupyter-client, ipykernel
import io
import nbformat
from nbconvert.preprocessors import ExecutePreprocessor
from nbconvert.preprocessors import CellExecutionError
notebook_filename = 'demo.ipynb'
run_path = '.'
notebook_filename_out = 'executed_notebook.ipynb'
with io.open(notebook_filename) as f:
nb = nbformat.read(f, as_version=4)
ep = ExecutePreprocessor(timeout=600, kernel_name='python')
try:
out = ep.preprocess(nb, {'metadata': {'path': run_path}})
except CellExecutionError:
out = None
msg = 'Error executing the notebook "%s".\n\n' % notebook_filename
msg += 'See notebook "%s" for the traceback.' % notebook_filename_out
print(msg)
raise
finally:
with io.open(notebook_filename_out, mode='wt') as f: # io.open avoids UnicodeEncodeError
nbformat.write(nb, f)
|
|
e9a18df953c972204b393d5a374b74f1dc8303cd
|
backend/scripts/updatedf.py
|
backend/scripts/updatedf.py
|
#!/usr/bin/env python
#import hashlib
import os
def main():
for root, dirs, files in os.walk("/mcfs/data/materialscommons"):
for f in files:
print f
if __name__ == "__main__":
main()
|
Add script to update uploaded files.
|
Add script to update uploaded files.
|
Python
|
mit
|
materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org
|
Add script to update uploaded files.
|
#!/usr/bin/env python
#import hashlib
import os
def main():
for root, dirs, files in os.walk("/mcfs/data/materialscommons"):
for f in files:
print f
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add script to update uploaded files.<commit_after>
|
#!/usr/bin/env python
#import hashlib
import os
def main():
for root, dirs, files in os.walk("/mcfs/data/materialscommons"):
for f in files:
print f
if __name__ == "__main__":
main()
|
Add script to update uploaded files.#!/usr/bin/env python
#import hashlib
import os
def main():
for root, dirs, files in os.walk("/mcfs/data/materialscommons"):
for f in files:
print f
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add script to update uploaded files.<commit_after>#!/usr/bin/env python
#import hashlib
import os
def main():
for root, dirs, files in os.walk("/mcfs/data/materialscommons"):
for f in files:
print f
if __name__ == "__main__":
main()
|
|
a997a84d2a3f1f485eeab24558a62aac15530999
|
src/sentry/runner/commands/repair.py
|
src/sentry/runner/commands/repair.py
|
"""
sentry.runner.commands.repair
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2015 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
import os
import click
from sentry.runner.decorators import configuration
@click.command()
@configuration
def repair():
"Attempt to repair any invalid data."
click.echo('Forcing documentation sync')
from sentry.utils.integrationdocs import sync_docs, DOC_FOLDER
if os.access(DOC_FOLDER, os.W_OK):
sync_docs()
else:
click.echo(' - skipping (path cannot be written to)')
from sentry.models import Activity, Project, ProjectKey
click.echo('Creating missing project keys')
queryset = Project.objects.filter(key_set__isnull=True)
for project in queryset:
try:
ProjectKey.objects.get_or_create(
project=project,
)
except ProjectKey.MultipleObjectsReturned:
pass
from django.db import connection
click.echo("Correcting Group.num_comments counter")
cursor = connection.cursor()
cursor.execute("""
UPDATE sentry_groupedmessage SET num_comments = (
SELECT COUNT(*) from sentry_activity
WHERE type = %s and group_id = sentry_groupedmessage.id
)
""", [Activity.NOTE])
|
"""
sentry.runner.commands.repair
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2015 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
import os
import click
from sentry.runner.decorators import configuration
@click.command()
@configuration
def repair():
"Attempt to repair any invalid data."
click.echo('Forcing documentation sync')
from sentry.utils.integrationdocs import sync_docs, DOC_FOLDER
if os.access(DOC_FOLDER, os.W_OK):
sync_docs()
elif os.path.isdir(DOC_FOLDER):
click.echo(' - skipping, path cannot be written to: %r' % DOC_FOLDER)
else:
click.echo(' - skipping, path does not exist: %r' % DOC_FOLDER)
from sentry.models import Activity, Project, ProjectKey
click.echo('Creating missing project keys')
queryset = Project.objects.filter(key_set__isnull=True)
for project in queryset:
try:
ProjectKey.objects.get_or_create(
project=project,
)
except ProjectKey.MultipleObjectsReturned:
pass
from django.db import connection
click.echo("Correcting Group.num_comments counter")
cursor = connection.cursor()
cursor.execute("""
UPDATE sentry_groupedmessage SET num_comments = (
SELECT COUNT(*) from sentry_activity
WHERE type = %s and group_id = sentry_groupedmessage.id
)
""", [Activity.NOTE])
|
Clarify error condition when failing to sync docs
|
Clarify error condition when failing to sync docs
|
Python
|
bsd-3-clause
|
mitsuhiko/sentry,daevaorn/sentry,jean/sentry,fotinakis/sentry,gencer/sentry,nicholasserra/sentry,daevaorn/sentry,looker/sentry,zenefits/sentry,nicholasserra/sentry,jean/sentry,looker/sentry,mvaled/sentry,JamesMura/sentry,zenefits/sentry,jean/sentry,mvaled/sentry,JackDanger/sentry,mvaled/sentry,zenefits/sentry,mvaled/sentry,zenefits/sentry,JamesMura/sentry,fotinakis/sentry,zenefits/sentry,BuildingLink/sentry,gencer/sentry,BuildingLink/sentry,daevaorn/sentry,jean/sentry,gencer/sentry,mvaled/sentry,daevaorn/sentry,fotinakis/sentry,mvaled/sentry,alexm92/sentry,ifduyue/sentry,alexm92/sentry,BuildingLink/sentry,gencer/sentry,beeftornado/sentry,ifduyue/sentry,JackDanger/sentry,nicholasserra/sentry,looker/sentry,looker/sentry,fotinakis/sentry,JackDanger/sentry,ifduyue/sentry,gencer/sentry,jean/sentry,JamesMura/sentry,JamesMura/sentry,alexm92/sentry,JamesMura/sentry,mitsuhiko/sentry,beeftornado/sentry,ifduyue/sentry,beeftornado/sentry,looker/sentry,BuildingLink/sentry,BuildingLink/sentry,ifduyue/sentry
|
"""
sentry.runner.commands.repair
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2015 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
import os
import click
from sentry.runner.decorators import configuration
@click.command()
@configuration
def repair():
"Attempt to repair any invalid data."
click.echo('Forcing documentation sync')
from sentry.utils.integrationdocs import sync_docs, DOC_FOLDER
if os.access(DOC_FOLDER, os.W_OK):
sync_docs()
else:
click.echo(' - skipping (path cannot be written to)')
from sentry.models import Activity, Project, ProjectKey
click.echo('Creating missing project keys')
queryset = Project.objects.filter(key_set__isnull=True)
for project in queryset:
try:
ProjectKey.objects.get_or_create(
project=project,
)
except ProjectKey.MultipleObjectsReturned:
pass
from django.db import connection
click.echo("Correcting Group.num_comments counter")
cursor = connection.cursor()
cursor.execute("""
UPDATE sentry_groupedmessage SET num_comments = (
SELECT COUNT(*) from sentry_activity
WHERE type = %s and group_id = sentry_groupedmessage.id
)
""", [Activity.NOTE])
Clarify error condition when failing to sync docs
|
"""
sentry.runner.commands.repair
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2015 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
import os
import click
from sentry.runner.decorators import configuration
@click.command()
@configuration
def repair():
"Attempt to repair any invalid data."
click.echo('Forcing documentation sync')
from sentry.utils.integrationdocs import sync_docs, DOC_FOLDER
if os.access(DOC_FOLDER, os.W_OK):
sync_docs()
elif os.path.isdir(DOC_FOLDER):
click.echo(' - skipping, path cannot be written to: %r' % DOC_FOLDER)
else:
click.echo(' - skipping, path does not exist: %r' % DOC_FOLDER)
from sentry.models import Activity, Project, ProjectKey
click.echo('Creating missing project keys')
queryset = Project.objects.filter(key_set__isnull=True)
for project in queryset:
try:
ProjectKey.objects.get_or_create(
project=project,
)
except ProjectKey.MultipleObjectsReturned:
pass
from django.db import connection
click.echo("Correcting Group.num_comments counter")
cursor = connection.cursor()
cursor.execute("""
UPDATE sentry_groupedmessage SET num_comments = (
SELECT COUNT(*) from sentry_activity
WHERE type = %s and group_id = sentry_groupedmessage.id
)
""", [Activity.NOTE])
|
<commit_before>"""
sentry.runner.commands.repair
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2015 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
import os
import click
from sentry.runner.decorators import configuration
@click.command()
@configuration
def repair():
"Attempt to repair any invalid data."
click.echo('Forcing documentation sync')
from sentry.utils.integrationdocs import sync_docs, DOC_FOLDER
if os.access(DOC_FOLDER, os.W_OK):
sync_docs()
else:
click.echo(' - skipping (path cannot be written to)')
from sentry.models import Activity, Project, ProjectKey
click.echo('Creating missing project keys')
queryset = Project.objects.filter(key_set__isnull=True)
for project in queryset:
try:
ProjectKey.objects.get_or_create(
project=project,
)
except ProjectKey.MultipleObjectsReturned:
pass
from django.db import connection
click.echo("Correcting Group.num_comments counter")
cursor = connection.cursor()
cursor.execute("""
UPDATE sentry_groupedmessage SET num_comments = (
SELECT COUNT(*) from sentry_activity
WHERE type = %s and group_id = sentry_groupedmessage.id
)
""", [Activity.NOTE])
<commit_msg>Clarify error condition when failing to sync docs<commit_after>
|
"""
sentry.runner.commands.repair
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2015 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
import os
import click
from sentry.runner.decorators import configuration
@click.command()
@configuration
def repair():
"Attempt to repair any invalid data."
click.echo('Forcing documentation sync')
from sentry.utils.integrationdocs import sync_docs, DOC_FOLDER
if os.access(DOC_FOLDER, os.W_OK):
sync_docs()
elif os.path.isdir(DOC_FOLDER):
click.echo(' - skipping, path cannot be written to: %r' % DOC_FOLDER)
else:
click.echo(' - skipping, path does not exist: %r' % DOC_FOLDER)
from sentry.models import Activity, Project, ProjectKey
click.echo('Creating missing project keys')
queryset = Project.objects.filter(key_set__isnull=True)
for project in queryset:
try:
ProjectKey.objects.get_or_create(
project=project,
)
except ProjectKey.MultipleObjectsReturned:
pass
from django.db import connection
click.echo("Correcting Group.num_comments counter")
cursor = connection.cursor()
cursor.execute("""
UPDATE sentry_groupedmessage SET num_comments = (
SELECT COUNT(*) from sentry_activity
WHERE type = %s and group_id = sentry_groupedmessage.id
)
""", [Activity.NOTE])
|
"""
sentry.runner.commands.repair
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2015 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
import os
import click
from sentry.runner.decorators import configuration
@click.command()
@configuration
def repair():
"Attempt to repair any invalid data."
click.echo('Forcing documentation sync')
from sentry.utils.integrationdocs import sync_docs, DOC_FOLDER
if os.access(DOC_FOLDER, os.W_OK):
sync_docs()
else:
click.echo(' - skipping (path cannot be written to)')
from sentry.models import Activity, Project, ProjectKey
click.echo('Creating missing project keys')
queryset = Project.objects.filter(key_set__isnull=True)
for project in queryset:
try:
ProjectKey.objects.get_or_create(
project=project,
)
except ProjectKey.MultipleObjectsReturned:
pass
from django.db import connection
click.echo("Correcting Group.num_comments counter")
cursor = connection.cursor()
cursor.execute("""
UPDATE sentry_groupedmessage SET num_comments = (
SELECT COUNT(*) from sentry_activity
WHERE type = %s and group_id = sentry_groupedmessage.id
)
""", [Activity.NOTE])
Clarify error condition when failing to sync docs"""
sentry.runner.commands.repair
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2015 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
import os
import click
from sentry.runner.decorators import configuration
@click.command()
@configuration
def repair():
"Attempt to repair any invalid data."
click.echo('Forcing documentation sync')
from sentry.utils.integrationdocs import sync_docs, DOC_FOLDER
if os.access(DOC_FOLDER, os.W_OK):
sync_docs()
elif os.path.isdir(DOC_FOLDER):
click.echo(' - skipping, path cannot be written to: %r' % DOC_FOLDER)
else:
click.echo(' - skipping, path does not exist: %r' % DOC_FOLDER)
from sentry.models import Activity, Project, ProjectKey
click.echo('Creating missing project keys')
queryset = Project.objects.filter(key_set__isnull=True)
for project in queryset:
try:
ProjectKey.objects.get_or_create(
project=project,
)
except ProjectKey.MultipleObjectsReturned:
pass
from django.db import connection
click.echo("Correcting Group.num_comments counter")
cursor = connection.cursor()
cursor.execute("""
UPDATE sentry_groupedmessage SET num_comments = (
SELECT COUNT(*) from sentry_activity
WHERE type = %s and group_id = sentry_groupedmessage.id
)
""", [Activity.NOTE])
|
<commit_before>"""
sentry.runner.commands.repair
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2015 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
import os
import click
from sentry.runner.decorators import configuration
@click.command()
@configuration
def repair():
"Attempt to repair any invalid data."
click.echo('Forcing documentation sync')
from sentry.utils.integrationdocs import sync_docs, DOC_FOLDER
if os.access(DOC_FOLDER, os.W_OK):
sync_docs()
else:
click.echo(' - skipping (path cannot be written to)')
from sentry.models import Activity, Project, ProjectKey
click.echo('Creating missing project keys')
queryset = Project.objects.filter(key_set__isnull=True)
for project in queryset:
try:
ProjectKey.objects.get_or_create(
project=project,
)
except ProjectKey.MultipleObjectsReturned:
pass
from django.db import connection
click.echo("Correcting Group.num_comments counter")
cursor = connection.cursor()
cursor.execute("""
UPDATE sentry_groupedmessage SET num_comments = (
SELECT COUNT(*) from sentry_activity
WHERE type = %s and group_id = sentry_groupedmessage.id
)
""", [Activity.NOTE])
<commit_msg>Clarify error condition when failing to sync docs<commit_after>"""
sentry.runner.commands.repair
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2015 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
import os
import click
from sentry.runner.decorators import configuration
@click.command()
@configuration
def repair():
"Attempt to repair any invalid data."
click.echo('Forcing documentation sync')
from sentry.utils.integrationdocs import sync_docs, DOC_FOLDER
if os.access(DOC_FOLDER, os.W_OK):
sync_docs()
elif os.path.isdir(DOC_FOLDER):
click.echo(' - skipping, path cannot be written to: %r' % DOC_FOLDER)
else:
click.echo(' - skipping, path does not exist: %r' % DOC_FOLDER)
from sentry.models import Activity, Project, ProjectKey
click.echo('Creating missing project keys')
queryset = Project.objects.filter(key_set__isnull=True)
for project in queryset:
try:
ProjectKey.objects.get_or_create(
project=project,
)
except ProjectKey.MultipleObjectsReturned:
pass
from django.db import connection
click.echo("Correcting Group.num_comments counter")
cursor = connection.cursor()
cursor.execute("""
UPDATE sentry_groupedmessage SET num_comments = (
SELECT COUNT(*) from sentry_activity
WHERE type = %s and group_id = sentry_groupedmessage.id
)
""", [Activity.NOTE])
|
6fec4a7e5f594ab34e2d7edcefa1695325669cff
|
stingray/tests/test_crossspectrum.py
|
stingray/tests/test_crossspectrum.py
|
from __future__ import division
import numpy as np
from nose.tools import raises
from stingray import Lightcurve
from stingray import Crossspectrum, AveragedCrossspectrum
np.random.seed(20160528)
class TestCrossspectrum(object):
def setUp(self):
tstart = 0.0
tend = 1.0
dt = 0.0001
time = np.linspace(tstart, tend, int((tend - tstart)/dt))
counts1 = np.random.poisson(0.01, size=time.shape[0])
counts2 = np.random.negative_binomial(1, 0.09, size=time.shape[0])
self.lc1 = Lightcurve(time, counts1)
self.lc2 = Lightcurve(time, counts2)
self.cs = Crossspectrum(self.lc1, self.lc2)
def test_make_empty_crossspectrum(self):
cs = Crossspectrum()
assert cs.freq is None
assert cs.cs is None
assert cs.df is None
assert cs.nphots1 is None
assert cs.nphots2 is None
assert cs.m == 1
assert cs.n is None
|
Initialize class for testing Crossspectrum
|
Initialize class for testing Crossspectrum
|
Python
|
mit
|
pabell/stingray,StingraySoftware/stingray,abigailStev/stingray,evandromr/stingray
|
Initialize class for testing Crossspectrum
|
from __future__ import division
import numpy as np
from nose.tools import raises
from stingray import Lightcurve
from stingray import Crossspectrum, AveragedCrossspectrum
np.random.seed(20160528)
class TestCrossspectrum(object):
def setUp(self):
tstart = 0.0
tend = 1.0
dt = 0.0001
time = np.linspace(tstart, tend, int((tend - tstart)/dt))
counts1 = np.random.poisson(0.01, size=time.shape[0])
counts2 = np.random.negative_binomial(1, 0.09, size=time.shape[0])
self.lc1 = Lightcurve(time, counts1)
self.lc2 = Lightcurve(time, counts2)
self.cs = Crossspectrum(self.lc1, self.lc2)
def test_make_empty_crossspectrum(self):
cs = Crossspectrum()
assert cs.freq is None
assert cs.cs is None
assert cs.df is None
assert cs.nphots1 is None
assert cs.nphots2 is None
assert cs.m == 1
assert cs.n is None
|
<commit_before><commit_msg>Initialize class for testing Crossspectrum<commit_after>
|
from __future__ import division
import numpy as np
from nose.tools import raises
from stingray import Lightcurve
from stingray import Crossspectrum, AveragedCrossspectrum
np.random.seed(20160528)
class TestCrossspectrum(object):
def setUp(self):
tstart = 0.0
tend = 1.0
dt = 0.0001
time = np.linspace(tstart, tend, int((tend - tstart)/dt))
counts1 = np.random.poisson(0.01, size=time.shape[0])
counts2 = np.random.negative_binomial(1, 0.09, size=time.shape[0])
self.lc1 = Lightcurve(time, counts1)
self.lc2 = Lightcurve(time, counts2)
self.cs = Crossspectrum(self.lc1, self.lc2)
def test_make_empty_crossspectrum(self):
cs = Crossspectrum()
assert cs.freq is None
assert cs.cs is None
assert cs.df is None
assert cs.nphots1 is None
assert cs.nphots2 is None
assert cs.m == 1
assert cs.n is None
|
Initialize class for testing Crossspectrumfrom __future__ import division
import numpy as np
from nose.tools import raises
from stingray import Lightcurve
from stingray import Crossspectrum, AveragedCrossspectrum
np.random.seed(20160528)
class TestCrossspectrum(object):
def setUp(self):
tstart = 0.0
tend = 1.0
dt = 0.0001
time = np.linspace(tstart, tend, int((tend - tstart)/dt))
counts1 = np.random.poisson(0.01, size=time.shape[0])
counts2 = np.random.negative_binomial(1, 0.09, size=time.shape[0])
self.lc1 = Lightcurve(time, counts1)
self.lc2 = Lightcurve(time, counts2)
self.cs = Crossspectrum(self.lc1, self.lc2)
def test_make_empty_crossspectrum(self):
cs = Crossspectrum()
assert cs.freq is None
assert cs.cs is None
assert cs.df is None
assert cs.nphots1 is None
assert cs.nphots2 is None
assert cs.m == 1
assert cs.n is None
|
<commit_before><commit_msg>Initialize class for testing Crossspectrum<commit_after>from __future__ import division
import numpy as np
from nose.tools import raises
from stingray import Lightcurve
from stingray import Crossspectrum, AveragedCrossspectrum
np.random.seed(20160528)
class TestCrossspectrum(object):
def setUp(self):
tstart = 0.0
tend = 1.0
dt = 0.0001
time = np.linspace(tstart, tend, int((tend - tstart)/dt))
counts1 = np.random.poisson(0.01, size=time.shape[0])
counts2 = np.random.negative_binomial(1, 0.09, size=time.shape[0])
self.lc1 = Lightcurve(time, counts1)
self.lc2 = Lightcurve(time, counts2)
self.cs = Crossspectrum(self.lc1, self.lc2)
def test_make_empty_crossspectrum(self):
cs = Crossspectrum()
assert cs.freq is None
assert cs.cs is None
assert cs.df is None
assert cs.nphots1 is None
assert cs.nphots2 is None
assert cs.m == 1
assert cs.n is None
|
|
f7c0f0a38f2632c8adf4e987cf330012d0a9c502
|
ureport/polls/migrations/0046_add_index_on_pollresults_contact.py
|
ureport/polls/migrations/0046_add_index_on_pollresults_contact.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
#language=SQL
INDEX_SQL = """
CREATE INDEX polls_pollresult_contact
ON polls_pollresult (contact);
"""
class Migration(migrations.Migration):
dependencies = [
('polls', '0045_fix_has_synced_field'),
]
operations = [
migrations.RunSQL(INDEX_SQL)
]
|
Add index on polls results contact field
|
Add index on polls results contact field
|
Python
|
agpl-3.0
|
xkmato/ureport,xkmato/ureport,rapidpro/ureport,Ilhasoft/ureport,xkmato/ureport,rapidpro/ureport,Ilhasoft/ureport,Ilhasoft/ureport,rapidpro/ureport,rapidpro/ureport,Ilhasoft/ureport
|
Add index on polls results contact field
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
#language=SQL
INDEX_SQL = """
CREATE INDEX polls_pollresult_contact
ON polls_pollresult (contact);
"""
class Migration(migrations.Migration):
dependencies = [
('polls', '0045_fix_has_synced_field'),
]
operations = [
migrations.RunSQL(INDEX_SQL)
]
|
<commit_before><commit_msg>Add index on polls results contact field<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
#language=SQL
INDEX_SQL = """
CREATE INDEX polls_pollresult_contact
ON polls_pollresult (contact);
"""
class Migration(migrations.Migration):
dependencies = [
('polls', '0045_fix_has_synced_field'),
]
operations = [
migrations.RunSQL(INDEX_SQL)
]
|
Add index on polls results contact field# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
#language=SQL
INDEX_SQL = """
CREATE INDEX polls_pollresult_contact
ON polls_pollresult (contact);
"""
class Migration(migrations.Migration):
dependencies = [
('polls', '0045_fix_has_synced_field'),
]
operations = [
migrations.RunSQL(INDEX_SQL)
]
|
<commit_before><commit_msg>Add index on polls results contact field<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
#language=SQL
INDEX_SQL = """
CREATE INDEX polls_pollresult_contact
ON polls_pollresult (contact);
"""
class Migration(migrations.Migration):
dependencies = [
('polls', '0045_fix_has_synced_field'),
]
operations = [
migrations.RunSQL(INDEX_SQL)
]
|
|
ed52a74fa6c60fec164badc8217e9e89bff40e70
|
test/test_metropolis_hasting_mcmc.py
|
test/test_metropolis_hasting_mcmc.py
|
#!/usr/bin/env python3
#
# Tests the basic methods of the metropolis hasting MCMC routine.
#
# This file is part of PINTS.
# Copyright (c) 2017-2018, University of Oxford.
# For licensing information, see the LICENSE file distributed with the PINTS
# software package.
#
import pints
import pints.toy as toy
import unittest
import numpy as np
debug = False
class TestMetropolisHastingMCMC(unittest.TestCase):
"""
Tests the basic methods of the metropolis hasting MCMC routine.
"""
def __init__(self, name):
super(TestMetropolisHastingMCMC, self).__init__(name)
# Create toy model
self.model = toy.LogisticModel()
self.real_parameters = [0.015, 500]
self.times = np.linspace(0, 1000, 1000)
self.values = self.model.simulate(self.real_parameters, self.times)
# Add noise
self.noise = 10
self.values += np.random.normal(0, self.noise, self.values.shape)
self.real_parameters.append(self.noise)
self.real_parameters = np.array(self.real_parameters)
# Create an object with links to the model and time series
self.problem = pints.SingleSeriesProblem(
self.model, self.times, self.values)
# Create a uniform prior over both the parameters and the new noise
# variable
self.log_prior = pints.UniformLogPrior(
[0.01, 400, self.noise * 0.1],
[0.02, 600, self.noise * 100]
)
# Create a log likelihood
self.log_likelihood = pints.UnknownNoiseLogLikelihood(self.problem)
# Create an un-normalised log-posterior (log-likelihood + log-prior)
self.log_posterior = pints.LogPosterior(
self.log_likelihood, self.log_prior)
def test_method(self):
# Create mcmc
x0 = self.real_parameters * 1.1
mcmc = pints.MetropolisHastingMCMC(x0)
# Perform short run
rate = []
chain = []
for i in range(100):
x = mcmc.ask()
fx = self.log_posterior(x)
sample = mcmc.tell(fx)
if i >= 50:
chain.append(sample)
rate.append(mcmc.acceptance_rate())
chain = np.array(chain)
rate = np.array(rate)
self.assertEqual(chain.shape[0], 50)
self.assertEqual(chain.shape[1], len(x0))
self.assertEqual(rate.shape[0], 100)
if __name__ == '__main__':
print('Add -v for more debug output')
import sys
if '-v' in sys.argv:
debug = True
unittest.main()
|
Add basic unit test for MH
|
Add basic unit test for MH
|
Python
|
bsd-3-clause
|
martinjrobins/hobo,martinjrobins/hobo,martinjrobins/hobo,martinjrobins/hobo
|
Add basic unit test for MH
|
#!/usr/bin/env python3
#
# Tests the basic methods of the metropolis hasting MCMC routine.
#
# This file is part of PINTS.
# Copyright (c) 2017-2018, University of Oxford.
# For licensing information, see the LICENSE file distributed with the PINTS
# software package.
#
import pints
import pints.toy as toy
import unittest
import numpy as np
debug = False
class TestMetropolisHastingMCMC(unittest.TestCase):
"""
Tests the basic methods of the metropolis hasting MCMC routine.
"""
def __init__(self, name):
super(TestMetropolisHastingMCMC, self).__init__(name)
# Create toy model
self.model = toy.LogisticModel()
self.real_parameters = [0.015, 500]
self.times = np.linspace(0, 1000, 1000)
self.values = self.model.simulate(self.real_parameters, self.times)
# Add noise
self.noise = 10
self.values += np.random.normal(0, self.noise, self.values.shape)
self.real_parameters.append(self.noise)
self.real_parameters = np.array(self.real_parameters)
# Create an object with links to the model and time series
self.problem = pints.SingleSeriesProblem(
self.model, self.times, self.values)
# Create a uniform prior over both the parameters and the new noise
# variable
self.log_prior = pints.UniformLogPrior(
[0.01, 400, self.noise * 0.1],
[0.02, 600, self.noise * 100]
)
# Create a log likelihood
self.log_likelihood = pints.UnknownNoiseLogLikelihood(self.problem)
# Create an un-normalised log-posterior (log-likelihood + log-prior)
self.log_posterior = pints.LogPosterior(
self.log_likelihood, self.log_prior)
def test_method(self):
# Create mcmc
x0 = self.real_parameters * 1.1
mcmc = pints.MetropolisHastingMCMC(x0)
# Perform short run
rate = []
chain = []
for i in range(100):
x = mcmc.ask()
fx = self.log_posterior(x)
sample = mcmc.tell(fx)
if i >= 50:
chain.append(sample)
rate.append(mcmc.acceptance_rate())
chain = np.array(chain)
rate = np.array(rate)
self.assertEqual(chain.shape[0], 50)
self.assertEqual(chain.shape[1], len(x0))
self.assertEqual(rate.shape[0], 100)
if __name__ == '__main__':
print('Add -v for more debug output')
import sys
if '-v' in sys.argv:
debug = True
unittest.main()
|
<commit_before><commit_msg>Add basic unit test for MH<commit_after>
|
#!/usr/bin/env python3
#
# Tests the basic methods of the metropolis hasting MCMC routine.
#
# This file is part of PINTS.
# Copyright (c) 2017-2018, University of Oxford.
# For licensing information, see the LICENSE file distributed with the PINTS
# software package.
#
import pints
import pints.toy as toy
import unittest
import numpy as np
debug = False
class TestMetropolisHastingMCMC(unittest.TestCase):
"""
Tests the basic methods of the metropolis hasting MCMC routine.
"""
def __init__(self, name):
super(TestMetropolisHastingMCMC, self).__init__(name)
# Create toy model
self.model = toy.LogisticModel()
self.real_parameters = [0.015, 500]
self.times = np.linspace(0, 1000, 1000)
self.values = self.model.simulate(self.real_parameters, self.times)
# Add noise
self.noise = 10
self.values += np.random.normal(0, self.noise, self.values.shape)
self.real_parameters.append(self.noise)
self.real_parameters = np.array(self.real_parameters)
# Create an object with links to the model and time series
self.problem = pints.SingleSeriesProblem(
self.model, self.times, self.values)
# Create a uniform prior over both the parameters and the new noise
# variable
self.log_prior = pints.UniformLogPrior(
[0.01, 400, self.noise * 0.1],
[0.02, 600, self.noise * 100]
)
# Create a log likelihood
self.log_likelihood = pints.UnknownNoiseLogLikelihood(self.problem)
# Create an un-normalised log-posterior (log-likelihood + log-prior)
self.log_posterior = pints.LogPosterior(
self.log_likelihood, self.log_prior)
def test_method(self):
# Create mcmc
x0 = self.real_parameters * 1.1
mcmc = pints.MetropolisHastingMCMC(x0)
# Perform short run
rate = []
chain = []
for i in range(100):
x = mcmc.ask()
fx = self.log_posterior(x)
sample = mcmc.tell(fx)
if i >= 50:
chain.append(sample)
rate.append(mcmc.acceptance_rate())
chain = np.array(chain)
rate = np.array(rate)
self.assertEqual(chain.shape[0], 50)
self.assertEqual(chain.shape[1], len(x0))
self.assertEqual(rate.shape[0], 100)
if __name__ == '__main__':
print('Add -v for more debug output')
import sys
if '-v' in sys.argv:
debug = True
unittest.main()
|
Add basic unit test for MH#!/usr/bin/env python3
#
# Tests the basic methods of the metropolis hasting MCMC routine.
#
# This file is part of PINTS.
# Copyright (c) 2017-2018, University of Oxford.
# For licensing information, see the LICENSE file distributed with the PINTS
# software package.
#
import pints
import pints.toy as toy
import unittest
import numpy as np
debug = False
class TestMetropolisHastingMCMC(unittest.TestCase):
"""
Tests the basic methods of the metropolis hasting MCMC routine.
"""
def __init__(self, name):
super(TestMetropolisHastingMCMC, self).__init__(name)
# Create toy model
self.model = toy.LogisticModel()
self.real_parameters = [0.015, 500]
self.times = np.linspace(0, 1000, 1000)
self.values = self.model.simulate(self.real_parameters, self.times)
# Add noise
self.noise = 10
self.values += np.random.normal(0, self.noise, self.values.shape)
self.real_parameters.append(self.noise)
self.real_parameters = np.array(self.real_parameters)
# Create an object with links to the model and time series
self.problem = pints.SingleSeriesProblem(
self.model, self.times, self.values)
# Create a uniform prior over both the parameters and the new noise
# variable
self.log_prior = pints.UniformLogPrior(
[0.01, 400, self.noise * 0.1],
[0.02, 600, self.noise * 100]
)
# Create a log likelihood
self.log_likelihood = pints.UnknownNoiseLogLikelihood(self.problem)
# Create an un-normalised log-posterior (log-likelihood + log-prior)
self.log_posterior = pints.LogPosterior(
self.log_likelihood, self.log_prior)
def test_method(self):
# Create mcmc
x0 = self.real_parameters * 1.1
mcmc = pints.MetropolisHastingMCMC(x0)
# Perform short run
rate = []
chain = []
for i in range(100):
x = mcmc.ask()
fx = self.log_posterior(x)
sample = mcmc.tell(fx)
if i >= 50:
chain.append(sample)
rate.append(mcmc.acceptance_rate())
chain = np.array(chain)
rate = np.array(rate)
self.assertEqual(chain.shape[0], 50)
self.assertEqual(chain.shape[1], len(x0))
self.assertEqual(rate.shape[0], 100)
if __name__ == '__main__':
print('Add -v for more debug output')
import sys
if '-v' in sys.argv:
debug = True
unittest.main()
|
<commit_before><commit_msg>Add basic unit test for MH<commit_after>#!/usr/bin/env python3
#
# Tests the basic methods of the metropolis hasting MCMC routine.
#
# This file is part of PINTS.
# Copyright (c) 2017-2018, University of Oxford.
# For licensing information, see the LICENSE file distributed with the PINTS
# software package.
#
import pints
import pints.toy as toy
import unittest
import numpy as np
debug = False
class TestMetropolisHastingMCMC(unittest.TestCase):
"""
Tests the basic methods of the metropolis hasting MCMC routine.
"""
def __init__(self, name):
super(TestMetropolisHastingMCMC, self).__init__(name)
# Create toy model
self.model = toy.LogisticModel()
self.real_parameters = [0.015, 500]
self.times = np.linspace(0, 1000, 1000)
self.values = self.model.simulate(self.real_parameters, self.times)
# Add noise
self.noise = 10
self.values += np.random.normal(0, self.noise, self.values.shape)
self.real_parameters.append(self.noise)
self.real_parameters = np.array(self.real_parameters)
# Create an object with links to the model and time series
self.problem = pints.SingleSeriesProblem(
self.model, self.times, self.values)
# Create a uniform prior over both the parameters and the new noise
# variable
self.log_prior = pints.UniformLogPrior(
[0.01, 400, self.noise * 0.1],
[0.02, 600, self.noise * 100]
)
# Create a log likelihood
self.log_likelihood = pints.UnknownNoiseLogLikelihood(self.problem)
# Create an un-normalised log-posterior (log-likelihood + log-prior)
self.log_posterior = pints.LogPosterior(
self.log_likelihood, self.log_prior)
def test_method(self):
# Create mcmc
x0 = self.real_parameters * 1.1
mcmc = pints.MetropolisHastingMCMC(x0)
# Perform short run
rate = []
chain = []
for i in range(100):
x = mcmc.ask()
fx = self.log_posterior(x)
sample = mcmc.tell(fx)
if i >= 50:
chain.append(sample)
rate.append(mcmc.acceptance_rate())
chain = np.array(chain)
rate = np.array(rate)
self.assertEqual(chain.shape[0], 50)
self.assertEqual(chain.shape[1], len(x0))
self.assertEqual(rate.shape[0], 100)
if __name__ == '__main__':
print('Add -v for more debug output')
import sys
if '-v' in sys.argv:
debug = True
unittest.main()
|
|
461097d063e9e174cfbfcbc86e8add91414966bd
|
tests/test_03_multiple_dependency.py
|
tests/test_03_multiple_dependency.py
|
"""A complicated scenario with tests having multiple dependencies.
"""
import pytest
pytest_plugins = "pytester"
def test_multiple(testdir):
testdir.makepyfile("""
import pytest
pytest_plugins = "pytest_dependency"
@pytest.mark.dependency(name="a")
def test_a():
pytest.skip("explicit skip")
@pytest.mark.dependency(name="b")
def test_b():
assert False
@pytest.mark.dependency(name="c")
def test_c():
pass
@pytest.mark.dependency(name="d")
def test_d():
pass
@pytest.mark.dependency(name="e")
def test_e():
pass
@pytest.mark.dependency(name="f", depends=["a", "c"])
def test_f():
pass
@pytest.mark.dependency(name="g", depends=["b", "d"])
def test_g():
pass
@pytest.mark.dependency(name="h", depends=["c", "e"])
def test_h():
pass
@pytest.mark.dependency(name="i", depends=["f", "h"])
def test_i():
pass
@pytest.mark.dependency(name="j", depends=["d", "h"])
def test_j():
pass
@pytest.mark.dependency(name="k", depends=["g", "i", "j"])
def test_k():
pass
""")
result = testdir.runpytest("--verbose")
result.assert_outcomes(passed=5, skipped=5, failed=1)
result.stdout.fnmatch_lines("""
*::test_a SKIPPED
*::test_b FAILED
*::test_c PASSED
*::test_d PASSED
*::test_e PASSED
*::test_f SKIPPED
*::test_g SKIPPED
*::test_h PASSED
*::test_i SKIPPED
*::test_j PASSED
*::test_k SKIPPED
""")
|
Add a test with multiple dependencies.
|
Add a test with multiple dependencies.
|
Python
|
apache-2.0
|
RKrahl/pytest-dependency
|
Add a test with multiple dependencies.
|
"""A complicated scenario with tests having multiple dependencies.
"""
import pytest
pytest_plugins = "pytester"
def test_multiple(testdir):
testdir.makepyfile("""
import pytest
pytest_plugins = "pytest_dependency"
@pytest.mark.dependency(name="a")
def test_a():
pytest.skip("explicit skip")
@pytest.mark.dependency(name="b")
def test_b():
assert False
@pytest.mark.dependency(name="c")
def test_c():
pass
@pytest.mark.dependency(name="d")
def test_d():
pass
@pytest.mark.dependency(name="e")
def test_e():
pass
@pytest.mark.dependency(name="f", depends=["a", "c"])
def test_f():
pass
@pytest.mark.dependency(name="g", depends=["b", "d"])
def test_g():
pass
@pytest.mark.dependency(name="h", depends=["c", "e"])
def test_h():
pass
@pytest.mark.dependency(name="i", depends=["f", "h"])
def test_i():
pass
@pytest.mark.dependency(name="j", depends=["d", "h"])
def test_j():
pass
@pytest.mark.dependency(name="k", depends=["g", "i", "j"])
def test_k():
pass
""")
result = testdir.runpytest("--verbose")
result.assert_outcomes(passed=5, skipped=5, failed=1)
result.stdout.fnmatch_lines("""
*::test_a SKIPPED
*::test_b FAILED
*::test_c PASSED
*::test_d PASSED
*::test_e PASSED
*::test_f SKIPPED
*::test_g SKIPPED
*::test_h PASSED
*::test_i SKIPPED
*::test_j PASSED
*::test_k SKIPPED
""")
|
<commit_before><commit_msg>Add a test with multiple dependencies.<commit_after>
|
"""A complicated scenario with tests having multiple dependencies.
"""
import pytest
pytest_plugins = "pytester"
def test_multiple(testdir):
testdir.makepyfile("""
import pytest
pytest_plugins = "pytest_dependency"
@pytest.mark.dependency(name="a")
def test_a():
pytest.skip("explicit skip")
@pytest.mark.dependency(name="b")
def test_b():
assert False
@pytest.mark.dependency(name="c")
def test_c():
pass
@pytest.mark.dependency(name="d")
def test_d():
pass
@pytest.mark.dependency(name="e")
def test_e():
pass
@pytest.mark.dependency(name="f", depends=["a", "c"])
def test_f():
pass
@pytest.mark.dependency(name="g", depends=["b", "d"])
def test_g():
pass
@pytest.mark.dependency(name="h", depends=["c", "e"])
def test_h():
pass
@pytest.mark.dependency(name="i", depends=["f", "h"])
def test_i():
pass
@pytest.mark.dependency(name="j", depends=["d", "h"])
def test_j():
pass
@pytest.mark.dependency(name="k", depends=["g", "i", "j"])
def test_k():
pass
""")
result = testdir.runpytest("--verbose")
result.assert_outcomes(passed=5, skipped=5, failed=1)
result.stdout.fnmatch_lines("""
*::test_a SKIPPED
*::test_b FAILED
*::test_c PASSED
*::test_d PASSED
*::test_e PASSED
*::test_f SKIPPED
*::test_g SKIPPED
*::test_h PASSED
*::test_i SKIPPED
*::test_j PASSED
*::test_k SKIPPED
""")
|
Add a test with multiple dependencies."""A complicated scenario with tests having multiple dependencies.
"""
import pytest
pytest_plugins = "pytester"
def test_multiple(testdir):
testdir.makepyfile("""
import pytest
pytest_plugins = "pytest_dependency"
@pytest.mark.dependency(name="a")
def test_a():
pytest.skip("explicit skip")
@pytest.mark.dependency(name="b")
def test_b():
assert False
@pytest.mark.dependency(name="c")
def test_c():
pass
@pytest.mark.dependency(name="d")
def test_d():
pass
@pytest.mark.dependency(name="e")
def test_e():
pass
@pytest.mark.dependency(name="f", depends=["a", "c"])
def test_f():
pass
@pytest.mark.dependency(name="g", depends=["b", "d"])
def test_g():
pass
@pytest.mark.dependency(name="h", depends=["c", "e"])
def test_h():
pass
@pytest.mark.dependency(name="i", depends=["f", "h"])
def test_i():
pass
@pytest.mark.dependency(name="j", depends=["d", "h"])
def test_j():
pass
@pytest.mark.dependency(name="k", depends=["g", "i", "j"])
def test_k():
pass
""")
result = testdir.runpytest("--verbose")
result.assert_outcomes(passed=5, skipped=5, failed=1)
result.stdout.fnmatch_lines("""
*::test_a SKIPPED
*::test_b FAILED
*::test_c PASSED
*::test_d PASSED
*::test_e PASSED
*::test_f SKIPPED
*::test_g SKIPPED
*::test_h PASSED
*::test_i SKIPPED
*::test_j PASSED
*::test_k SKIPPED
""")
|
<commit_before><commit_msg>Add a test with multiple dependencies.<commit_after>"""A complicated scenario with tests having multiple dependencies.
"""
import pytest
pytest_plugins = "pytester"
def test_multiple(testdir):
testdir.makepyfile("""
import pytest
pytest_plugins = "pytest_dependency"
@pytest.mark.dependency(name="a")
def test_a():
pytest.skip("explicit skip")
@pytest.mark.dependency(name="b")
def test_b():
assert False
@pytest.mark.dependency(name="c")
def test_c():
pass
@pytest.mark.dependency(name="d")
def test_d():
pass
@pytest.mark.dependency(name="e")
def test_e():
pass
@pytest.mark.dependency(name="f", depends=["a", "c"])
def test_f():
pass
@pytest.mark.dependency(name="g", depends=["b", "d"])
def test_g():
pass
@pytest.mark.dependency(name="h", depends=["c", "e"])
def test_h():
pass
@pytest.mark.dependency(name="i", depends=["f", "h"])
def test_i():
pass
@pytest.mark.dependency(name="j", depends=["d", "h"])
def test_j():
pass
@pytest.mark.dependency(name="k", depends=["g", "i", "j"])
def test_k():
pass
""")
result = testdir.runpytest("--verbose")
result.assert_outcomes(passed=5, skipped=5, failed=1)
result.stdout.fnmatch_lines("""
*::test_a SKIPPED
*::test_b FAILED
*::test_c PASSED
*::test_d PASSED
*::test_e PASSED
*::test_f SKIPPED
*::test_g SKIPPED
*::test_h PASSED
*::test_i SKIPPED
*::test_j PASSED
*::test_k SKIPPED
""")
|