Dataset schema (one record per commit; string lengths and class counts as reported by the dataset viewer). The tagged columns render each record as a `<commit_before>…<commit_msg>…<commit_after>` prompt/response pair.

| column | type | length / classes |
|---|---|---|
| commit | string | 40 |
| old_file | string | 4–118 |
| new_file | string | 4–118 |
| old_contents | string | 0–2.94k |
| new_contents | string | 1–4.43k |
| subject | string | 15–444 |
| message | string | 16–3.45k |
| lang | class | 1 value |
| license | class | 13 values |
| repos | string | 5–43.2k |
| prompt | string | 17–4.58k |
| response | string | 1–4.43k |
| prompt_tagged | string | 58–4.62k |
| response_tagged | string | 1–4.43k |
| text | string | 132–7.29k |
| text_tagged | string | 173–7.33k |

The last six columns are derived from the first ones, so they are not repeated in the records below: prompt = message, response = new_contents, prompt_tagged = `<commit_before>{old_contents}<commit_msg>{message}<commit_after>`, response_tagged = new_contents, text = prompt + response, and text_tagged = prompt_tagged + response_tagged.
---

commit: 2fbeeed52251c7b005e826122d731bfa25d1e41a
old_file / new_file: tweets/migrations/0001_initial.py
subject / message: Add initial migration for tweets app
lang: Python
license: mit
repos: gwhigs/tweeter,gwhigs/tweeter,gwhigs/tweeter,gwhigs/tweeter
old_contents: (empty; new file)
new_contents:

```python
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-11 15:02
from __future__ import unicode_literals

from django.db import migrations, models
import django.utils.timezone
import model_utils.fields


class Migration(migrations.Migration):

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Tweet',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
                ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
                ('text', models.CharField(max_length=140)),
                ('tweeted', models.BooleanField(default=False)),
            ],
            options={
                'ordering': ('-created',),
            },
        ),
    ]
```
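For context, a model along these lines would generate the migration above. This is a minimal sketch, assuming the app uses django-model-utils' TimeStampedModel (which supplies exactly the AutoCreatedField/AutoLastModifiedField pair seen in the migration); the class body is an inference, not part of the commit:

```python
# Hypothetical source model (inferred from the migration, not in the commit).
from django.db import models
from model_utils.models import TimeStampedModel  # provides `created` / `modified`


class Tweet(TimeStampedModel):
    text = models.CharField(max_length=140)  # classic 140-char tweet limit
    tweeted = models.BooleanField(default=False)

    class Meta:
        ordering = ('-created',)
```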
---

commit: 8c317b34f4897f497c9f31255c706ec886b6cf23
old_file / new_file: bluebottle/payouts/migrations/0020_auto_20190123_1731.py
subject / message: Set Stripe document type to 'passort' for old projects.
lang: Python
license: bsd-3-clause
repos: onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
old_contents: (empty; new file)
new_contents:

```python
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2019-01-23 16:31
from __future__ import unicode_literals

from django.db import migrations


def set_stripe_document_default(apps, schema_editor):
    StripePayoutAccount = apps.get_model('payouts', 'StripePayoutAccount')
    StripePayoutAccount.objects.update(document_type='passport')


class Migration(migrations.Migration):

    dependencies = [
        ('payouts', '0019_auto_20190123_1216'),
    ]

    operations = [
        migrations.RunPython(set_stripe_document_default, migrations.RunPython.noop)
    ]
```
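Note that `QuerySet.update()` rewrites document_type on every StripePayoutAccount row in a single SQL statement, bypassing model save() methods and signals. A hedged variant that only backfills rows with no document type yet; the blank-value filter is an assumption about the schema, not part of the commit:

```python
def set_stripe_document_default(apps, schema_editor):
    # Hypothetical narrower backfill: leave explicitly chosen types untouched.
    StripePayoutAccount = apps.get_model('payouts', 'StripePayoutAccount')
    StripePayoutAccount.objects.filter(document_type='').update(document_type='passport')
```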
---

commit: 048fd80ee74e5ef4544e1a6bbd6639ae27f4d144
old_file / new_file: src/pybel/io/tsv/converters.py
subject / message: Add additional backwards-compatibility for pybel.io.tsv/pybel.io.triples
lang: Python
license: mit
repos: pybel/pybel,pybel/pybel,pybel/pybel
old_contents: (empty; new file)
new_contents:

```python
# -*- coding: utf-8 -*-

"""Warnings for old TSV conversion module."""

import warnings

from ..triples.converters import _safe_label

__all__ = [
    '_safe_label',
]

warnings.warn(
    '''Use pybel.io.triples module instead. Changes in PyBEL v0.15.0:

- pybel.io.tsv.converters._safe_label renamed to pybel.io.triples.converters._safe_label

Will be removed in PyBEL v0.16.*
''',
    DeprecationWarning,
)
```
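CPython hides DeprecationWarning by default outside of code running in `__main__`, so downstream library users may never see this message. A minimal sketch of surfacing it, using only the standard library:

```python
import warnings

# Re-enable DeprecationWarning before the shim module is first imported.
warnings.simplefilter("default", DeprecationWarning)

import pybel.io.tsv.converters  # noqa: F401  -- emits the warning at import time
```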
---

commit: aec424ec4b9d5ff482d7afe705cecbd463b4ddcd
old_file / new_file: convnets.py
subject: Add code for simple convnet
message: Add code for simple convnet
  Basic code to run a very very simple convnet
lang: Python
license: mit
repos: KT12/hands_on_machine_learning
old_contents: (empty; new file)
new_contents:

```python
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
from sklearn.datasets import load_sample_image
from sklearn.datasets import load_sample_images


# Utility functions
def plot_image(image):
    plt.imshow(image, cmap="gray", interpolation="nearest")
    plt.axis("off")


def plot_color_image(image):
    plt.imshow(image.astype(np.uint8), interpolation="nearest")
    plt.axis("off")


# Load sample images
china = load_sample_image('china.jpg')
flower = load_sample_image('flower.jpg')
image = china[150:220, 130:250]
height, width, channels = image.shape
image_grayscale = image.mean(axis=2).astype(np.float32)
images = image_grayscale.reshape(1, height, width, 1)

dataset = np.array(load_sample_images().images, dtype=np.float32)
batchsize, height, width, channels = dataset.shape

# Create 2 filters
fmap = np.zeros(shape=(7, 7, channels, 2), dtype=np.float32)
fmap[:, 3, 0, 0] = 1
fmap[3, :, 0, 1] = 1
plot_image(fmap[:, :, 0, 0])
plt.show()
plot_image(fmap[:, :, 0, 1])
plt.show()

X = tf.placeholder(tf.float32, shape=(None, height, width, channels))
convolution = tf.nn.conv2d(X, fmap, strides=[1, 2, 2, 1], padding='SAME')

with tf.Session() as sess:
    output = sess.run(convolution, feed_dict={X: dataset})

plt.imshow(output[0, :, :, 1])
plt.show()
```
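This script is written against the TensorFlow 1.x graph API; tf.placeholder and tf.Session were removed in TensorFlow 2. A minimal sketch of the same convolution in eager mode (assumes tensorflow>=2; not part of the commit):

```python
import numpy as np
import tensorflow as tf
from sklearn.datasets import load_sample_images

dataset = np.array(load_sample_images().images, dtype=np.float32)
channels = dataset.shape[-1]

fmap = np.zeros((7, 7, channels, 2), dtype=np.float32)
fmap[:, 3, 0, 0] = 1  # vertical-line filter
fmap[3, :, 0, 1] = 1  # horizontal-line filter

# Eager execution: no placeholder/session plumbing needed.
output = tf.nn.conv2d(dataset, fmap, strides=[1, 2, 2, 1], padding='SAME').numpy()
```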
---

commit: 8c55949339b2292f226bcbe1ea0b06a209fe85af
old_file / new_file: Cython/Compiler/Tests/TestTypes.py
subject / message: Add some basic tests for PyrexTypes.widest_numeric_type().
lang: Python
license: apache-2.0
repos: scoder/cython,scoder/cython,scoder/cython,cython/cython,cython/cython,da-woods/cython,da-woods/cython,da-woods/cython,da-woods/cython,cython/cython,cython/cython,scoder/cython
old_contents: (empty; new file)
new_contents:

```python
from __future__ import absolute_import

import unittest

import Cython.Compiler.PyrexTypes as PT


class TestMethodDispatcherTransform(unittest.TestCase):

    def test_widest_numeric_type(self):
        def assert_widest(type1, type2, widest):
            self.assertEqual(widest, PT.widest_numeric_type(type1, type2))

        assert_widest(PT.c_int_type, PT.c_long_type, PT.c_long_type)
        assert_widest(PT.c_double_type, PT.c_long_type, PT.c_double_type)
        assert_widest(PT.c_longdouble_type, PT.c_long_type, PT.c_longdouble_type)

        cenum = PT.CEnumType("E", "cenum", typedef_flag=False)
        assert_widest(PT.c_int_type, cenum, PT.c_int_type)
```
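To run just this case from a source checkout, a usage sketch (assumes Cython is importable):

```python
import unittest

# Load and run the single test module by its dotted name.
unittest.main(module='Cython.Compiler.Tests.TestTypes', exit=False)
```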
---

commit: c5e57f0f428369706d94a6bf67fa1eeb597bc857
old_file / new_file: tests/test_daemon.py
subject / message: Add tests for the daemon mode
lang: Python
license: mit
repos: jackstanek/BotBot,jackstanek/BotBot
old_contents: (empty; new file)
new_contents:

```python
import inotify
import pytest
import os

from botbot import daemon


def test_func():
    pass


def get_dbpath():
    return os.path.join('.', 'test.db')


def test_daemon_constructor(tmpdir):
    daemon.get_dbpath = get_dbpath
    prev = tmpdir.chdir()
    d = daemon.DaemonizedChecker('.')
    assert d
    prev.chdir()


def test_daemon_init_no_handlers(tmpdir):
    daemon.get_dbpath = get_dbpath
    prev = tmpdir.chdir()
    d = daemon.DaemonizedChecker('.')
    d.init()
    assert len(d.handle_hook) == 0
    prev.chdir()


def test_daemon_init_with_handler(tmpdir):
    daemon.get_dbpath = get_dbpath
    prev = tmpdir.chdir()
    d = daemon.DaemonizedChecker('.')
    d.init((test_func, 0))
    assert len(d.handle_hook) == 1
    prev.chdir()


def test_daemon_init_with_multiple_handlers(tmpdir):
    daemon.get_dbpath = get_dbpath
    prev = tmpdir.chdir()
    d = daemon.DaemonizedChecker('.')
    d.init((test_func, 0), (test_func, 1))
    assert len(d.handle_hook) == 2
    prev.chdir()


def test_add_event_handler_default_mask(tmpdir):
    daemon.get_dbpath = get_dbpath
    prev = tmpdir.chdir()
    d = daemon.DaemonizedChecker('.')
    d.add_event_handler(test_func)
    assert d.handle_hook
    prev.chdir()


def test_add_event_handler_custom_mask(tmpdir):
    daemon.get_dbpath = get_dbpath
    prev = tmpdir.chdir()
    d = daemon.DaemonizedChecker('.')
    d.add_event_handler(test_func, 0)
    assert d.handle_hook
    prev.chdir()
```
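Each test patches daemon.get_dbpath by plain module-attribute assignment, which leaks the patch into any test that runs afterwards. A hedged sketch of the same setup with pytest's monkeypatch fixture, which undoes the patch automatically (same module and names assumed):

```python
def test_daemon_constructor(tmpdir, monkeypatch):
    # monkeypatch restores daemon.get_dbpath after the test finishes.
    monkeypatch.setattr(daemon, 'get_dbpath', get_dbpath)
    with tmpdir.as_cwd():  # chdir in, and back out, automatically
        assert daemon.DaemonizedChecker('.')
```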
---

commit: c6e5ea3727be92e2478df748acb29d1fc5bc3b76
old_file / new_file: tests/test_routes.py
subject / message: Add Test of static routing
lang: Python
license: mit
repos: c-bata/kobin,kobinpy/kobin,kobinpy/kobin,c-bata/kobin
old_contents: (empty; new file)
new_contents:

```python
from unittest import TestCase

from kobin.routes import Router


class RouterTest(TestCase):
    def setUp(self):
        self.router = Router()

    def test_add_func_when_input_static(self):
        def target_func(): pass
        self.router.add('/user/', 'GET', target_func)
        actual = self.router.static
        self.assertIn('GET', actual)
        self.assertIn('/user/', actual['GET'])
        expected_tuple = (target_func, None)
        self.assertTupleEqual(actual['GET']['/user/'], expected_tuple)
```
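A minimal Router sketch that would satisfy this test, shown only to illustrate the asserted data layout (method, then path, mapping to a (callback, extra) tuple); kobin's actual implementation may differ:

```python
class Router:
    def __init__(self):
        self.static = {}  # e.g. {'GET': {'/user/': (callback, None)}}

    def add(self, rule, method, target):
        # Static rules (no dynamic segments) are indexed by method, then path.
        self.static.setdefault(method, {})[rule] = (target, None)
```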
---

commit: ff3c664896a95b119f9b5b6d46adb0f36461f218
old_file / new_file: ecpy/utils/flags.py
subject / message: Add a thread safe convenient bit flag class.
lang: Python
license: bsd-3-clause
repos: Ecpy/ecpy,Ecpy/ecpy
old_contents: (empty; new file)
new_contents:

```python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright 2015 by Ecpy Authors, see AUTHORS for more details.
#
# Distributed under the terms of the BSD license.
#
# The full license is in the file LICENCE, distributed with this software.
# -----------------------------------------------------------------------------
"""Thread safe bit flag with convenient interface.

"""
from __future__ import (division, unicode_literals, print_function,
                        absolute_import)

from threading import Event, RLock


class BitFlag(object):
    """Bit flag conveniency class providing thread safety facilities.

    Parameters
    ----------
    flags : iterable[unicode]
        Name of the flags that this flag understand.

    """
    __slots__ = ('flags', '_lock', '_flags', '_events', '_state')

    def __new__(cls, flags):
        self = object.__new__(cls)
        self.flags = flags
        self._flags = {f: 2**i for i, f in enumerate(flags)}
        self._events = {}
        self._lock = RLock()
        self._state = 0

    def set(self, *flags):
        """Set specified flags.

        If a flag is already set this is a no-op. If a thread is waiting on a
        flag, it gets notified.

        """
        with self._lock:
            for f in flags:
                self._state |= self._flags[f]
                if f in self._events:
                    self._events[f].set()
                    del self._events[f]

    def clear(self, *flags):
        """Clear the specified flags.

        If a flag is already cleared this is a no-op. If a thread is waiting
        on a flag clearing, it gets notified.

        """
        with self._lock:
            for f in flags:
                self._state &= ~self._flags[f]

    def test(self, *flags):
        """Test is all specified flags are set.

        """
        res = False
        with self._lock:
            for f in flags:
                res &= self._state & f
        return res

    def wait(self, timeout, *flags):
        """Wait till some flags are set.

        Parameters
        ----------
        timeout : float|None
            Maximum time to wait. If None waits forever.

        flags : iterable[unicode]
            Flags upon which to wait.

        Returns
        -------
        result : bool
            False of the method returned because of the timeout.

        """
        events = []
        with self._lock:
            for f in flags:
                if not self.test(f):
                    if f not in self._events:
                        self._events[f] = Event()
                    events = self._events[f]

        res = True
        for e in events:
            res &= e.wait(timeout)

        return res
```
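As committed, the class has three apparent defects: __new__ falls off the end without returning the instance (so BitFlag(...) evaluates to None), test() starts its accumulator at False and masks the state with the flag name f rather than its bit self._flags[f] (so it can never report True), and wait() rebinds events = self._events[f] instead of appending to the list it just created. A hedged sketch of the intended behavior for those three methods, reusing the class's own Event/RLock setup (an assumption about intent, not part of the commit):

```python
    def __new__(cls, flags):
        self = object.__new__(cls)
        self.flags = flags
        self._flags = {f: 2**i for i, f in enumerate(flags)}
        self._events = {}
        self._lock = RLock()
        self._state = 0
        return self  # the committed version omits this return

    def test(self, *flags):
        """True only if every named flag is currently set."""
        with self._lock:
            return all(self._state & self._flags[f] for f in flags)

    def wait(self, timeout, *flags):
        events = []
        with self._lock:
            for f in flags:
                if not self.test(f):
                    # Collect one Event per unset flag instead of rebinding.
                    events.append(self._events.setdefault(f, Event()))
        return all(e.wait(timeout) for e in events)
```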
---

commit: dbb7a0ff25bcf491dfc05676dd578db5f2769fef
old_file / new_file: tests/test_rq/test_bcolz_stock.py
subject / message: Add example for bcolz stock data
lang: Python
license: apache-2.0
repos: fs714/ialgotest,fs714/ialgotest
old_contents: (empty; new file)
new_contents:

```python
import bcolz
import numpy as np


def field_type(c_dtyep, field, dt_orig):
    try:
        return c_dtyep[field][0]
    except KeyError:
        return dt_orig


float64 = np.dtype('float64')
cdtype = {
    'open': (float64, 1 / 10000.0, 4),
    'close': (float64, 1 / 10000.0, 4),
    'high': (float64, 1 / 10000.0, 4),
    'low': (float64, 1 / 10000.0, 4),
    'limit_up': (float64, 1 / 10000.0, 4),
    'limit_down': (float64, 1 / 10000.0, 4),
}

if __name__ == '__main__':
    bc = bcolz.open('/home/eshufan/project/bundle/stocks.bcolz', 'r')
    # Access /home/eshufan/project/bundle/stocks.bcolz/__attrs__
    se = bc.attrs['line_map']
    print('Get the lines for 600000.XSHG')
    s, e = se['600000.XSHG']
    print('Start from {}, end to {}'.format(s, e))
    print('Get all column names')
    print(bc.names)
    print('Exclude the first column for date')
    fields = bc.names[1:]
    print(fields)
    print('Print original data type for each column')
    print([bc.cols[f].dtype for f in fields])
    print('Construct the data type for each column')
    dtype = np.dtype([('datetime', np.uint64)] + [(f, field_type(cdtype, f, bc.cols[f].dtype)) for f in fields])
    print(dtype)
    print('Create an empty numpy list with random valume, size is row: e - s, column: len(dtype) (Include datetime)')
    result = np.empty(shape=(e - s,), dtype=dtype)
    print('Feed the real data')
    for f in fields:
        result[f][:] = bc.cols[f][s:e]
    print(result)
    print('Update column datetime')
    result['datetime'][:] = bc.cols['date'][s:e].astype(np.uint64) * 1000000
    print(result)
```
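Each cdtype entry carries a scale factor (1/10000.0) and a precision alongside the target dtype, but the example only ever uses the dtype and never applies the scale. If the bundle stores prices as integers multiplied by 10000 (an assumption implied by those factors, not confirmed by the commit), a rescaling pass would follow the copy loop:

```python
    # Hedged: convert integer-encoded prices to real values using the stored scale.
    for f in fields:
        if f in cdtype:
            result[f][:] *= cdtype[f][1]
```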
---

commit: 4a3ce97a93e8b43082c4ae243ac8127c92890b28
old_file / new_file: tools/diagnose-me.py
subject: Check in a script to diagnose common system configuration problems.
message: Check in a script to diagnose common system configuration problems.
  Review URL: https://chromiumcodereview.appspot.com/9309011
  git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@120430 0039d316-1c4b-4281-b951-d872f2087c98
lang: Python
license: bsd-3-clause
repos: rogerwang/chromium,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,Chilledheart/chromium,chuan9/chromium-crosswalk,nacl-webkit/chrome_deps,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hujiajie/pa-chromium,PeterWangIntel/chromium-crosswalk,robclark/chromium,crosswalk-project/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk,markYoungH/chromium.src,rogerwang/chromium,hujiajie/pa-chromium,littlstar/chromium.src,jaruba/chromium.src,Jonekee/chromium.src,mogoweb/chromium-crosswalk,keishi/chromium,hgl888/chromium-crosswalk,dednal/chromium.src,ChromiumWebApps/chromium,anirudhSK/chromium,Just-D/chromium-1,junmin-zhu/chromium-rivertrail,anirudhSK/chromium,mohamed--abdel-maksoud/chromium.src,hujiajie/pa-chromium,M4sse/chromium.src,pozdnyakov/chromium-crosswalk,mogoweb/chromium-crosswalk,mogoweb/chromium-crosswalk,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,hujiajie/pa-chromium,mohamed--abdel-maksoud/chromium.src,bright-sparks/chromium-spacewalk,krieger-od/nwjs_chromium.src,bright-sparks/chromium-spacewalk,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,ltilve/chromium,crosswalk-project/chromium-crosswalk-efl,pozdnyakov/chromium-crosswalk,M4sse/chromium.src,M4sse/chromium.src,pozdnyakov/chromium-crosswalk,fujunwei/chromium-crosswalk,mogoweb/chromium-crosswalk,zcbenz/cefode-chromium,zcbenz/cefode-chromium,rogerwang/chromium,crosswalk-project/chromium-crosswalk-efl,pozdnyakov/chromium-crosswalk,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,ChromiumWebApps/chromium,keishi/chromium,markYoungH/chromium.src,timopulkkinen/BubbleFish,jaruba/chromium.src,Fireblend/chromium-crosswalk,littlstar/chromium.src,mogoweb/chromium-crosswalk,Jonekee/chromium.src,markYoungH/chromium.src,dednal/chromium.src,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk-efl,robclark/chromium,hgl888/chromium-crosswalk,anirudhSK/chromium,fujunwei/chromium-crosswalk,Chilledheart/chromium,nacl-webkit/chrome_deps,junmin-zhu/chromium-rivertrail,junmin-zhu/chromium-rivertrail,dednal/chromium.src,keishi/chromium,markYoungH/chromium.src,anirudhSK/chromium,chuan9/chromium-crosswalk,Just-D/chromium-1,fujunwei/chromium-crosswalk,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,zcbenz/cefode-chromium,M4sse/chromium.src,Pluto-tv/chromium-crosswalk,rogerwang/chromium,hujiajie/pa-chromium,ltilve/chromium,keishi/chromium,ChromiumWebApps/chromium,jaruba/chromium.src,jaruba/chromium.src,axinging/chromium-crosswalk,junmin-zhu/chromium-rivertrail,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,Fireblend/chromium-crosswalk,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,fujunwei/chromium-crosswalk,bright-sparks/chromium-spacewalk,crosswalk-project/chromium-crosswalk-efl,markYoungH/chromium.src,Chilledheart/chromium,rogerwang/chromium,jaruba/chromium.src,keishi/chromium,hgl888/chromium-crosswalk,ltilve/chromium,fujunwei/chromium-crosswalk,jaruba/chromium.src,Fireblend/chromium-crosswalk,nacl-webkit/chrome_deps,krieger-od/nwjs_chromium.src,robclark/chromium,zcbenz/cefode-chromium,chuan9/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,timopulkkinen/BubbleFish,dushu1203/chromium.src,axinging/chromium-crosswalk,pozdnyakov/chromium-crosswalk,anirudhSK/chromium,rogerwang/chromium,jaruba/chromium.src,jaruba/chromium.src,mogoweb/chromium-crosswalk,nacl-webkit/chrome_deps,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,Jonekee/chromium.src,Jonekee/chromium.src,Jonekee/chromium.src,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,pozdnyakov/chromium-crosswalk,pozdnyakov/chromium-crosswalk,junmin-zhu/chromium-rivertrail,robclark/chromium,chuan9/chromium-crosswalk,M4sse/chromium.src,hujiajie/pa-chromium,dushu1203/chromium.src,zcbenz/cefode-chromium,TheTypoMaster/chromium-crosswalk,bright-sparks/chromium-spacewalk,ltilve/chromium,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,Chilledheart/chromium,Chilledheart/chromium,Jonekee/chromium.src,dednal/chromium.src,littlstar/chromium.src,ondra-novak/chromium.src,ondra-novak/chromium.src,mogoweb/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,patrickm/chromium.src,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,rogerwang/chromium,nacl-webkit/chrome_deps,anirudhSK/chromium,mogoweb/chromium-crosswalk,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,Just-D/chromium-1,fujunwei/chromium-crosswalk,ChromiumWebApps/chromium,timopulkkinen/BubbleFish,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,zcbenz/cefode-chromium,zcbenz/cefode-chromium,Just-D/chromium-1,axinging/chromium-crosswalk,markYoungH/chromium.src,patrickm/chromium.src,ChromiumWebApps/chromium,PeterWangIntel/chromium-crosswalk,junmin-zhu/chromium-rivertrail,jaruba/chromium.src,hgl888/chromium-crosswalk,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,ondra-novak/chromium.src,zcbenz/cefode-chromium,markYoungH/chromium.src,dushu1203/chromium.src,bright-sparks/chromium-spacewalk,timopulkkinen/BubbleFish,Fireblend/chromium-crosswalk,ondra-novak/chromium.src,hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,anirudhSK/chromium,keishi/chromium,krieger-od/nwjs_chromium.src,hujiajie/pa-chromium,Just-D/chromium-1,ChromiumWebApps/chromium,TheTypoMaster/chromium-crosswalk,timopulkkinen/BubbleFish,crosswalk-project/chromium-crosswalk-efl,junmin-zhu/chromium-rivertrail,zcbenz/cefode-chromium,hgl888/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,ChromiumWebApps/chromium,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,pozdnyakov/chromium-crosswalk,dushu1203/chromium.src,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,axinging/chromium-crosswalk,mogoweb/chromium-crosswalk,axinging/chromium-crosswalk,dushu1203/chromium.src,zcbenz/cefode-chromium,Fireblend/chromium-crosswalk,timopulkkinen/BubbleFish,hujiajie/pa-chromium,anirudhSK/chromium,rogerwang/chromium,ondra-novak/chromium.src,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,dednal/chromium.src,littlstar/chromium.src,axinging/chromium-crosswalk,anirudhSK/chromium,timopulkkinen/BubbleFish,robclark/chromium,Chilledheart/chromium,M4sse/chromium.src,nacl-webkit/chrome_deps,TheTypoMaster/chromium-crosswalk,Jonekee/chromium.src,dushu1203/chromium.src,keishi/chromium,robclark/chromium,nacl-webkit/chrome_deps,chuan9/chromium-crosswalk,timopulkkinen/BubbleFish,ChromiumWebApps/chromium,TheTypoMaster/chromium-crosswalk,mogoweb/chromium-crosswalk,junmin-zhu/chromium-rivertrail,Chilledheart/chromium,fujunwei/chromium-crosswalk,hujiajie/pa-chromium,markYoungH/chromium.src,M4sse/chromium.src,nacl-webkit/chrome_deps,pozdnyakov/chromium-crosswalk,krieger-od/nwjs_chromium.src,bright-sparks/chromium-spacewalk,hujiajie/pa-chromium,ChromiumWebApps/chromium,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,ltilve/chromium,patrickm/chromium.src,Fireblend/chromium-crosswalk,M4sse/chromium.src,ondra-novak/chromium.src,Just-D/chromium-1,hgl888/chromium-crosswalk-efl,nacl-webkit/chrome_deps,anirudhSK/chromium,patrickm/chromium.src,hgl888/chromium-crosswalk-efl,junmin-zhu/chromium-rivertrail,M4sse/chromium.src,dednal/chromium.src,patrickm/chromium.src,timopulkkinen/BubbleFish,nacl-webkit/chrome_deps,ChromiumWebApps/chromium,pozdnyakov/chromium-crosswalk,rogerwang/chromium,krieger-od/nwjs_chromium.src,Fireblend/chromium-crosswalk,anirudhSK/chromium,ondra-novak/chromium.src,junmin-zhu/chromium-rivertrail,littlstar/chromium.src,keishi/chromium,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,patrickm/chromium.src,ltilve/chromium,ltilve/chromium,dushu1203/chromium.src,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,anirudhSK/chromium,ondra-novak/chromium.src,keishi/chromium,robclark/chromium,dednal/chromium.src,timopulkkinen/BubbleFish,chuan9/chromium-crosswalk,dushu1203/chromium.src,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,keishi/chromium,Just-D/chromium-1,ChromiumWebApps/chromium,dednal/chromium.src,patrickm/chromium.src,krieger-od/nwjs_chromium.src,jaruba/chromium.src,robclark/chromium,dednal/chromium.src,dednal/chromium.src,patrickm/chromium.src,zcbenz/cefode-chromium,hujiajie/pa-chromium,PeterWangIntel/chromium-crosswalk,pozdnyakov/chromium-crosswalk,keishi/chromium,robclark/chromium,dushu1203/chromium.src,nacl-webkit/chrome_deps,Pluto-tv/chromium-crosswalk,littlstar/chromium.src,krieger-od/nwjs_chromium.src,littlstar/chromium.src,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,littlstar/chromium.src,ChromiumWebApps/chromium,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,dushu1203/chromium.src,timopulkkinen/BubbleFish,krieger-od/nwjs_chromium.src,Jonekee/chromium.src
old_contents: (empty; new file)
new_contents:

```python
#!/usr/bin/python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Diagnose some common system configuration problems on Linux, and
suggest fixes."""

import subprocess
import sys

all_checks = []

def Check(name):
    """Decorator that defines a diagnostic check."""
    def wrap(func):
        all_checks.append((name, func))
        return func
    return wrap

@Check("/usr/bin/ld is not gold")
def CheckSystemLd():
    proc = subprocess.Popen(['/usr/bin/ld', '-v'], stdout=subprocess.PIPE)
    stdout = proc.communicate()[0]
    if 'GNU gold' in stdout:
        return ("When /usr/bin/ld is gold, system updates can silently\n"
                "corrupt your graphics drivers.\n"
                "Try 'sudo apt-get remove binutils-gold'.\n")
    return None

@Check("random lds are not in the $PATH")
def CheckPathLd():
    proc = subprocess.Popen(['which', '-a', 'ld'], stdout=subprocess.PIPE)
    stdout = proc.communicate()[0]
    instances = stdout.split()
    if len(instances) > 1:
        return ("You have multiple 'ld' binaries in your $PATH:\n"
                + '\n'.join(' - ' + i for i in instances) + "\n"
                "You should delete all of them but your system one.\n"
                "gold is hooked into your build via gyp.\n")
    return None

def RunChecks():
    for name, check in all_checks:
        sys.stdout.write("* Checking %s: " % name)
        sys.stdout.flush()
        error = check()
        if not error:
            print "ok"
        else:
            print "FAIL"
            print error

if __name__ == '__main__':
    RunChecks()
```
Check in a script to diagnose common system configuration problems.
Review URL: https://chromiumcodereview.appspot.com/9309011
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@120430 0039d316-1c4b-4281-b951-d872f2087c98
|
#!/usr/bin/python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Diagnose some common system configuration problems on Linux, and
suggest fixes."""
import subprocess
import sys
all_checks = []
def Check(name):
"""Decorator that defines a diagnostic check."""
def wrap(func):
all_checks.append((name, func))
return func
return wrap
@Check("/usr/bin/ld is not gold")
def CheckSystemLd():
proc = subprocess.Popen(['/usr/bin/ld', '-v'], stdout=subprocess.PIPE)
stdout = proc.communicate()[0]
if 'GNU gold' in stdout:
return ("When /usr/bin/ld is gold, system updates can silently\n"
"corrupt your graphics drivers.\n"
"Try 'sudo apt-get remove binutils-gold'.\n")
return None
@Check("random lds are not in the $PATH")
def CheckPathLd():
proc = subprocess.Popen(['which', '-a', 'ld'], stdout=subprocess.PIPE)
stdout = proc.communicate()[0]
instances = stdout.split()
if len(instances) > 1:
return ("You have multiple 'ld' binaries in your $PATH:\n"
+ '\n'.join(' - ' + i for i in instances) + "\n"
"You should delete all of them but your system one.\n"
"gold is hooked into your build via gyp.\n")
return None
def RunChecks():
for name, check in all_checks:
sys.stdout.write("* Checking %s: " % name)
sys.stdout.flush()
error = check()
if not error:
print "ok"
else:
print "FAIL"
print error
if __name__ == '__main__':
RunChecks()
|
<commit_before><commit_msg>Check in a script to diagnose common system configuration problems.
Review URL: https://chromiumcodereview.appspot.com/9309011
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@120430 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
#!/usr/bin/python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Diagnose some common system configuration problems on Linux, and
suggest fixes."""
import subprocess
import sys
all_checks = []
def Check(name):
"""Decorator that defines a diagnostic check."""
def wrap(func):
all_checks.append((name, func))
return func
return wrap
@Check("/usr/bin/ld is not gold")
def CheckSystemLd():
proc = subprocess.Popen(['/usr/bin/ld', '-v'], stdout=subprocess.PIPE)
stdout = proc.communicate()[0]
if 'GNU gold' in stdout:
return ("When /usr/bin/ld is gold, system updates can silently\n"
"corrupt your graphics drivers.\n"
"Try 'sudo apt-get remove binutils-gold'.\n")
return None
@Check("random lds are not in the $PATH")
def CheckPathLd():
proc = subprocess.Popen(['which', '-a', 'ld'], stdout=subprocess.PIPE)
stdout = proc.communicate()[0]
instances = stdout.split()
if len(instances) > 1:
return ("You have multiple 'ld' binaries in your $PATH:\n"
+ '\n'.join(' - ' + i for i in instances) + "\n"
"You should delete all of them but your system one.\n"
"gold is hooked into your build via gyp.\n")
return None
def RunChecks():
for name, check in all_checks:
sys.stdout.write("* Checking %s: " % name)
sys.stdout.flush()
error = check()
if not error:
print "ok"
else:
print "FAIL"
print error
if __name__ == '__main__':
RunChecks()
|
Check in a script to diagnose common system configuration problems.
Review URL: https://chromiumcodereview.appspot.com/9309011
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@120430 0039d316-1c4b-4281-b951-d872f2087c98#!/usr/bin/python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Diagnose some common system configuration problems on Linux, and
suggest fixes."""
import subprocess
import sys
all_checks = []
def Check(name):
"""Decorator that defines a diagnostic check."""
def wrap(func):
all_checks.append((name, func))
return func
return wrap
@Check("/usr/bin/ld is not gold")
def CheckSystemLd():
proc = subprocess.Popen(['/usr/bin/ld', '-v'], stdout=subprocess.PIPE)
stdout = proc.communicate()[0]
if 'GNU gold' in stdout:
return ("When /usr/bin/ld is gold, system updates can silently\n"
"corrupt your graphics drivers.\n"
"Try 'sudo apt-get remove binutils-gold'.\n")
return None
@Check("random lds are not in the $PATH")
def CheckPathLd():
proc = subprocess.Popen(['which', '-a', 'ld'], stdout=subprocess.PIPE)
stdout = proc.communicate()[0]
instances = stdout.split()
if len(instances) > 1:
return ("You have multiple 'ld' binaries in your $PATH:\n"
+ '\n'.join(' - ' + i for i in instances) + "\n"
"You should delete all of them but your system one.\n"
"gold is hooked into your build via gyp.\n")
return None
def RunChecks():
for name, check in all_checks:
sys.stdout.write("* Checking %s: " % name)
sys.stdout.flush()
error = check()
if not error:
print "ok"
else:
print "FAIL"
print error
if __name__ == '__main__':
RunChecks()
|
<commit_before><commit_msg>Check in a script to diagnose common system configuration problems.
Review URL: https://chromiumcodereview.appspot.com/9309011
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@120430 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>#!/usr/bin/python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Diagnose some common system configuration problems on Linux, and
suggest fixes."""
import subprocess
import sys
all_checks = []
def Check(name):
"""Decorator that defines a diagnostic check."""
def wrap(func):
all_checks.append((name, func))
return func
return wrap
@Check("/usr/bin/ld is not gold")
def CheckSystemLd():
proc = subprocess.Popen(['/usr/bin/ld', '-v'], stdout=subprocess.PIPE)
stdout = proc.communicate()[0]
if 'GNU gold' in stdout:
return ("When /usr/bin/ld is gold, system updates can silently\n"
"corrupt your graphics drivers.\n"
"Try 'sudo apt-get remove binutils-gold'.\n")
return None
@Check("random lds are not in the $PATH")
def CheckPathLd():
proc = subprocess.Popen(['which', '-a', 'ld'], stdout=subprocess.PIPE)
stdout = proc.communicate()[0]
instances = stdout.split()
if len(instances) > 1:
return ("You have multiple 'ld' binaries in your $PATH:\n"
+ '\n'.join(' - ' + i for i in instances) + "\n"
"You should delete all of them but your system one.\n"
"gold is hooked into your build via gyp.\n")
return None
def RunChecks():
for name, check in all_checks:
sys.stdout.write("* Checking %s: " % name)
sys.stdout.flush()
error = check()
if not error:
print "ok"
else:
print "FAIL"
print error
if __name__ == '__main__':
RunChecks()
|
|
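# Illustrative aside, not part of the commit above: because Check() simply
# appends a (name, func) pair to all_checks, a new diagnostic is just one
# decorated function and RunChecks picks it up automatically. The check
# below is a sketch; the apt package named in the hint is an assumption.
@Check("ccache is installed")
def CheckCcache():
    proc = subprocess.Popen(['which', 'ccache'], stdout=subprocess.PIPE)
    stdout = proc.communicate()[0]
    if not stdout.strip():
        return ("ccache not found; rebuilds will be slower.\n"
                "Try 'sudo apt-get install ccache'.\n")
    return None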
b573047502d6ceb0241257fa26e17ad572639c93
|
tests/utils/test_string_helper.py
|
tests/utils/test_string_helper.py
|
from unittest.case import TestCase
from zsl.utils.string_helper import camelcase_to_underscore, underscore_to_camelcase
class InflectionTestCase(TestCase):
def testCamelCaseToUnderscore(self):
self.assertEquals(
"camel_case_to_underscore",
camelcase_to_underscore("camelCaseToUnderscore"),
"CC to usc conversion"
)
self.assertEquals(
"camel_case_to_underscore",
camelcase_to_underscore("CamelCaseToUnderscore"),
"CC to usc conversion"
)
def testUnderscoreToCamelCase(self):
self.assertEquals(
"camelCaseToUnderscore",
underscore_to_camelcase("camel_case_to_underscore", False),
"CC to usc conversion"
)
self.assertEquals(
"CamelCaseToUnderscore",
underscore_to_camelcase("camel_case_to_underscore"),
"CC to usc conversion"
)
self.assertEquals(
"CamelCaseToUnderscore",
underscore_to_camelcase("camel_case_to_underscore", True),
"CC to usc conversion"
)
|
Add tests of the improved camelcase/underscore conversion
|
Add tests of the improved camelcase/underscore conversion
|
Python
|
mit
|
AtteqCom/zsl,AtteqCom/zsl
|
Add tests of the improved camelcase/underscore conversion
|
from unittest.case import TestCase
from zsl.utils.string_helper import camelcase_to_underscore, underscore_to_camelcase
class InflectionTestCase(TestCase):
def testCamelCaseToUnderscore(self):
self.assertEquals(
"camel_case_to_underscore",
camelcase_to_underscore("camelCaseToUnderscore"),
"CC to usc conversion"
)
self.assertEquals(
"camel_case_to_underscore",
camelcase_to_underscore("CamelCaseToUnderscore"),
"CC to usc conversion"
)
def testUnderscoreToCamelCase(self):
self.assertEquals(
"camelCaseToUnderscore",
underscore_to_camelcase("camel_case_to_underscore", False),
"CC to usc conversion"
)
self.assertEquals(
"CamelCaseToUnderscore",
underscore_to_camelcase("camel_case_to_underscore"),
"CC to usc conversion"
)
self.assertEquals(
"CamelCaseToUnderscore",
underscore_to_camelcase("camel_case_to_underscore", True),
"CC to usc conversion"
)
|
<commit_before><commit_msg>Add tests of the improved camelcase/underscore conversion<commit_after>
|
from unittest.case import TestCase
from zsl.utils.string_helper import camelcase_to_underscore, underscore_to_camelcase
class InflectionTestCase(TestCase):
def testCamelCaseToUnderscore(self):
self.assertEquals(
"camel_case_to_underscore",
camelcase_to_underscore("camelCaseToUnderscore"),
"CC to usc conversion"
)
self.assertEquals(
"camel_case_to_underscore",
camelcase_to_underscore("CamelCaseToUnderscore"),
"CC to usc conversion"
)
def testUnderscoreToCamelCase(self):
self.assertEquals(
"camelCaseToUnderscore",
underscore_to_camelcase("camel_case_to_underscore", False),
"CC to usc conversion"
)
self.assertEquals(
"CamelCaseToUnderscore",
underscore_to_camelcase("camel_case_to_underscore"),
"CC to usc conversion"
)
self.assertEquals(
"CamelCaseToUnderscore",
underscore_to_camelcase("camel_case_to_underscore", True),
"CC to usc conversion"
)
|
Add tests of the improved camelcase/underscore conversionfrom unittest.case import TestCase
from zsl.utils.string_helper import camelcase_to_underscore, underscore_to_camelcase
class InflectionTestCase(TestCase):
def testCamelCaseToUnderscore(self):
self.assertEquals(
"camel_case_to_underscore",
camelcase_to_underscore("camelCaseToUnderscore"),
"CC to usc conversion"
)
self.assertEquals(
"camel_case_to_underscore",
camelcase_to_underscore("CamelCaseToUnderscore"),
"CC to usc conversion"
)
def testUnderscoreToCamelCase(self):
self.assertEquals(
"camelCaseToUnderscore",
underscore_to_camelcase("camel_case_to_underscore", False),
"CC to usc conversion"
)
self.assertEquals(
"CamelCaseToUnderscore",
underscore_to_camelcase("camel_case_to_underscore"),
"CC to usc conversion"
)
self.assertEquals(
"CamelCaseToUnderscore",
underscore_to_camelcase("camel_case_to_underscore", True),
"CC to usc conversion"
)
|
<commit_before><commit_msg>Add tests of the improved camelcase/underscore conversion<commit_after>from unittest.case import TestCase
from zsl.utils.string_helper import camelcase_to_underscore, underscore_to_camelcase
class InflectionTestCase(TestCase):
def testCamelCaseToUnderscore(self):
self.assertEquals(
"camel_case_to_underscore",
camelcase_to_underscore("camelCaseToUnderscore"),
"CC to usc conversion"
)
self.assertEquals(
"camel_case_to_underscore",
camelcase_to_underscore("CamelCaseToUnderscore"),
"CC to usc conversion"
)
def testUnderscoreToCamelCase(self):
self.assertEquals(
"camelCaseToUnderscore",
underscore_to_camelcase("camel_case_to_underscore", False),
"CC to usc conversion"
)
self.assertEquals(
"CamelCaseToUnderscore",
underscore_to_camelcase("camel_case_to_underscore"),
"CC to usc conversion"
)
self.assertEquals(
"CamelCaseToUnderscore",
underscore_to_camelcase("camel_case_to_underscore", True),
"CC to usc conversion"
)
|
|
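# Assumed reference implementation of the two conversions exercised by the
# tests above -- the real code lives in zsl.utils.string_helper and may
# differ (the keyword name below is a guess), but this sketch satisfies
# every assertion in the test case.
import re

def camelcase_to_underscore(name):
    # Break before each capital that follows a lowercase letter or digit,
    # then lowercase everything.
    s = re.sub(r'(.)([A-Z][a-z]+)', r'\1_\2', name)
    return re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', s).lower()

def underscore_to_camelcase(name, capitalize_first=True):
    parts = name.split('_')
    head = parts[0].capitalize() if capitalize_first else parts[0]
    return head + ''.join(p.capitalize() for p in parts[1:])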
211d15149ad4e05c2b1159939805246998a2e3d4
|
app/send_ping.py
|
app/send_ping.py
|
from discovery import EndPoint, PingNode, PingServer
my_endpoint = EndPoint(u'192.168.1.192', 30303, 30303)
their_endpoint = EndPoint(u'127.0.0.1', 30303, 30303)
server = PingServer(my_endpoint)
listen_thread = server.udp_listen()
listen_thread.start()
server.ping(their_endpoint)
|
Add script to run app
|
Add script to run app
|
Python
|
bsd-3-clause
|
tomarak/basic-eth,tomarak/basic-eth
|
Add script to run app
|
from discovery import EndPoint, PingNode, PingServer
my_endpoint = EndPoint(u'192.168.1.192', 30303, 30303)
their_endpoint = EndPoint(u'127.0.0.1', 30303, 30303)
server = PingServer(my_endpoint)
listen_thread = server.udp_listen()
listen_thread.start()
server.ping(their_endpoint)
|
<commit_before><commit_msg>Add script to run app<commit_after>
|
from discovery import EndPoint, PingNode, PingServer
my_endpoint = EndPoint(u'192.168.1.192', 30303, 30303)
their_endpoint = EndPoint(u'127.0.0.1', 30303, 30303)
server = PingServer(my_endpoint)
listen_thread = server.udp_listen()
listen_thread.start()
server.ping(their_endpoint)
|
Add script to run appfrom discovery import EndPoint, PingNode, PingServer
my_endpoint = EndPoint(u'192.168.1.192', 30303, 30303)
their_endpoint = EndPoint(u'127.0.0.1', 30303, 30303)
server = PingServer(my_endpoint)
listen_thread = server.udp_listen()
listen_thread.start()
server.ping(their_endpoint)
|
<commit_before><commit_msg>Add script to run app<commit_after>from discovery import EndPoint, PingNode, PingServer
my_endpoint = EndPoint(u'192.168.1.192', 30303, 30303)
their_endpoint = EndPoint(u'127.0.0.1', 30303, 30303)
server = PingServer(my_endpoint)
listen_thread = server.udp_listen()
listen_thread.start()
server.ping(their_endpoint)
|
|
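# Hedged usage sketch for the ping script above: the same EndPoint and
# PingServer calls, but with the peer address taken from argv and the
# listener thread joined so the process stays alive long enough to hear a
# reply. The (address, udp_port, tcp_port) argument order is inferred from
# the script itself, not from the discovery module's documentation.
import sys
from discovery import EndPoint, PingServer

their_ip = sys.argv[1] if len(sys.argv) > 1 else u'127.0.0.1'
my_endpoint = EndPoint(u'192.168.1.192', 30303, 30303)
their_endpoint = EndPoint(their_ip, 30303, 30303)

server = PingServer(my_endpoint)
listen_thread = server.udp_listen()
listen_thread.start()
server.ping(their_endpoint)
listen_thread.join()  # keep the process alive to receive the pong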
8b9857958a4dc9286ac0e2e52f4e81909ea29728
|
scripts/cluster_build.py
|
scripts/cluster_build.py
|
#!/usr/bin/python
from cluster_config import ClusterConfig
from cluster_run import ClusterRun
class ClusterBuild:
def __init__(self, config):
self.config = config
def cd_to_code(self):
return "cd " + self.config.code_dir + "; "
def cd_to_build(self):
return "cd build/cmake; "
def destroy(self):
destroy_cmd = "rm -rf " + self.config.code_dir + ";"
ClusterRun(self.config, destroy_cmd)
def checkout(self):
checkout_cmd = "git clone git@ahoy:cbr.git " + self.config.code_dir + ";"
ClusterRun(self.config, checkout_cmd)
def update(self):
cd_cmd = self.cd_to_code()
pull_cmd = "git pull origin;"
ClusterRun(self.config, cd_cmd + pull_cmd)
def dependencies(self):
cd_cmd = self.cd_to_code()
build_cmd = "./install-deps.sh"
ClusterRun(self.config, cd_cmd + build_cmd)
def update_dependencies(self):
cd_cmd = self.cd_to_code()
update_cmd = "./install-deps.sh update"
ClusterRun(self.config, cd_cmd + update_cmd)
def build(self):
cd_cmd = self.cd_to_code() + self.cd_to_build()
build_cmd = "cmake .; make -j2"
ClusterRun(self.config, cd_cmd + build_cmd)
if __name__ == "__main__":
cc = ClusterConfig()
cluster_build = ClusterBuild(cc)
cluster_build.destroy()
cluster_build.checkout()
cluster_build.update()
cluster_build.dependencies()
cluster_build.update_dependencies()
cluster_build.build()
|
Add script to handle build tasks across all nodes. Right now running it on its own just cleans out any existing data and runs the entire build process (which takes a *long* time!). Needs to be upgraded to have a nice command line interface.
|
Add script to handle build tasks across all nodes. Right now running it on its own just cleans out any existing data and runs the entire build process (which takes a *long* time!). Needs to be upgraded to have a nice command line interface.
|
Python
|
bsd-3-clause
|
sirikata/sirikata,sirikata/sirikata,sirikata/sirikata,sirikata/sirikata,sirikata/sirikata,sirikata/sirikata,sirikata/sirikata,sirikata/sirikata
|
Add script to handle build tasks across all nodes. Right now running it on its own just cleans out any existing data and runs the entire build process (which takes a *long* time!). Needs to be upgraded to have a nice command line interface.
|
#!/usr/bin/python
from cluster_config import ClusterConfig
from cluster_run import ClusterRun
class ClusterBuild:
def __init__(self, config):
self.config = config
def cd_to_code(self):
return "cd " + self.config.code_dir + "; "
def cd_to_build(self):
return "cd build/cmake; "
def destroy(self):
destroy_cmd = "rm -rf " + self.config.code_dir + ";"
ClusterRun(self.config, destroy_cmd)
def checkout(self):
checkout_cmd = "git clone git@ahoy:cbr.git " + self.config.code_dir + ";"
ClusterRun(self.config, checkout_cmd)
def update(self):
cd_cmd = self.cd_to_code()
pull_cmd = "git pull origin;"
ClusterRun(self.config, cd_cmd + pull_cmd)
def dependencies(self):
cd_cmd = self.cd_to_code()
build_cmd = "./install-deps.sh"
ClusterRun(self.config, cd_cmd + build_cmd)
def update_dependencies(self):
cd_cmd = self.cd_to_code()
update_cmd = "./install-deps.sh update"
ClusterRun(self.config, cd_cmd + update_cmd)
def build(self):
cd_cmd = self.cd_to_code() + self.cd_to_build()
build_cmd = "cmake .; make -j2"
ClusterRun(self.config, cd_cmd + build_cmd)
if __name__ == "__main__":
cc = ClusterConfig()
cluster_build = ClusterBuild(cc)
cluster_build.destroy()
cluster_build.checkout()
cluster_build.update()
cluster_build.dependencies()
cluster_build.update_dependencies()
cluster_build.build()
|
<commit_before><commit_msg>Add script to handle build tasks across all nodes. Right now running it on its own just cleans out any existing data and runs the entire build process (which takes a *long* time!). Needs to be upgraded to have a nice command line interface.<commit_after>
|
#!/usr/bin/python
from cluster_config import ClusterConfig
from cluster_run import ClusterRun
class ClusterBuild:
def __init__(self, config):
self.config = config
def cd_to_code(self):
return "cd " + self.config.code_dir + "; "
def cd_to_build(self):
return "cd build/cmake; "
def destroy(self):
destroy_cmd = "rm -rf " + self.config.code_dir + ";"
ClusterRun(self.config, destroy_cmd)
def checkout(self):
checkout_cmd = "git clone git@ahoy:cbr.git " + self.config.code_dir + ";"
ClusterRun(self.config, checkout_cmd)
def update(self):
cd_cmd = self.cd_to_code()
pull_cmd = "git pull origin;"
ClusterRun(self.config, cd_cmd + pull_cmd)
def dependencies(self):
cd_cmd = self.cd_to_code()
build_cmd = "./install-deps.sh"
ClusterRun(self.config, cd_cmd + build_cmd)
def update_dependencies(self):
cd_cmd = self.cd_to_code()
update_cmd = "./install-deps.sh update"
ClusterRun(self.config, cd_cmd + update_cmd)
def build(self):
cd_cmd = self.cd_to_code() + self.cd_to_build()
build_cmd = "cmake .; make -j2"
ClusterRun(self.config, cd_cmd + build_cmd)
if __name__ == "__main__":
cc = ClusterConfig()
cluster_build = ClusterBuild(cc)
cluster_build.destroy()
cluster_build.checkout()
cluster_build.update()
cluster_build.dependencies()
cluster_build.update_dependencies()
cluster_build.build()
|
Add script to handle build tasks across all nodes. Right now running it on its own just cleans out any existing data and runs the entire build process (which takes a *long* time!). Needs to be upgraded to have a nice command line interface.#!/usr/bin/python
from cluster_config import ClusterConfig
from cluster_run import ClusterRun
class ClusterBuild:
def __init__(self, config):
self.config = config
def cd_to_code(self):
return "cd " + self.config.code_dir + "; "
def cd_to_build(self):
return "cd build/cmake; "
def destroy(self):
destroy_cmd = "rm -rf " + self.config.code_dir + ";"
ClusterRun(self.config, destroy_cmd)
def checkout(self):
checkout_cmd = "git clone git@ahoy:cbr.git " + self.config.code_dir + ";"
ClusterRun(self.config, checkout_cmd)
def update(self):
cd_cmd = self.cd_to_code()
pull_cmd = "git pull origin;"
ClusterRun(self.config, cd_cmd + pull_cmd)
def dependencies(self):
cd_cmd = self.cd_to_code()
build_cmd = "./install-deps.sh"
ClusterRun(self.config, cd_cmd + build_cmd)
def update_dependencies(self):
cd_cmd = self.cd_to_code()
update_cmd = "./install-deps.sh update"
ClusterRun(self.config, cd_cmd + update_cmd)
def build(self):
cd_cmd = self.cd_to_code() + self.cd_to_build()
build_cmd = "cmake .; make -j2"
ClusterRun(self.config, cd_cmd + build_cmd)
if __name__ == "__main__":
cc = ClusterConfig()
cluster_build = ClusterBuild(cc)
cluster_build.destroy()
cluster_build.checkout()
cluster_build.update()
cluster_build.dependencies()
cluster_build.update_dependencies()
cluster_build.build()
|
<commit_before><commit_msg>Add script to handle build tasks across all nodes. Right now running it on its own just cleans out any existing data and runs the entire build process (which takes a *long* time!). Needs to be upgraded to have a nice command line interface.<commit_after>#!/usr/bin/python
from cluster_config import ClusterConfig
from cluster_run import ClusterRun
class ClusterBuild:
def __init__(self, config):
self.config = config
def cd_to_code(self):
return "cd " + self.config.code_dir + "; "
def cd_to_build(self):
return "cd build/cmake; "
def destroy(self):
destroy_cmd = "rm -rf " + self.config.code_dir + ";"
ClusterRun(self.config, destroy_cmd)
def checkout(self):
checkout_cmd = "git clone git@ahoy:cbr.git " + self.config.code_dir + ";"
ClusterRun(self.config, checkout_cmd)
def update(self):
cd_cmd = self.cd_to_code()
pull_cmd = "git pull origin;"
ClusterRun(self.config, cd_cmd + pull_cmd)
def dependencies(self):
cd_cmd = self.cd_to_code()
build_cmd = "./install-deps.sh"
ClusterRun(self.config, cd_cmd + build_cmd)
def update_dependencies(self):
cd_cmd = self.cd_to_code()
update_cmd = "./install-deps.sh update"
ClusterRun(self.config, cd_cmd + update_cmd)
def build(self):
cd_cmd = self.cd_to_code() + self.cd_to_build()
build_cmd = "cmake .; make -j2"
ClusterRun(self.config, cd_cmd + build_cmd)
if __name__ == "__main__":
cc = ClusterConfig()
cluster_build = ClusterBuild(cc)
cluster_build.destroy()
cluster_build.checkout()
cluster_build.update()
cluster_build.dependencies()
cluster_build.update_dependencies()
cluster_build.build()
|
|
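# The commit message above asks for a nicer command line interface; a
# minimal sketch is below. The subcommand names are hypothetical and simply
# mirror ClusterBuild's methods; with no arguments it only builds.
import sys
from cluster_config import ClusterConfig

def main(argv):
    builder = ClusterBuild(ClusterConfig())
    commands = {
        'destroy': builder.destroy,
        'checkout': builder.checkout,
        'update': builder.update,
        'deps': builder.dependencies,
        'update-deps': builder.update_dependencies,
        'build': builder.build,
    }
    for name in argv or ['build']:
        if name not in commands:
            print 'unknown command:', name
            return 1
        commands[name]()
    return 0

if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))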
9232cafe92c794c3cbfa2445a790d7e66b258691
|
scripts/analytics/file_sizes.py
|
scripts/analytics/file_sizes.py
|
# -*- coding: utf-8 -*-
"""
Summarize distribution of file sizes in OSF git repos.
"""
from __future__ import division
import os
import numpy as np
import tabulate
from website import settings
def walk_collect(path, func):
sizes = []
for root, dirs, files in os.walk(path):
try:
dirs.remove('.git')
except ValueError:
pass
sizes.extend([
func(root, file)
for file in files
])
return sizes
def size_helper(root, file):
return root, file, os.stat(os.path.join(root, file)).st_size
def size_percentiles():
sizes = walk_collect(settings.UPLOADS_PATH, size_helper)
cutoffs = range(2, 102, 2)
percentiles = np.percentile(
[size[-1] / 1024 / 1024 for size in sizes],
cutoffs,
)
return tabulate.tabulate(
zip(cutoffs, percentiles),
headers=['Percentile', 'Size (MiB)'],
)
if __name__ == '__main__':
print(size_percentiles())
|
Add script to summarize file size distribution.
|
Add script to summarize file size distribution.
See example at
https://github.com/CenterForOpenScience/openscienceframework.org/issues/871
|
Python
|
apache-2.0
|
pattisdr/osf.io,monikagrabowska/osf.io,petermalcolm/osf.io,zkraime/osf.io,KAsante95/osf.io,emetsger/osf.io,pattisdr/osf.io,doublebits/osf.io,baylee-d/osf.io,KAsante95/osf.io,TomHeatwole/osf.io,abought/osf.io,Johnetordoff/osf.io,GageGaskins/osf.io,hmoco/osf.io,felliott/osf.io,cldershem/osf.io,saradbowman/osf.io,jnayak1/osf.io,barbour-em/osf.io,caneruguz/osf.io,fabianvf/osf.io,kch8qx/osf.io,cslzchen/osf.io,monikagrabowska/osf.io,TomBaxter/osf.io,reinaH/osf.io,revanthkolli/osf.io,SSJohns/osf.io,lyndsysimon/osf.io,RomanZWang/osf.io,laurenrevere/osf.io,kushG/osf.io,brandonPurvis/osf.io,zkraime/osf.io,KAsante95/osf.io,cosenal/osf.io,asanfilippo7/osf.io,laurenrevere/osf.io,wearpants/osf.io,fabianvf/osf.io,monikagrabowska/osf.io,revanthkolli/osf.io,icereval/osf.io,AndrewSallans/osf.io,samanehsan/osf.io,ZobairAlijan/osf.io,jolene-esposito/osf.io,asanfilippo7/osf.io,HalcyonChimera/osf.io,samchrisinger/osf.io,kwierman/osf.io,wearpants/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,mattclark/osf.io,himanshuo/osf.io,binoculars/osf.io,chrisseto/osf.io,emetsger/osf.io,CenterForOpenScience/osf.io,CenterForOpenScience/osf.io,amyshi188/osf.io,kch8qx/osf.io,abought/osf.io,MerlinZhang/osf.io,haoyuchen1992/osf.io,mfraezz/osf.io,icereval/osf.io,caneruguz/osf.io,mluo613/osf.io,acshi/osf.io,amyshi188/osf.io,mluke93/osf.io,baylee-d/osf.io,mfraezz/osf.io,cosenal/osf.io,reinaH/osf.io,wearpants/osf.io,mluo613/osf.io,zachjanicki/osf.io,himanshuo/osf.io,ticklemepierce/osf.io,cwisecarver/osf.io,emetsger/osf.io,dplorimer/osf,adlius/osf.io,sbt9uc/osf.io,RomanZWang/osf.io,sbt9uc/osf.io,kch8qx/osf.io,arpitar/osf.io,KAsante95/osf.io,sloria/osf.io,kushG/osf.io,jeffreyliu3230/osf.io,ZobairAlijan/osf.io,erinspace/osf.io,acshi/osf.io,samanehsan/osf.io,dplorimer/osf,zamattiac/osf.io,cslzchen/osf.io,revanthkolli/osf.io,CenterForOpenScience/osf.io,arpitar/osf.io,rdhyee/osf.io,brandonPurvis/osf.io,danielneis/osf.io,HalcyonChimera/osf.io,barbour-em/osf.io,sbt9uc/osf.io,dplorimer/osf,TomBaxter/osf.io,sbt9uc/osf.io,GaryKriebel/osf.io,cwisecarver/osf.io,zamattiac/osf.io,rdhyee/osf.io,HalcyonChimera/osf.io,billyhunt/osf.io,zachjanicki/osf.io,jnayak1/osf.io,brianjgeiger/osf.io,alexschiller/osf.io,wearpants/osf.io,bdyetton/prettychart,jeffreyliu3230/osf.io,chrisseto/osf.io,HalcyonChimera/osf.io,njantrania/osf.io,ticklemepierce/osf.io,cwisecarver/osf.io,binoculars/osf.io,caseyrygt/osf.io,fabianvf/osf.io,njantrania/osf.io,samanehsan/osf.io,Nesiehr/osf.io,jolene-esposito/osf.io,kwierman/osf.io,ZobairAlijan/osf.io,petermalcolm/osf.io,Nesiehr/osf.io,jmcarp/osf.io,mfraezz/osf.io,haoyuchen1992/osf.io,aaxelb/osf.io,MerlinZhang/osf.io,Johnetordoff/osf.io,barbour-em/osf.io,bdyetton/prettychart,samchrisinger/osf.io,aaxelb/osf.io,HarryRybacki/osf.io,brandonPurvis/osf.io,acshi/osf.io,billyhunt/osf.io,ckc6cz/osf.io,TomHeatwole/osf.io,billyhunt/osf.io,caseyrygt/osf.io,brianjgeiger/osf.io,adlius/osf.io,ckc6cz/osf.io,AndrewSallans/osf.io,lyndsysimon/osf.io,KAsante95/osf.io,brianjgeiger/osf.io,brandonPurvis/osf.io,cldershem/osf.io,Ghalko/osf.io,TomHeatwole/osf.io,brandonPurvis/osf.io,RomanZWang/osf.io,felliott/osf.io,sloria/osf.io,RomanZWang/osf.io,acshi/osf.io,Ghalko/osf.io,mattclark/osf.io,GaryKriebel/osf.io,rdhyee/osf.io,doublebits/osf.io,arpitar/osf.io,samchrisinger/osf.io,danielneis/osf.io,petermalcolm/osf.io,zachjanicki/osf.io,alexschiller/osf.io,zamattiac/osf.io,Nesiehr/osf.io,ticklemepierce/osf.io,icereval/osf.io,mattclark/osf.io,TomBaxter/osf.io,cslzchen/osf.io,haoyuchen1992/osf.io,lamdnhan/osf.io,jolene-esposito/osf.io,doublebits/osf.io,reinaH/osf.io,jm
carp/osf.io,crcresearch/osf.io,jolene-esposito/osf.io,aaxelb/osf.io,adlius/osf.io,leb2dg/osf.io,GageGaskins/osf.io,alexschiller/osf.io,GaryKriebel/osf.io,monikagrabowska/osf.io,kwierman/osf.io,himanshuo/osf.io,Ghalko/osf.io,chennan47/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,Nesiehr/osf.io,jmcarp/osf.io,revanthkolli/osf.io,jinluyuan/osf.io,emetsger/osf.io,lamdnhan/osf.io,pattisdr/osf.io,cslzchen/osf.io,mluke93/osf.io,Ghalko/osf.io,mluo613/osf.io,felliott/osf.io,billyhunt/osf.io,himanshuo/osf.io,rdhyee/osf.io,MerlinZhang/osf.io,GageGaskins/osf.io,chrisseto/osf.io,cosenal/osf.io,leb2dg/osf.io,zamattiac/osf.io,felliott/osf.io,DanielSBrown/osf.io,mluo613/osf.io,jeffreyliu3230/osf.io,HarryRybacki/osf.io,mluke93/osf.io,binoculars/osf.io,zkraime/osf.io,bdyetton/prettychart,erinspace/osf.io,kch8qx/osf.io,bdyetton/prettychart,jeffreyliu3230/osf.io,caseyrollins/osf.io,HarryRybacki/osf.io,GaryKriebel/osf.io,jinluyuan/osf.io,petermalcolm/osf.io,dplorimer/osf,haoyuchen1992/osf.io,CenterForOpenScience/osf.io,RomanZWang/osf.io,GageGaskins/osf.io,doublebits/osf.io,GageGaskins/osf.io,adlius/osf.io,leb2dg/osf.io,acshi/osf.io,sloria/osf.io,jnayak1/osf.io,danielneis/osf.io,amyshi188/osf.io,caseyrollins/osf.io,caseyrollins/osf.io,SSJohns/osf.io,cldershem/osf.io,hmoco/osf.io,reinaH/osf.io,danielneis/osf.io,zkraime/osf.io,lamdnhan/osf.io,baylee-d/osf.io,njantrania/osf.io,jnayak1/osf.io,alexschiller/osf.io,hmoco/osf.io,mluke93/osf.io,chennan47/osf.io,lamdnhan/osf.io,njantrania/osf.io,caseyrygt/osf.io,barbour-em/osf.io,ckc6cz/osf.io,asanfilippo7/osf.io,mluo613/osf.io,aaxelb/osf.io,erinspace/osf.io,jinluyuan/osf.io,cldershem/osf.io,saradbowman/osf.io,fabianvf/osf.io,leb2dg/osf.io,arpitar/osf.io,cosenal/osf.io,ZobairAlijan/osf.io,caseyrygt/osf.io,kushG/osf.io,lyndsysimon/osf.io,crcresearch/osf.io,samchrisinger/osf.io,kch8qx/osf.io,ckc6cz/osf.io,monikagrabowska/osf.io,laurenrevere/osf.io,chrisseto/osf.io,DanielSBrown/osf.io,kwierman/osf.io,lyndsysimon/osf.io,cwisecarver/osf.io,alexschiller/osf.io,jinluyuan/osf.io,chennan47/osf.io,caneruguz/osf.io,asanfilippo7/osf.io,SSJohns/osf.io,SSJohns/osf.io,HarryRybacki/osf.io,billyhunt/osf.io,MerlinZhang/osf.io,Johnetordoff/osf.io,crcresearch/osf.io,hmoco/osf.io,zachjanicki/osf.io,samanehsan/osf.io,abought/osf.io,DanielSBrown/osf.io,jmcarp/osf.io,doublebits/osf.io,TomHeatwole/osf.io,kushG/osf.io,abought/osf.io,amyshi188/osf.io,ticklemepierce/osf.io,DanielSBrown/osf.io
|
Add script to summarize file size distribution.
See example at
https://github.com/CenterForOpenScience/openscienceframework.org/issues/871
|
# -*- coding: utf-8 -*-
"""
Summarize distribution of file sizes in OSF git repos.
"""
from __future__ import division
import os
import numpy as np
import tabulate
from website import settings
def walk_collect(path, func):
sizes = []
for root, dirs, files in os.walk(path):
try:
dirs.remove('.git')
except ValueError:
pass
sizes.extend([
func(root, file)
for file in files
])
return sizes
def size_helper(root, file):
return root, file, os.stat(os.path.join(root, file)).st_size
def size_percentiles():
sizes = walk_collect(settings.UPLOADS_PATH, size_helper)
cutoffs = range(2, 102, 2)
percentiles = np.percentile(
[size[-1] / 1024 / 1024 for size in sizes],
cutoffs,
)
return tabulate.tabulate(
zip(cutoffs, percentiles),
headers=['Percentile', 'Size (MiB)'],
)
if __name__ == '__main__':
print(size_percentiles())
|
<commit_before><commit_msg>Add script to summarize file size distribution.
See example at
https://github.com/CenterForOpenScience/openscienceframework.org/issues/871<commit_after>
|
# -*- coding: utf-8 -*-
"""
Summarize distribution of file sizes in OSF git repos.
"""
from __future__ import division
import os
import numpy as np
import tabulate
from website import settings
def walk_collect(path, func):
sizes = []
for root, dirs, files in os.walk(path):
try:
dirs.remove('.git')
except ValueError:
pass
sizes.extend([
func(root, file)
for file in files
])
return sizes
def size_helper(root, file):
return root, file, os.stat(os.path.join(root, file)).st_size
def size_percentiles():
sizes = walk_collect(settings.UPLOADS_PATH, size_helper)
cutoffs = range(2, 102, 2)
percentiles = np.percentile(
[size[-1] / 1024 / 1024 for size in sizes],
cutoffs,
)
return tabulate.tabulate(
zip(cutoffs, percentiles),
headers=['Percentile', 'Size (MiB)'],
)
if __name__ == '__main__':
print(size_percentiles())
|
Add script to summarize file size distribution.
See example at
https://github.com/CenterForOpenScience/openscienceframework.org/issues/871# -*- coding: utf-8 -*-
"""
Summarize distribution of file sizes in OSF git repos.
"""
from __future__ import division
import os
import numpy as np
import tabulate
from website import settings
def walk_collect(path, func):
sizes = []
for root, dirs, files in os.walk(path):
try:
dirs.remove('.git')
except ValueError:
pass
sizes.extend([
func(root, file)
for file in files
])
return sizes
def size_helper(root, file):
return root, file, os.stat(os.path.join(root, file)).st_size
def size_percentiles():
sizes = walk_collect(settings.UPLOADS_PATH, size_helper)
cutoffs = range(2, 102, 2)
percentiles = np.percentile(
[size[-1] / 1024 / 1024 for size in sizes],
cutoffs,
)
return tabulate.tabulate(
zip(cutoffs, percentiles),
headers=['Percentile', 'Size (MiB)'],
)
if __name__ == '__main__':
print(size_percentiles())
|
<commit_before><commit_msg>Add script to summarize file size distribution.
See example at
https://github.com/CenterForOpenScience/openscienceframework.org/issues/871<commit_after># -*- coding: utf-8 -*-
"""
Summarize distribution of file sizes in OSF git repos.
"""
from __future__ import division
import os
import numpy as np
import tabulate
from website import settings
def walk_collect(path, func):
sizes = []
for root, dirs, files in os.walk(path):
try:
dirs.remove('.git')
except ValueError:
pass
sizes.extend([
func(root, file)
for file in files
])
return sizes
def size_helper(root, file):
return root, file, os.stat(os.path.join(root, file)).st_size
def size_percentiles():
sizes = walk_collect(settings.UPLOADS_PATH, size_helper)
cutoffs = range(2, 102, 2)
percentiles = np.percentile(
[size[-1] / 1024 / 1024 for size in sizes],
cutoffs,
)
return tabulate.tabulate(
zip(cutoffs, percentiles),
headers=['Percentile', 'Size (MiB)'],
)
if __name__ == '__main__':
print(size_percentiles())
|
|
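# Self-contained illustration of the percentile/tabulate combination used
# by the script above, with made-up sizes in place of a real uploads tree;
# the numbers and cutoffs are purely illustrative.
import numpy as np
import tabulate

sizes_mib = [0.1, 0.5, 1.2, 3.4, 8.0]
cutoffs = range(25, 125, 25)  # 25th, 50th, 75th, 100th percentiles
percentiles = np.percentile(sizes_mib, list(cutoffs))
print(tabulate.tabulate(list(zip(cutoffs, percentiles)),
                        headers=['Percentile', 'Size (MiB)']))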
43b3d92b3f2cd810a46a59b5769248280b6c63a9
|
main.py
|
main.py
|
import os
from com.android.monkeyrunner import MonkeyRunner, MonkeyDevice
def get_MainActivity(package_name):
return package_name + 'MainActivity'
def install_package(package_path, device):
if device.installPackage(package_path):
return True
else:
return False
def install_packages(device, directory='apk'):
apks = os.listdir(directory)
installation_results = []
for apk in apks:
package_path = os.getcwd() + '/' + directory + '/' + apk
print package_path
result = install_package(package_path, device)
installation_results.append(result)
return installation_results
def main():
timeout = 10000
device = MonkeyRunner.waitForConnection(timeout)
directory = 'apk'
packages = install_packages(device, directory=directory)
print packages
if __name__=='__main__':
main()
|
Install packages on android device
|
Install packages on android device
|
Python
|
mit
|
jonmetz/AndroFuzz
|
Install packages on android device
|
import os
from com.android.monkeyrunner import MonkeyRunner, MonkeyDevice
def get_MainActivity(package_name):
return package_name + 'MainActivity'
def install_package(package_path, device):
if device.installPackage(package_path):
return True
else:
return False
def install_packages(device, directory='apk'):
apks = os.listdir(directory)
installation_results = []
for apk in apks:
package_path = os.getcwd() + '/' + directory + '/' + apk
print package_path
result = install_package(package_path, device)
installation_results.append(result)
return installation_results
def main():
timeout = 10000
device = MonkeyRunner.waitForConnection(timeout)
directory = 'apk'
packages = install_packages(device, directory=directory)
print packages
if __name__=='__main__':
main()
|
<commit_before><commit_msg>Install packages on android device<commit_after>
|
import os
from com.android.monkeyrunner import MonkeyRunner, MonkeyDevice
def get_MainActivity(package_name):
return package_name + 'MainActivity'
def install_package(package_path, device):
if device.installPackage(package_path):
return True
else:
return False
def install_packages(device, directory='apk'):
apks = os.listdir(directory)
installation_results = []
for apk in apks:
package_path = os.getcwd() + '/' + directory + '/' + apk
print package_path
result = install_package(package_path, device)
installation_results.append(result)
return installation_results
def main():
timeout = 10000
device = MonkeyRunner.waitForConnection(timeout)
directory = 'apk'
packages = install_packages(device, directory=directory)
print packages
if __name__=='__main__':
main()
|
Install packages on android deviceimport os
from com.android.monkeyrunner import MonkeyRunner, MonkeyDevice
def get_MainActivity(package_name):
return package_name + 'MainActivity'
def install_package(package_path, device):
if device.installPackage(package_path):
return True
else:
return False
def install_packages(device, directory='apk'):
apks = os.listdir(directory)
installation_results = []
for apk in apks:
package_path = os.getcwd() + '/' + directory + '/' + apk
print package_path
result = install_package(package_path, device)
installation_results.append(result)
return installation_results
def main():
timeout = 10000
device = MonkeyRunner.waitForConnection(timeout)
directory = 'apk'
packages = install_packages(device, directory=directory)
print packages
if __name__=='__main__':
main()
|
<commit_before><commit_msg>Install packages on android device<commit_after>import os
from com.android.monkeyrunner import MonkeyRunner, MonkeyDevice
def get_MainActivity(package_name):
return package_name + 'MainActivity'
def install_package(package_path, device):
if device.installPackage(package_path):
return True
else:
return False
def install_packages(device, directory='apk'):
apks = os.listdir(directory)
installation_results = []
for apk in apks:
package_path = os.getcwd() + '/' + directory + '/' + apk
print package_path
result = install_package(package_path, device)
installation_results.append(result)
return installation_results
def main():
timeout = 10000
device = MonkeyRunner.waitForConnection(timeout)
directory = 'apk'
packages = install_packages(device, directory=directory)
print packages
if __name__=='__main__':
main()
|
|
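# Hedged variant of install_packages from the script above, using
# os.path.join instead of string concatenation and skipping entries that
# are not .apk files; behaviour is otherwise unchanged.
def install_packages(device, directory='apk'):
    results = []
    for apk in os.listdir(directory):
        if not apk.endswith('.apk'):
            continue  # ignore stray files in the apk directory
        package_path = os.path.join(os.getcwd(), directory, apk)
        results.append(install_package(package_path, device))
    return results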
16b3dc1f8c762a751e1476d679391f3bbc82cd5d
|
python-prefix.py
|
python-prefix.py
|
#!/usr/bin/env python
import sys
import os.path
import site
def main():
'''\
Check if the given prefix is included in sys.path for the given
python version; if not find an alternate valid prefix. Print the
result to standard out.
'''
if len(sys.argv) != 3:
msg = 'usage: %s <prefix> <python version>\n' % \
os.path.basename(sys.argv[0])
sys.stderr.write(msg)
return 1
python_prefix = sys.argv[1]
python_version = sys.argv[2]
path = '%s/lib/python%s' % (python_prefix, python_version)
path = os.path.normpath(path)
if path[-1] != '/':
path = path + '/'
prefix = None
for p in sys.path:
if p.startswith(path):
prefix = path
break
if not prefix:
prefix = site.PREFIXES[-1]
sys.stdout.write('%s\n' % prefix)
return 0
if __name__ == '__main__':
sys.exit(main())
|
#!/usr/bin/env python
import sys
import os.path
import site
def main():
'''\
Check if the given prefix is included in sys.path for the given
python version; if not find an alternate valid prefix. Print the
result to standard out.
'''
if len(sys.argv) != 3:
msg = 'usage: %s <prefix> <python version>\n' % \
os.path.basename(sys.argv[0])
sys.stderr.write(msg)
return 1
python_prefix = sys.argv[1]
python_version = sys.argv[2]
path = '%s/lib/python%s' % (python_prefix, python_version)
path = os.path.normpath(path)
if path[-1] != '/':
path = path + '/'
prefix = None
for p in sys.path:
if p.startswith(path):
prefix = python_prefix
break
if not prefix:
prefix = site.PREFIXES[-1]
sys.stdout.write('%s\n' % prefix)
return 0
if __name__ == '__main__':
sys.exit(main())
|
Fix typo in previous commit.
|
Fix typo in previous commit.
|
Python
|
bsd-2-clause
|
marek-sezemsky/coreemu,tectronics/coreemu,marek-sezemsky/coreemu,guidotack/coreemu,guidotack/coreemu,tectronics/coreemu,tectronics/coreemu,gregtampa/coreemu,guidotack/coreemu,gregtampa/coreemu,marek-sezemsky/coreemu,gregtampa/coreemu
|
#!/usr/bin/env python
import sys
import os.path
import site
def main():
'''\
Check if the given prefix is included in sys.path for the given
python version; if not find an alternate valid prefix. Print the
result to standard out.
'''
if len(sys.argv) != 3:
msg = 'usage: %s <prefix> <python version>\n' % \
os.path.basename(sys.argv[0])
sys.stderr.write(msg)
return 1
python_prefix = sys.argv[1]
python_version = sys.argv[2]
path = '%s/lib/python%s' % (python_prefix, python_version)
path = os.path.normpath(path)
if path[-1] != '/':
path = path + '/'
prefix = None
for p in sys.path:
if p.startswith(path):
prefix = path
break
if not prefix:
prefix = site.PREFIXES[-1]
sys.stdout.write('%s\n' % prefix)
return 0
if __name__ == '__main__':
sys.exit(main())
Fix typo in previous commit.
|
#!/usr/bin/env python
import sys
import os.path
import site
def main():
'''\
Check if the given prefix is included in sys.path for the given
python version; if not find an alternate valid prefix. Print the
result to standard out.
'''
if len(sys.argv) != 3:
msg = 'usage: %s <prefix> <python version>\n' % \
os.path.basename(sys.argv[0])
sys.stderr.write(msg)
return 1
python_prefix = sys.argv[1]
python_version = sys.argv[2]
path = '%s/lib/python%s' % (python_prefix, python_version)
path = os.path.normpath(path)
if path[-1] != '/':
path = path + '/'
prefix = None
for p in sys.path:
if p.startswith(path):
prefix = python_prefix
break
if not prefix:
prefix = site.PREFIXES[-1]
sys.stdout.write('%s\n' % prefix)
return 0
if __name__ == '__main__':
sys.exit(main())
|
<commit_before>#!/usr/bin/env python
import sys
import os.path
import site
def main():
'''\
Check if the given prefix is included in sys.path for the given
python version; if not find an alternate valid prefix. Print the
result to standard out.
'''
if len(sys.argv) != 3:
msg = 'usage: %s <prefix> <python version>\n' % \
os.path.basename(sys.argv[0])
sys.stderr.write(msg)
return 1
python_prefix = sys.argv[1]
python_version = sys.argv[2]
path = '%s/lib/python%s' % (python_prefix, python_version)
path = os.path.normpath(path)
if path[-1] != '/':
path = path + '/'
prefix = None
for p in sys.path:
if p.startswith(path):
prefix = path
break
if not prefix:
prefix = site.PREFIXES[-1]
sys.stdout.write('%s\n' % prefix)
return 0
if __name__ == '__main__':
sys.exit(main())
<commit_msg>Fix typo in previous commit.<commit_after>
|
#!/usr/bin/env python
import sys
import os.path
import site
def main():
'''\
Check if the given prefix is included in sys.path for the given
python version; if not find an alternate valid prefix. Print the
result to standard out.
'''
if len(sys.argv) != 3:
msg = 'usage: %s <prefix> <python version>\n' % \
os.path.basename(sys.argv[0])
sys.stderr.write(msg)
return 1
python_prefix = sys.argv[1]
python_version = sys.argv[2]
path = '%s/lib/python%s' % (python_prefix, python_version)
path = os.path.normpath(path)
if path[-1] != '/':
path = path + '/'
prefix = None
for p in sys.path:
if p.startswith(path):
prefix = python_prefix
break
if not prefix:
prefix = site.PREFIXES[-1]
sys.stdout.write('%s\n' % prefix)
return 0
if __name__ == '__main__':
sys.exit(main())
|
#!/usr/bin/env python
import sys
import os.path
import site
def main():
'''\
Check if the given prefix is included in sys.path for the given
python version; if not find an alternate valid prefix. Print the
result to standard out.
'''
if len(sys.argv) != 3:
msg = 'usage: %s <prefix> <python version>\n' % \
os.path.basename(sys.argv[0])
sys.stderr.write(msg)
return 1
python_prefix = sys.argv[1]
python_version = sys.argv[2]
path = '%s/lib/python%s' % (python_prefix, python_version)
path = os.path.normpath(path)
if path[-1] != '/':
path = path + '/'
prefix = None
for p in sys.path:
if p.startswith(path):
prefix = path
break
if not prefix:
prefix = site.PREFIXES[-1]
sys.stdout.write('%s\n' % prefix)
return 0
if __name__ == '__main__':
sys.exit(main())
Fix typo in previous commit.#!/usr/bin/env python
import sys
import os.path
import site
def main():
'''\
Check if the given prefix is included in sys.path for the given
python version; if not find an alternate valid prefix. Print the
result to standard out.
'''
if len(sys.argv) != 3:
msg = 'usage: %s <prefix> <python version>\n' % \
os.path.basename(sys.argv[0])
sys.stderr.write(msg)
return 1
python_prefix = sys.argv[1]
python_version = sys.argv[2]
path = '%s/lib/python%s' % (python_prefix, python_version)
path = os.path.normpath(path)
if path[-1] != '/':
path = path + '/'
prefix = None
for p in sys.path:
if p.startswith(path):
prefix = python_prefix
break
if not prefix:
prefix = site.PREFIXES[-1]
sys.stdout.write('%s\n' % prefix)
return 0
if __name__ == '__main__':
sys.exit(main())
|
<commit_before>#!/usr/bin/env python
import sys
import os.path
import site
def main():
'''\
Check if the given prefix is included in sys.path for the given
python version; if not find an alternate valid prefix. Print the
result to standard out.
'''
if len(sys.argv) != 3:
msg = 'usage: %s <prefix> <python version>\n' % \
os.path.basename(sys.argv[0])
sys.stderr.write(msg)
return 1
python_prefix = sys.argv[1]
python_version = sys.argv[2]
path = '%s/lib/python%s' % (python_prefix, python_version)
path = os.path.normpath(path)
if path[-1] != '/':
path = path + '/'
prefix = None
for p in sys.path:
if p.startswith(path):
prefix = path
break
if not prefix:
prefix = site.PREFIXES[-1]
sys.stdout.write('%s\n' % prefix)
return 0
if __name__ == '__main__':
sys.exit(main())
<commit_msg>Fix typo in previous commit.<commit_after>#!/usr/bin/env python
import sys
import os.path
import site
def main():
'''\
Check if the given prefix is included in sys.path for the given
python version; if not find an alternate valid prefix. Print the
result to standard out.
'''
if len(sys.argv) != 3:
msg = 'usage: %s <prefix> <python version>\n' % \
os.path.basename(sys.argv[0])
sys.stderr.write(msg)
return 1
python_prefix = sys.argv[1]
python_version = sys.argv[2]
path = '%s/lib/python%s' % (python_prefix, python_version)
path = os.path.normpath(path)
if path[-1] != '/':
path = path + '/'
prefix = None
for p in sys.path:
if p.startswith(path):
prefix = python_prefix
break
if not prefix:
prefix = site.PREFIXES[-1]
sys.stdout.write('%s\n' % prefix)
return 0
if __name__ == '__main__':
sys.exit(main())
|
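# Worked example of the one-character fix above, with assumed inputs: for
# prefix /usr/local and version 2.7, the matched sys.path entry begins with
# /usr/local/lib/python2.7/. Before the fix the script printed that lib
# directory; after it, it prints the prefix itself, which is what callers
# splice into install paths:
#   $ python python-prefix.py /usr/local 2.7
#   /usr/local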
844c6f18d06dcb397db76436e5f4b8ddcb1beddc
|
py/path-with-maximum-gold.py
|
py/path-with-maximum-gold.py
|
class Solution(object):
def getMaximumGold(self, grid):
"""
:type grid: List[List[int]]
:rtype: int
"""
self.grid = grid
self.rows = len(grid)
if self.rows:
self.cols = len(grid[0])
ans = 0
for x in range(self.rows):
for y in range(self.cols):
ans = max(ans, self.dfs(x, y))
return ans
def dfs(self, x, y):
if not (0 <= x < self.rows and 0 <= y < self.cols):
return 0
else:
gold = self.grid[x][y]
if gold == 0:
return 0
self.grid[x][y] = 0
ans = 0
for dx, dy in [(-1, 0), (1, 0), (0, -1), (0, 1)]:
nxt = self.dfs(x + dx, y + dy)
if nxt > ans:
ans = nxt
self.grid[x][y] = gold
return ans + gold
|
Add py solution for 1219. Path with Maximum Gold
|
Add py solution for 1219. Path with Maximum Gold
1219. Path with Maximum Gold: https://leetcode.com/problems/path-with-maximum-gold/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 1219. Path with Maximum Gold
1219. Path with Maximum Gold: https://leetcode.com/problems/path-with-maximum-gold/
|
class Solution(object):
def getMaximumGold(self, grid):
"""
:type grid: List[List[int]]
:rtype: int
"""
self.grid = grid
self.rows = len(grid)
if self.rows:
self.cols = len(grid[0])
ans = 0
for x in range(self.rows):
for y in range(self.cols):
ans = max(ans, self.dfs(x, y))
return ans
def dfs(self, x, y):
if not (0 <= x < self.rows and 0 <= y < self.cols):
return 0
else:
gold = self.grid[x][y]
if gold == 0:
return 0
self.grid[x][y] = 0
ans = 0
for dx, dy in [(-1, 0), (1, 0), (0, -1), (0, 1)]:
nxt = self.dfs(x + dx, y + dy)
if nxt > ans:
ans = nxt
self.grid[x][y] = gold
return ans + gold
|
<commit_before><commit_msg>Add py solution for 1219. Path with Maximum Gold
1219. Path with Maximum Gold: https://leetcode.com/problems/path-with-maximum-gold/<commit_after>
|
class Solution(object):
def getMaximumGold(self, grid):
"""
:type grid: List[List[int]]
:rtype: int
"""
self.grid = grid
self.rows = len(grid)
if self.rows:
self.cols = len(grid[0])
ans = 0
for x in range(self.rows):
for y in range(self.cols):
ans = max(ans, self.dfs(x, y))
return ans
def dfs(self, x, y):
if not (0 <= x < self.rows and 0 <= y < self.cols):
return 0
else:
gold = self.grid[x][y]
if gold == 0:
return 0
self.grid[x][y] = 0
ans = 0
for dx, dy in [(-1, 0), (1, 0), (0, -1), (0, 1)]:
nxt = self.dfs(x + dx, y + dy)
if nxt > ans:
ans = nxt
self.grid[x][y] = gold
return ans + gold
|
Add py solution for 1219. Path with Maximum Gold
1219. Path with Maximum Gold: https://leetcode.com/problems/path-with-maximum-gold/class Solution(object):
def getMaximumGold(self, grid):
"""
:type grid: List[List[int]]
:rtype: int
"""
self.grid = grid
self.rows = len(grid)
if self.rows:
self.cols = len(grid[0])
ans = 0
for x in range(self.rows):
for y in range(self.cols):
ans = max(ans, self.dfs(x, y))
return ans
def dfs(self, x, y):
if not (0 <= x < self.rows and 0 <= y < self.cols):
return 0
else:
gold = self.grid[x][y]
if gold == 0:
return 0
self.grid[x][y] = 0
ans = 0
for dx, dy in [(-1, 0), (1, 0), (0, -1), (0, 1)]:
nxt = self.dfs(x + dx, y + dy)
if nxt > ans:
ans = nxt
self.grid[x][y] = gold
return ans + gold
|
<commit_before><commit_msg>Add py solution for 1219. Path with Maximum Gold
1219. Path with Maximum Gold: https://leetcode.com/problems/path-with-maximum-gold/<commit_after>class Solution(object):
def getMaximumGold(self, grid):
"""
:type grid: List[List[int]]
:rtype: int
"""
self.grid = grid
self.rows = len(grid)
if self.rows:
self.cols = len(grid[0])
ans = 0
for x in range(self.rows):
for y in range(self.cols):
ans = max(ans, self.dfs(x, y))
return ans
def dfs(self, x, y):
if not (0 <= x < self.rows and 0 <= y < self.cols):
return 0
else:
gold = self.grid[x][y]
if gold == 0:
return 0
self.grid[x][y] = 0
ans = 0
for dx, dy in [(-1, 0), (1, 0), (0, -1), (0, 1)]:
nxt = self.dfs(x + dx, y + dy)
if nxt > ans:
ans = nxt
self.grid[x][y] = gold
return ans + gold
|
|
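# Quick usage sketch for the solution above, using the first example grid
# from the LeetCode problem statement; the expected answer is 24, collected
# along the 9 -> 8 -> 7 path.
grid = [[0, 6, 0],
        [5, 8, 7],
        [0, 9, 0]]
print(Solution().getMaximumGold(grid))  # 24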
d552028fcdbe1bd3b27efb7676980abe597ed40e
|
py/readImagesSamplesSplit.py
|
py/readImagesSamplesSplit.py
|
from __future__ import print_function
import numpy as np
from six.moves import cPickle as pickle
import argparse
import os
parser = argparse.ArgumentParser()
parser.add_argument('--pickle', help='Pickle file, check the script readImages to generate this file.', required=True)
args = parser.parse_args()
pickle_file = args.pickle
print("Reading:", pickle_file)
f = open(pickle_file, 'rb')
data = pickle.load(f)
train_dataset = data["train_dataset"]
train_labels = data["train_labels"]
valid_dataset = data["valid_dataset"]
valid_labels = data["valid_labels"]
test_dataset = data["test_dataset"]
test_labels = data["test_labels"]
img_head = data["img_head"]
img_head_label = None
if "img_head_label" in data:
img_head_label = data["img_head_label"]
datasplit = {}
datasplit["train_dataset"] = train_dataset
datasplit["train_labels"] = train_labels
datasplit["img_head"] = img_head
if img_head_label is not None:
datasplit["img_head_label"] = img_head_label
print("Split train...")
f = open(os.path.splitext(pickle_file)[0] + "_train.pickle", 'wb')
try:
pickle.dump(datasplit, f, pickle.HIGHEST_PROTOCOL)
f.close()
except Exception as e:
f.close()
print("Can't save compressed file, trying without compression")
f = open(os.path.splitext(pickle_file)[0] + "_train.pickle", 'wb')
pickle.dump(datasplit, f)
f.close()
print("Split valid and test...")
datasplit = {}
datasplit["valid_dataset"] = valid_dataset
datasplit["valid_labels"] = valid_labels
datasplit["test_dataset"] = test_dataset
datasplit["test_labels"] = test_labels
datasplit["img_head"] = img_head
if img_head_label is not None:
datasplit["img_head_label"] = img_head_label
f = open(os.path.splitext(pickle_file)[0] + "_valid.pickle", 'wb')
try:
pickle.dump(datasplit, f, pickle.HIGHEST_PROTOCOL)
f.close()
except Exception as e:
f.close()
print("Can't save compressed file, trying without compression")
f = open(os.path.splitext(pickle_file)[0] + "_valid.pickle", 'wb')
pickle.dump(datasplit, f)
f.close()
|
Add script to split the data in the pickle files.
|
ENH: Add script to split the data in the pickle files.
|
Python
|
apache-2.0
|
juanprietob/ExtractMSLesion,juanprietob/ExtractMSLesion,juanprietob/ExtractMSLesion,juanprietob/ExtractMSLesion,juanprietob/ExtractMSLesion
|
ENH: Add script to split the data in the pickle files.
|
from __future__ import print_function
import numpy as np
from six.moves import cPickle as pickle
import argparse
import os
parser = argparse.ArgumentParser()
parser.add_argument('--pickle', help='Pickle file, check the script readImages to generate this file.', required=True)
args = parser.parse_args()
pickle_file = args.pickle
print("Reading:", pickle_file)
f = open(pickle_file, 'rb')
data = pickle.load(f)
train_dataset = data["train_dataset"]
train_labels = data["train_labels"]
valid_dataset = data["valid_dataset"]
valid_labels = data["valid_labels"]
test_dataset = data["test_dataset"]
test_labels = data["test_labels"]
img_head = data["img_head"]
img_head_label = None
if "img_head_label" in data:
img_head_label = data["img_head_label"]
datasplit = {}
datasplit["train_dataset"] = train_dataset
datasplit["train_labels"] = train_labels
datasplit["img_head"] = img_head
if img_head_label is not None:
datasplit["img_head_label"] = img_head_label
print("Split train...")
f = open(os.path.splitext(pickle_file)[0] + "_train.pickle", 'wb')
try:
pickle.dump(datasplit, f, pickle.HIGHEST_PROTOCOL)
f.close()
except Exception as e:
f.close()
print("Can't save compressed file, trying without compression")
f = open(os.path.splitext(pickle_file)[0] + "_train.pickle", 'wb')
pickle.dump(datasplit, f)
f.close()
print("Split valid and test...")
datasplit = {}
datasplit["valid_dataset"] = valid_dataset
datasplit["valid_labels"] = valid_labels
datasplit["test_dataset"] = test_dataset
datasplit["test_labels"] = test_labels
datasplit["img_head"] = img_head
if img_head_label is not None:
datasplit["img_head_label"] = img_head_label
f = open(os.path.splitext(pickle_file)[0] + "_valid.pickle", 'wb')
try:
pickle.dump(datasplit, f, pickle.HIGHEST_PROTOCOL)
f.close()
except Exception as e:
f.close()
print("Can't save compressed file, trying without compression")
f = open(os.path.splitext(pickle_file)[0] + "_valid.pickle", 'wb')
pickle.dump(datasplit, f)
f.close()
|
<commit_before><commit_msg>ENH: Add script to split the data in the pickle files.<commit_after>
|
from __future__ import print_function
import numpy as np
from six.moves import cPickle as pickle
import argparse
import os
parser = argparse.ArgumentParser()
parser.add_argument('--pickle', help='Pickle file, check the script readImages to generate this file.', required=True)
args = parser.parse_args()
pickle_file = args.pickle
print("Reading:", pickle_file)
f = open(pickle_file, 'rb')
data = pickle.load(f)
train_dataset = data["train_dataset"]
train_labels = data["train_labels"]
valid_dataset = data["valid_dataset"]
valid_labels = data["valid_labels"]
test_dataset = data["test_dataset"]
test_labels = data["test_labels"]
img_head = data["img_head"]
img_head_label = None
if "img_head_label" in data:
img_head_label = data["img_head_label"]
datasplit = {}
datasplit["train_dataset"] = train_dataset
datasplit["train_labels"] = train_labels
datasplit["img_head"] = img_head
if img_head_label is not None:
datasplit["img_head_label"] = img_head_label
print("Split train...")
f = open(os.path.splitext(pickle_file)[0] + "_train.pickle", 'wb')
try:
pickle.dump(datasplit, f, pickle.HIGHEST_PROTOCOL)
f.close()
except Exception as e:
f.close()
print("Can't save compressed file, trying without compression")
f = open(os.path.splitext(pickle_file)[0] + "_train.pickle", 'wb')
pickle.dump(datasplit, f)
f.close()
print("Split valid and test...")
datasplit = {}
datasplit["valid_dataset"] = valid_dataset
datasplit["valid_labels"] = valid_labels
datasplit["test_dataset"] = test_dataset
datasplit["test_labels"] = test_labels
datasplit["img_head"] = img_head
if img_head_label is not None:
datasplit["img_head_label"] = img_head_label
f = open(os.path.splitext(pickle_file)[0] + "_valid.pickle", 'wb')
try:
pickle.dump(datasplit, f, pickle.HIGHEST_PROTOCOL)
f.close()
except Exception as e:
f.close()
print("Can't save compressed file, trying without compression")
f = open(os.path.splitext(pickle_file)[0] + "_valid.pickle", 'wb')
pickle.dump(datasplit, f)
f.close()
|
ENH: Add script to split the data in the pickle files.from __future__ import print_function
import numpy as np
from six.moves import cPickle as pickle
import argparse
import os
parser = argparse.ArgumentParser()
parser.add_argument('--pickle', help='Pickle file, check the script readImages to generate this file.', required=True)
args = parser.parse_args()
pickle_file = args.pickle
print("Reading:", pickle_file)
f = open(pickle_file, 'rb')
data = pickle.load(f)
train_dataset = data["train_dataset"]
train_labels = data["train_labels"]
valid_dataset = data["valid_dataset"]
valid_labels = data["valid_labels"]
test_dataset = data["test_dataset"]
test_labels = data["test_labels"]
img_head = data["img_head"]
img_head_label = None
if "img_head_label" in data:
img_head_label = data["img_head_label"]
datasplit = {}
datasplit["train_dataset"] = train_dataset
datasplit["train_labels"] = train_labels
datasplit["img_head"] = img_head
if img_head_label is not None:
datasplit["img_head_label"] = img_head_label
print("Split train...")
f = open(os.path.splitext(pickle_file)[0] + "_train.pickle", 'wb')
try:
pickle.dump(datasplit, f, pickle.HIGHEST_PROTOCOL)
f.close()
except Exception as e:
f.close()
print("Can't save compressed file, trying without compression")
f = open(os.path.splitext(pickle_file)[0] + "_train.pickle", 'wb')
pickle.dump(datasplit, f)
f.close()
print("Split valid and test...")
datasplit = {}
datasplit["valid_dataset"] = valid_dataset
datasplit["valid_labels"] = valid_labels
datasplit["test_dataset"] = test_dataset
datasplit["test_labels"] = test_labels
datasplit["img_head"] = img_head
if img_head_label is not None:
datasplit["img_head_label"] = img_head_label
f = open(os.path.splitext(pickle_file)[0] + "_valid.pickle", 'wb')
try:
pickle.dump(datasplit, f, pickle.HIGHEST_PROTOCOL)
f.close()
except Exception as e:
f.close()
print("Can't save compressed file, trying without compression")
f = open(os.path.splitext(pickle_file)[0] + "_valid.pickle", 'wb')
pickle.dump(datasplit, f)
f.close()
|
<commit_before><commit_msg>ENH: Add script to split the data in the pickle files.<commit_after>from __future__ import print_function
import numpy as np
from six.moves import cPickle as pickle
import argparse
import os
parser = argparse.ArgumentParser()
parser.add_argument('--pickle', help='Pickle file, check the script readImages to generate this file.', required=True)
args = parser.parse_args()
pickle_file = args.pickle
print("Reading:", pickle_file)
f = open(pickle_file, 'rb')
data = pickle.load(f)
train_dataset = data["train_dataset"]
train_labels = data["train_labels"]
valid_dataset = data["valid_dataset"]
valid_labels = data["valid_labels"]
test_dataset = data["test_dataset"]
test_labels = data["test_labels"]
img_head = data["img_head"]
img_head_label = None
if "img_head_label" in data:
img_head_label = data["img_head_label"]
datasplit = {}
datasplit["train_dataset"] = train_dataset
datasplit["train_labels"] = train_labels
datasplit["img_head"] = img_head
if img_head_label is not None:
datasplit["img_head_label"] = img_head_label
print("Split train...")
f = open(os.path.splitext(pickle_file)[0] + "_train.pickle", 'wb')
try:
pickle.dump(datasplit, f, pickle.HIGHEST_PROTOCOL)
f.close()
except Exception as e:
f.close()
print("Can't save compressed file, trying without compression")
f = open(os.path.splitext(pickle_file)[0] + "_train.pickle", 'wb')
pickle.dump(datasplit, f)
f.close()
print("Split valid and test...")
datasplit = {}
datasplit["valid_dataset"] = valid_dataset
datasplit["valid_labels"] = valid_labels
datasplit["test_dataset"] = test_dataset
datasplit["test_labels"] = test_labels
datasplit["img_head"] = img_head
if img_head_label is not None:
datasplit["img_head_label"] = img_head_label
f = open(os.path.splitext(pickle_file)[0] + "_valid.pickle", 'wb')
try:
pickle.dump(datasplit, f, pickle.HIGHEST_PROTOCOL)
f.close()
except Exception as e:
f.close()
print("Can't save compressed file, trying without compression")
f = open(os.path.splitext(pickle_file)[0] + "_valid.pickle", 'wb')
pickle.dump(datasplit, f)
f.close()
|
|
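A minimal sketch of the same split-and-dump pattern as the script above, with context managers in place of explicit close() calls; the helper names dump_with_fallback and split_pickle are illustrative, and the retry without HIGHEST_PROTOCOL mirrors the script's fallback when the first dump fails.

import os
import pickle

def dump_with_fallback(obj, path):
    # Try the highest pickle protocol first; fall back to the default
    # protocol if that dump fails, as the script above does.
    try:
        with open(path, 'wb') as f:
            pickle.dump(obj, f, pickle.HIGHEST_PROTOCOL)
    except Exception:
        with open(path, 'wb') as f:
            pickle.dump(obj, f)

def split_pickle(pickle_file):
    with open(pickle_file, 'rb') as f:
        data = pickle.load(f)
    base = os.path.splitext(pickle_file)[0]
    shared = {k: data[k] for k in ('img_head', 'img_head_label') if k in data}
    train = dict(shared, train_dataset=data['train_dataset'],
                 train_labels=data['train_labels'])
    valid = dict(shared, **{k: data[k] for k in ('valid_dataset', 'valid_labels',
                                                 'test_dataset', 'test_labels')})
    dump_with_fallback(train, base + '_train.pickle')
    dump_with_fallback(valid, base + '_valid.pickle')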
d5f382aad9a378d58ff2afd9812a5ec7815f8d8a
|
tests/test_resources_remote.py
|
tests/test_resources_remote.py
|
#!/usr/bin/python3
import os
import sys
import unittest
from unittest.mock import patch, sentinel
LIB_PATH = os.path.dirname(os.path.realpath(__file__)) + '/../'
CONFIG_PATH = os.path.join(LIB_PATH, 'grafcli.example.conf')
sys.path.append(LIB_PATH)
from grafcli.config import load_config
load_config(CONFIG_PATH)
from grafcli.resources.remote import RemoteResources
class RemoteResourcesTest(unittest.TestCase):
def setUp(self):
self.elastic_patcher = patch('grafcli.resources.remote.elastic')
self.elastic = self.elastic_patcher.start()
self.elastic.return_value = self.elastic
def tearDown(self):
self.elastic_patcher.stop()
def test_list(self):
resources = RemoteResources()
self.elastic.list_dashboards.return_value = ['any_dashboard_1', 'any_dashboard_2']
self.assertListEqual(resources.list('any_host'),
['any_dashboard_1', 'any_dashboard_2'])
if __name__ == "__main__":
unittest.main()
|
Add remote resources tests stub.
|
Add remote resources tests stub.
|
Python
|
mit
|
m110/grafcli,m110/grafcli
|
Add remote resources tests stub.
|
#!/usr/bin/python3
import os
import sys
import unittest
from unittest.mock import patch, sentinel
LIB_PATH = os.path.dirname(os.path.realpath(__file__)) + '/../'
CONFIG_PATH = os.path.join(LIB_PATH, 'grafcli.example.conf')
sys.path.append(LIB_PATH)
from grafcli.config import load_config
load_config(CONFIG_PATH)
from grafcli.resources.remote import RemoteResources
class RemoteResourcesTest(unittest.TestCase):
def setUp(self):
self.elastic_patcher = patch('grafcli.resources.remote.elastic')
self.elastic = self.elastic_patcher.start()
self.elastic.return_value = self.elastic
def tearDown(self):
self.elastic_patcher.stop()
def test_list(self):
resources = RemoteResources()
self.elastic.list_dashboards.return_value = ['any_dashboard_1', 'any_dashboard_2']
self.assertListEqual(resources.list('any_host'),
['any_dashboard_1', 'any_dashboard_2'])
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add remote resources tests stub.<commit_after>
|
#!/usr/bin/python3
import os
import sys
import unittest
from unittest.mock import patch, sentinel
LIB_PATH = os.path.dirname(os.path.realpath(__file__)) + '/../'
CONFIG_PATH = os.path.join(LIB_PATH, 'grafcli.example.conf')
sys.path.append(LIB_PATH)
from grafcli.config import load_config
load_config(CONFIG_PATH)
from grafcli.resources.remote import RemoteResources
class RemoteResourcesTest(unittest.TestCase):
def setUp(self):
self.elastic_patcher = patch('grafcli.resources.remote.elastic')
self.elastic = self.elastic_patcher.start()
self.elastic.return_value = self.elastic
def tearDown(self):
self.elastic_patcher.stop()
def test_list(self):
resources = RemoteResources()
self.elastic.list_dashboards.return_value = ['any_dashboard_1', 'any_dashboard_2']
self.assertListEqual(resources.list('any_host'),
['any_dashboard_1', 'any_dashboard_2'])
if __name__ == "__main__":
unittest.main()
|
Add remote resources tests stub.#!/usr/bin/python3
import os
import sys
import unittest
from unittest.mock import patch, sentinel
LIB_PATH = os.path.dirname(os.path.realpath(__file__)) + '/../'
CONFIG_PATH = os.path.join(LIB_PATH, 'grafcli.example.conf')
sys.path.append(LIB_PATH)
from grafcli.config import load_config
load_config(CONFIG_PATH)
from grafcli.resources.remote import RemoteResources
class RemoteResourcesTest(unittest.TestCase):
def setUp(self):
self.elastic_patcher = patch('grafcli.resources.remote.elastic')
self.elastic = self.elastic_patcher.start()
self.elastic.return_value = self.elastic
def tearDown(self):
self.elastic_patcher.stop()
def test_list(self):
resources = RemoteResources()
self.elastic.list_dashboards.return_value = ['any_dashboard_1', 'any_dashboard_2']
self.assertListEqual(resources.list('any_host'),
['any_dashboard_1', 'any_dashboard_2'])
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add remote resources tests stub.<commit_after>#!/usr/bin/python3
import os
import sys
import unittest
from unittest.mock import patch, sentinel
LIB_PATH = os.path.dirname(os.path.realpath(__file__)) + '/../'
CONFIG_PATH = os.path.join(LIB_PATH, 'grafcli.example.conf')
sys.path.append(LIB_PATH)
from grafcli.config import load_config
load_config(CONFIG_PATH)
from grafcli.resources.remote import RemoteResources
class RemoteResourcesTest(unittest.TestCase):
def setUp(self):
self.elastic_patcher = patch('grafcli.resources.remote.elastic')
self.elastic = self.elastic_patcher.start()
self.elastic.return_value = self.elastic
def tearDown(self):
self.elastic_patcher.stop()
def test_list(self):
resources = RemoteResources()
self.elastic.list_dashboards.return_value = ['any_dashboard_1', 'any_dashboard_2']
self.assertListEqual(resources.list('any_host'),
['any_dashboard_1', 'any_dashboard_2'])
if __name__ == "__main__":
unittest.main()
|
|
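The setUp/tearDown patcher lifecycle in the test above generalizes to any module-level dependency; a self-contained sketch follows, where Client, client and list_remote are stand-ins rather than grafcli code.

import unittest
from unittest.mock import patch

class Client(object):
    def list_dashboards(self):
        raise RuntimeError('would hit the network')

client = Client()

def list_remote():
    # Resolves the module-level client at call time, so patching works.
    return client.list_dashboards()

class PatcherLifecycleTest(unittest.TestCase):
    def setUp(self):
        self.patcher = patch(__name__ + '.client')
        self.client = self.patcher.start()

    def tearDown(self):
        self.patcher.stop()

    def test_list(self):
        self.client.list_dashboards.return_value = ['dash_1', 'dash_2']
        self.assertEqual(list_remote(), ['dash_1', 'dash_2'])

if __name__ == '__main__':
    unittest.main()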
1f425fdfcb58641025e1a41a2c2c4cbd41eb97b5
|
interesting/mock.py
|
interesting/mock.py
|
class Mock(object):
class _MockQuery(object):
'''Access Mock internals'''
def __init__(self, mock):
self._mock = mock
@property
def attributes(self):
return set(self._mock._attrs.keys())
@property
def calls(self):
return tuple(self._mock._calls)
@property
def call(self):
if self._mock._calls:
return self._mock._calls[-1]
@property
def nr_calls(self):
return len(self._mock._calls)
@property
def return_(self):
return self._mock._return
@property
def except_(self):
return self._mock._except
@property
def yield_(self):
return self._mock._yield_orig
def __init__(self, return_=None, except_=None, yield_=None, **kwargs):
self._attrs = kwargs
self._calls = []
self._return = return_
self._except = except_
self._yield_orig = yield_
self._yield = iter(yield_) if yield_ else None
self._qry = self._MockQuery(self)
def __getattr__(self, value):
return self._attrs.setdefault(value, Mock())
def __call__(self, *args, **kwargs):
self._calls.append((args, kwargs))
if self._except:
raise self._except
if self._yield:
return next(self._yield)
if self._return is None:
self._return = Mock()
return self._return
@property
def qry(self):
return self._qry
|
Add custom Mock class implementation.
|
Add custom Mock class implementation.
|
Python
|
mit
|
pepincho/Python-Course-FMI
|
Add custom Mock class implementation.
|
class Mock(object):
class _MockQuery(object):
'''Access Mock internals'''
def __init__(self, mock):
self._mock = mock
@property
def attributes(self):
return set(self._mock._attrs.keys())
@property
def calls(self):
return tuple(self._mock._calls)
@property
def call(self):
if self._mock._calls:
return self._mock._calls[-1]
@property
def nr_calls(self):
return len(self._mock._calls)
@property
def return_(self):
return self._mock._return
@property
def except_(self):
return self._mock._except
@property
def yield_(self):
return self._mock._yield_orig
def __init__(self, return_=None, except_=None, yield_=None, **kwargs):
self._attrs = kwargs
self._calls = []
self._return = return_
self._except = except_
self._yield_orig = yield_
self._yield = iter(yield_) if yield_ else None
self._qry = self._MockQuery(self)
def __getattr__(self, value):
return self._attrs.setdefault(value, Mock())
def __call__(self, *args, **kwargs):
self._calls.append((args, kwargs))
if self._except:
raise self._except
if self._yield:
return next(self._yield)
if self._return is None:
self._return = Mock()
return self._return
@property
def qry(self):
return self._qry
|
<commit_before><commit_msg>Add custom Mock class implementation.<commit_after>
|
class Mock(object):
class _MockQuery(object):
'''Access Mock internals'''
def __init__(self, mock):
self._mock = mock
@property
def attributes(self):
return set(self._mock._attrs.keys())
@property
def calls(self):
return tuple(self._mock._calls)
@property
def call(self):
if self._mock._calls:
return self._mock._calls[-1]
@property
def nr_calls(self):
return len(self._mock._calls)
@property
def return_(self):
return self._mock._return
@property
def except_(self):
return self._mock._except
@property
def yield_(self):
return self._mock._yield_orig
def __init__(self, return_=None, except_=None, yield_=None, **kwargs):
self._attrs = kwargs
self._calls = []
self._return = return_
self._except = except_
self._yield_orig = yield_
self._yield = iter(yield_) if yield_ else None
self._qry = self._MockQuery(self)
def __getattr__(self, value):
return self._attrs.setdefault(value, Mock())
def __call__(self, *args, **kwargs):
self._calls.append((args, kwargs))
if self._except:
raise self._except
if self._yield:
return next(self._yield)
if self._return is None:
self._return = Mock()
return self._return
@property
def qry(self):
return self._qry
|
Add custom Mock class implementation.class Mock(object):
class _MockQuery(object):
'''Access Mock internals'''
def __init__(self, mock):
self._mock = mock
@property
def attributes(self):
return set(self._mock._attrs.keys())
@property
def calls(self):
return tuple(self._mock._calls)
@property
def call(self):
if self._mock._calls:
return self._mock._calls[-1]
@property
def nr_calls(self):
return len(self._mock._calls)
@property
def return_(self):
return self._mock._return
@property
def except_(self):
return self._mock._except
@property
def yield_(self):
return self._mock._yield_orig
def __init__(self, return_=None, except_=None, yield_=None, **kwargs):
self._attrs = kwargs
self._calls = []
self._return = return_
self._except = except_
self._yield_orig = yield_
self._yield = iter(yield_) if yield_ else None
self._qry = self._MockQuery(self)
def __getattr__(self, value):
return self._attrs.setdefault(value, Mock())
def __call__(self, *args, **kwargs):
self._calls.append((args, kwargs))
if self._except:
raise self._except
if self._yield:
return next(self._yield)
if self._return is None:
self._return = Mock()
return self._return
@property
def qry(self):
return self._qry
|
<commit_before><commit_msg>Add custom Mock class implementation.<commit_after>class Mock(object):
class _MockQuery(object):
'''Access Mock internals'''
def __init__(self, mock):
self._mock = mock
@property
def attributes(self):
return set(self._mock._attrs.keys())
@property
def calls(self):
return tuple(self._mock._calls)
@property
def call(self):
if self._mock._calls:
return self._mock._calls[-1]
@property
def nr_calls(self):
return len(self._mock._calls)
@property
def return_(self):
return self._mock._return
@property
def except_(self):
return self._mock._except
@property
def yield_(self):
return self._mock._yield_orig
def __init__(self, return_=None, except_=None, yield_=None, **kwargs):
self._attrs = kwargs
self._calls = []
self._return = return_
self._except = except_
self._yield_orig = yield_
self._yield = iter(yield_) if yield_ else None
self._qry = self._MockQuery(self)
def __getattr__(self, value):
return self._attrs.setdefault(value, Mock())
def __call__(self, *args, **kwargs):
self._calls.append((args, kwargs))
if self._except:
raise self._except
if self._yield:
return next(self._yield)
if self._return is None:
self._return = Mock()
return self._return
@property
def qry(self):
return self._qry
|
|
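Illustrative usage of the hand-rolled Mock above (not unittest.mock); the import assumes the class is importable from interesting/mock.py as committed.

from interesting.mock import Mock

m = Mock(return_=42, status='ok')
assert m.status == 'ok'                    # constructor kwargs become attributes
assert m.missing is not m                  # unknown attributes spring up as fresh Mocks
assert m(1, key='v') == 42                 # calls return the configured value
assert m.qry.nr_calls == 1                 # bookkeeping lives behind the qry property
assert m.qry.call == ((1,), {'key': 'v'})
assert 'status' in m.qry.attributes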
56947f660581805e701b31834b6b07975e245b1d
|
samples/discover_one_node.py
|
samples/discover_one_node.py
|
from __future__ import print_function
import re
import requests
from orionsdk import SwisClient
def main():
npm_server = 'localhost'
username = 'admin'
password = ''
target_node_ip = '1.2.3.4'
snmpv3_credential_id = 3
orion_engine_id = 1
swis = SwisClient(npm_server, username, password)
print("Add an SNMP v3 node:")
corePluginContext = {
'BulkList': [{'Address': target_node_ip}],
'Credentials': [
{
'CredentialID': snmpv3_credential_id,
'Order': 1
}
],
'WmiRetriesCount': 0,
'WmiRetryIntervalMiliseconds': 1000
}
corePluginConfig = swis.invoke('Orion.Discovery', 'CreateCorePluginConfiguration', corePluginContext)
discoveryProfile = {
'Name': 'discover_one_node.py',
'EngineID': orion_engine_id,
'JobTimeoutSeconds': 3600,
'SearchTimeoutMiliseconds': 5000,
'SnmpTimeoutMiliseconds': 5000,
'SnmpRetries': 2,
'RepeatIntervalMiliseconds': 1800,
'SnmpPort': 161,
'HopCount': 0,
'PreferredSnmpVersion': 'SNMP2c',
'DisableIcmp': False,
'AllowDuplicateNodes': False,
'IsAutoImport': True,
'IsHidden': True,
'PluginConfigurations': [{'PluginConfigurationItem': corePluginConfig}]
}
print("Running discovery...")
result = swis.invoke('Orion.Discovery', 'StartDiscovery', discoveryProfile)
print("Returned discovery profile id {}".format(result))
requests.packages.urllib3.disable_warnings()
if __name__ == '__main__':
main()
|
Add sample for running discovery of one SNMPv3 node
|
Add sample for running discovery of one SNMPv3 node
|
Python
|
apache-2.0
|
solarwinds/orionsdk-python
|
Add sample for running discovery of one SNMPv3 node
|
from __future__ import print_function
import re
import requests
from orionsdk import SwisClient
def main():
npm_server = 'localhost'
username = 'admin'
password = ''
target_node_ip = '1.2.3.4'
snmpv3_credential_id = 3
orion_engine_id = 1
swis = SwisClient(npm_server, username, password)
print("Add an SNMP v3 node:")
corePluginContext = {
'BulkList': [{'Address': target_node_ip}],
'Credentials': [
{
'CredentialID': snmpv3_credential_id,
'Order': 1
}
],
'WmiRetriesCount': 0,
'WmiRetryIntervalMiliseconds': 1000
}
corePluginConfig = swis.invoke('Orion.Discovery', 'CreateCorePluginConfiguration', corePluginContext)
discoveryProfile = {
'Name': 'discover_one_node.py',
'EngineID': orion_engine_id,
'JobTimeoutSeconds': 3600,
'SearchTimeoutMiliseconds': 5000,
'SnmpTimeoutMiliseconds': 5000,
'SnmpRetries': 2,
'RepeatIntervalMiliseconds': 1800,
'SnmpPort': 161,
'HopCount': 0,
'PreferredSnmpVersion': 'SNMP2c',
'DisableIcmp': False,
'AllowDuplicateNodes': False,
'IsAutoImport': True,
'IsHidden': True,
'PluginConfigurations': [{'PluginConfigurationItem': corePluginConfig}]
}
print("Running discovery...")
result = swis.invoke('Orion.Discovery', 'StartDiscovery', discoveryProfile)
print("Returned discovery profile id {}".format(result))
requests.packages.urllib3.disable_warnings()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add sample for running discovery of one SNMPv3 node<commit_after>
|
from __future__ import print_function
import re
import requests
from orionsdk import SwisClient
def main():
npm_server = 'localhost'
username = 'admin'
password = ''
target_node_ip = '1.2.3.4'
snmpv3_credential_id = 3
orion_engine_id = 1
swis = SwisClient(npm_server, username, password)
print("Add an SNMP v3 node:")
corePluginContext = {
'BulkList': [{'Address': target_node_ip}],
'Credentials': [
{
'CredentialID': snmpv3_credential_id,
'Order': 1
}
],
'WmiRetriesCount': 0,
'WmiRetryIntervalMiliseconds': 1000
}
corePluginConfig = swis.invoke('Orion.Discovery', 'CreateCorePluginConfiguration', corePluginContext)
discoveryProfile = {
'Name': 'discover_one_node.py',
'EngineID': orion_engine_id,
'JobTimeoutSeconds': 3600,
'SearchTimeoutMiliseconds': 5000,
'SnmpTimeoutMiliseconds': 5000,
'SnmpRetries': 2,
'RepeatIntervalMiliseconds': 1800,
'SnmpPort': 161,
'HopCount': 0,
'PreferredSnmpVersion': 'SNMP2c',
'DisableIcmp': False,
'AllowDuplicateNodes': False,
'IsAutoImport': True,
'IsHidden': True,
'PluginConfigurations': [{'PluginConfigurationItem': corePluginConfig}]
}
print("Running discovery...")
result = swis.invoke('Orion.Discovery', 'StartDiscovery', discoveryProfile)
print("Returned discovery profile id {}".format(result))
requests.packages.urllib3.disable_warnings()
if __name__ == '__main__':
main()
|
Add sample for running discovery of one SNMPv3 nodefrom __future__ import print_function
import re
import requests
from orionsdk import SwisClient
def main():
npm_server = 'localhost'
username = 'admin'
password = ''
target_node_ip = '1.2.3.4'
snmpv3_credential_id = 3
orion_engine_id = 1
swis = SwisClient(npm_server, username, password)
print("Add an SNMP v3 node:")
corePluginContext = {
'BulkList': [{'Address': target_node_ip}],
'Credentials': [
{
'CredentialID': snmpv3_credential_id,
'Order': 1
}
],
'WmiRetriesCount': 0,
'WmiRetryIntervalMiliseconds': 1000
}
corePluginConfig = swis.invoke('Orion.Discovery', 'CreateCorePluginConfiguration', corePluginContext)
discoveryProfile = {
'Name': 'discover_one_node.py',
'EngineID': orion_engine_id,
'JobTimeoutSeconds': 3600,
'SearchTimeoutMiliseconds': 5000,
'SnmpTimeoutMiliseconds': 5000,
'SnmpRetries': 2,
'RepeatIntervalMiliseconds': 1800,
'SnmpPort': 161,
'HopCount': 0,
'PreferredSnmpVersion': 'SNMP2c',
'DisableIcmp': False,
'AllowDuplicateNodes': False,
'IsAutoImport': True,
'IsHidden': True,
'PluginConfigurations': [{'PluginConfigurationItem': corePluginConfig}]
}
print("Running discovery...")
result = swis.invoke('Orion.Discovery', 'StartDiscovery', discoveryProfile)
print("Returned discovery profile id {}".format(result))
requests.packages.urllib3.disable_warnings()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add sample for running discovery of one SNMPv3 node<commit_after>from __future__ import print_function
import re
import requests
from orionsdk import SwisClient
def main():
npm_server = 'localhost'
username = 'admin'
password = ''
target_node_ip = '1.2.3.4'
snmpv3_credential_id = 3
orion_engine_id = 1
swis = SwisClient(npm_server, username, password)
print("Add an SNMP v3 node:")
corePluginContext = {
'BulkList': [{'Address': target_node_ip}],
'Credentials': [
{
'CredentialID': snmpv3_credential_id,
'Order': 1
}
],
'WmiRetriesCount': 0,
'WmiRetryIntervalMiliseconds': 1000
}
corePluginConfig = swis.invoke('Orion.Discovery', 'CreateCorePluginConfiguration', corePluginContext)
discoveryProfile = {
'Name': 'discover_one_node.py',
'EngineID': orion_engine_id,
'JobTimeoutSeconds': 3600,
'SearchTimeoutMiliseconds': 5000,
'SnmpTimeoutMiliseconds': 5000,
'SnmpRetries': 2,
'RepeatIntervalMiliseconds': 1800,
'SnmpPort': 161,
'HopCount': 0,
'PreferredSnmpVersion': 'SNMP2c',
'DisableIcmp': False,
'AllowDuplicateNodes': False,
'IsAutoImport': True,
'IsHidden': True,
'PluginConfigurations': [{'PluginConfigurationItem': corePluginConfig}]
}
print("Running discovery...")
result = swis.invoke('Orion.Discovery', 'StartDiscovery', discoveryProfile)
print("Returned discovery profile id {}".format(result))
requests.packages.urllib3.disable_warnings()
if __name__ == '__main__':
main()
|
|
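A follow-up sketch that confirms the auto-import worked, using SwisClient.query from the same orionsdk package; the SWQL columns are standard Orion.Nodes fields, but the exact result shape (the 'results' key) is an assumption worth verifying against your Orion version.

from __future__ import print_function
import requests
from orionsdk import SwisClient

def verify_import(npm_server, username, password, target_node_ip):
    requests.packages.urllib3.disable_warnings()
    swis = SwisClient(npm_server, username, password)
    # SWQL query for the node the discovery profile should have imported.
    results = swis.query(
        'SELECT NodeID, Caption FROM Orion.Nodes WHERE IPAddress = @ip',
        ip=target_node_ip)
    for row in results['results']:
        print('Imported node:', row['NodeID'], row['Caption'])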
f13bc3017552c3d19d9b642c9f61b3f7262256a1
|
tests/cogs/test_admin.py
|
tests/cogs/test_admin.py
|
from __future__ import annotations
from typing import TYPE_CHECKING
import pytest
import pytest_mock
from erasmus.cogs.admin import Admin
from erasmus.erasmus import Erasmus
from erasmus.types import Refreshable
if TYPE_CHECKING:
from unittest.mock import Mock
class TestAdmin:
@pytest.fixture
def mock_bot(self, mocker: pytest_mock.MockerFixture) -> Mock:
return mocker.Mock(spec=Erasmus)
def test_instantiate(self, mock_bot: Erasmus) -> None:
cog = Admin(mock_bot)
assert cog is not None
assert not isinstance(cog, Refreshable)
|
Add initial tests for admin cog
|
Add initial tests for admin cog
|
Python
|
bsd-3-clause
|
bryanforbes/Erasmus
|
Add initial tests for admin cog
|
from __future__ import annotations
from typing import TYPE_CHECKING
import pytest
import pytest_mock
from erasmus.cogs.admin import Admin
from erasmus.erasmus import Erasmus
from erasmus.types import Refreshable
if TYPE_CHECKING:
from unittest.mock import Mock
class TestAdmin:
@pytest.fixture
def mock_bot(self, mocker: pytest_mock.MockerFixture) -> Mock:
return mocker.Mock(spec=Erasmus)
def test_instantiate(self, mock_bot: Erasmus) -> None:
cog = Admin(mock_bot)
assert cog is not None
assert not isinstance(cog, Refreshable)
|
<commit_before><commit_msg>Add initial tests for admin cog<commit_after>
|
from __future__ import annotations
from typing import TYPE_CHECKING
import pytest
import pytest_mock
from erasmus.cogs.admin import Admin
from erasmus.erasmus import Erasmus
from erasmus.types import Refreshable
if TYPE_CHECKING:
from unittest.mock import Mock
class TestAdmin:
@pytest.fixture
def mock_bot(self, mocker: pytest_mock.MockerFixture) -> Mock:
return mocker.Mock(spec=Erasmus)
def test_instantiate(self, mock_bot: Erasmus) -> None:
cog = Admin(mock_bot)
assert cog is not None
assert not isinstance(cog, Refreshable)
|
Add initial tests for admin cogfrom __future__ import annotations
from typing import TYPE_CHECKING
import pytest
import pytest_mock
from erasmus.cogs.admin import Admin
from erasmus.erasmus import Erasmus
from erasmus.types import Refreshable
if TYPE_CHECKING:
from unittest.mock import Mock
class TestAdmin:
@pytest.fixture
def mock_bot(self, mocker: pytest_mock.MockerFixture) -> Mock:
return mocker.Mock(spec=Erasmus)
def test_instantiate(self, mock_bot: Erasmus) -> None:
cog = Admin(mock_bot)
assert cog is not None
assert not isinstance(cog, Refreshable)
|
<commit_before><commit_msg>Add initial tests for admin cog<commit_after>from __future__ import annotations
from typing import TYPE_CHECKING
import pytest
import pytest_mock
from erasmus.cogs.admin import Admin
from erasmus.erasmus import Erasmus
from erasmus.types import Refreshable
if TYPE_CHECKING:
from unittest.mock import Mock
class TestAdmin:
@pytest.fixture
def mock_bot(self, mocker: pytest_mock.MockerFixture) -> Mock:
return mocker.Mock(spec=Erasmus)
def test_instantiate(self, mock_bot: Erasmus) -> None:
cog = Admin(mock_bot)
assert cog is not None
assert not isinstance(cog, Refreshable)
|
|
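The negative isinstance check above only works because Refreshable is runtime-checkable; here is a self-contained sketch of the same pattern with a stand-in Protocol and cog class (the real erasmus.types.Refreshable may be defined differently).

from typing import Protocol, runtime_checkable
import pytest

@runtime_checkable
class Refreshable(Protocol):
    async def refresh(self) -> None: ...

class Admin:
    def __init__(self, bot: object) -> None:
        self.bot = bot

@pytest.fixture
def mock_bot(mocker):
    return mocker.Mock()  # requires the pytest-mock plugin

def test_instantiate(mock_bot) -> None:
    cog = Admin(mock_bot)
    assert cog is not None
    assert not isinstance(cog, Refreshable)  # Admin defines no refresh()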
dfc22bc3b67bd6164d8eb2c2e51638a7f0fca5a9
|
parsing/tracking_logs/generate_course_tracking_logs.py
|
parsing/tracking_logs/generate_course_tracking_logs.py
|
'''
This module will extract tracking logs for a given course and date range
between when course enrollment started and when the course ended. For each log,
the parent_data and meta_data from the course_structure collection will be
appended to the log based on the event key in the log
'''
import pymongo
import sys
def main():
db_name = sys.argv[1]
collection_name = sys.argv[2]
connection = pymongo.Connection('localhost', 27017)
db = connection[db_name]
tracking = db[collection_name]
if __name__ == '__main__':
main()
|
Add script to generate course specific tracking logs
|
Add script to generate course specific tracking logs
|
Python
|
mit
|
andyzsf/edx_data_research,McGillX/edx_data_research,McGillX/edx_data_research,McGillX/edx_data_research,andyzsf/edx_data_research
|
Add script to generate course specific tracking logs
|
'''
This module will extract tracking logs for a given course and date range
between when course enrollment started and when the course ended. For each log,
the parent_data and meta_data from the course_structure collection will be
appended to the log based on the event key in the log
'''
import pymongo
import sys
def main():
db_name = sys.argv[1]
collection_name = sys.argv[2]
connection = pymongo.Connection('localhost', 27017)
db = connection[db_name]
tracking = db[collection_name]
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script to generate course specific tracking logs<commit_after>
|
'''
This module will extract tracking logs for a given course and date range
between when course enrollment started and when the course ended. For each log,
the parent_data and meta_data from the course_structure collection will be
appended to the log based on the event key in the log
'''
import pymongo
import sys
def main():
db_name = sys.argv[1]
collection_name = sys.argv[2]
connection = pymongo.Connection('localhost', 27017)
db = connection[db_name]
tracking = db[collection_name]
if __name__ == '__main__':
main()
|
Add script to generate course specific tracking logs'''
This module will extract tracking logs for a given course and date range
between when course enrollment started and when the course ended. For each log,
the parent_data and meta_data from the course_structure collection will be
appended to the log based on the event key in the log
'''
import pymongo
import sys
def main():
db_name = sys.argv[1]
collection_name = sys.argv[2]
connection = pymongo.Connection('localhost', 27017)
db = connection[db_name]
tracking = db[collection_name]
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script to generate course specific tracking logs<commit_after>'''
This module will extract tracking logs for a given course and date range
between when course enrollment started and when the course ended. For each log,
the parent_data and meta_data from the course_structure collection will be
appended to the log based on the event key in the log
'''
import pymongo
import sys
def main():
db_name = sys.argv[1]
collection_name = sys.argv[2]
connection = pymongo.Connection('localhost', 27017)
db = connection[db_name]
tracking = db[collection_name]
if __name__ == '__main__':
main()
|
|
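The stub above only opens the connection; a sketch of the extraction the docstring describes, using the modern MongoClient in place of the deprecated pymongo.Connection -- the field names course_id and time are hypothetical and should be checked against the actual tracking-log schema.

import pymongo

def extract_course_logs(db_name, collection_name, course_id, start, end):
    client = pymongo.MongoClient('localhost', 27017)
    tracking = client[db_name][collection_name]
    # Logs for one course between enrollment start and course end.
    query = {'course_id': course_id,
             'time': {'$gte': start, '$lt': end}}
    return list(tracking.find(query))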
6989972846e67e5f3fa92a5ed4f49526ea46e2e4
|
convergence_tests/3D_helmholtz_extr.py
|
convergence_tests/3D_helmholtz_extr.py
|
from __future__ import absolute_import, division
from firedrake import *
def run_hybrid_extr_helmholtz(degree, res, quads):
nx = 2 ** res
ny = 2 ** res
nz = 2 ** (res - 1)
h = 0.2 / nz
base = UnitSquareMesh(nx, ny, quadrilateral=quads)
mesh = ExtrudedMesh(base, layers=nz, layer_height=h)
if quads:
RT = FiniteElement("RTCF", quadrilateral, degree + 1)
DG_v = FiniteElement("DG", interval, degree)
DG_h = FiniteElement("DQ", quadrilateral, degree)
CG = FiniteElement("CG", interval, degree + 1)
else:
RT = FiniteElement("RT", triangle, degree + 1)
DG_v = FiniteElement("DG", interval, degree)
DG_h = FiniteElement("DG", triangle, degree)
CG = FiniteElement("CG", interval, degree + 1)
HDiv_ele = EnrichedElement(HDiv(TensorProductElement(RT, DG_v)),
HDiv(TensorProductElement(DG_h, CG)))
V = FunctionSpace(mesh, HDiv_ele)
U = FunctionSpace(mesh, "DG", degree)
W = V * U
x, y, z = SpatialCoordinate(mesh)
f = Function(U)
f.interpolate(Expression("(1+38*pi*pi)*sin(x[0]*pi*2)*sin(x[1]*pi*3)*sin(x[2]*pi*5)"))
exact = Function(U)
exact.interpolate(Expression("sin(x[0]*pi*2)*sin(x[1]*pi*3)*sin(x[2]*pi*5)"))
exact.rename("exact")
sigma, u = TrialFunctions(W)
tau, v = TestFunctions(W)
a = dot(sigma, tau)*dx + u*v*dx + div(sigma)*v*dx - div(tau)*u*dx
L = f*v*dx
w = Function(W)
params = {'mat_type': 'matfree',
'ksp_type': 'preonly',
'pc_type': 'python',
'pc_python_type': 'firedrake.HybridizationPC',
'hybridization': {'ksp_type': 'preonly',
'pc_type': 'lu',
'hdiv_residual': {'ksp_type': 'cg',
'ksp_rtol': 1e-14},
'use_reconstructor': True}}
solve(a == L, w, solver_parameters=params)
sigma_h, u_h = w.split()
sigma_h.rename("flux")
u_h.rename("pressure")
print(errornorm(u_h, exact))
File("3D-hybrid.pvd").write(sigma_h, u_h, exact)
run_hybrid_extr_helmholtz(0, 5, quads=False)
|
Add working 3D extruded Helmholtz demo
|
Add working 3D extruded Helmholtz demo
|
Python
|
mit
|
thomasgibson/firedrake-hybridization
|
Add working 3D extruded Helmholtz demo
|
from __future__ import absolute_import, division
from firedrake import *
def run_hybrid_extr_helmholtz(degree, res, quads):
nx = 2 ** res
ny = 2 ** res
nz = 2 ** (res - 1)
h = 0.2 / nz
base = UnitSquareMesh(nx, ny, quadrilateral=quads)
mesh = ExtrudedMesh(base, layers=nz, layer_height=h)
if quads:
RT = FiniteElement("RTCF", quadrilateral, degree + 1)
DG_v = FiniteElement("DG", interval, degree)
DG_h = FiniteElement("DQ", quadrilateral, degree)
CG = FiniteElement("CG", interval, degree + 1)
else:
RT = FiniteElement("RT", triangle, degree + 1)
DG_v = FiniteElement("DG", interval, degree)
DG_h = FiniteElement("DG", triangle, degree)
CG = FiniteElement("CG", interval, degree + 1)
HDiv_ele = EnrichedElement(HDiv(TensorProductElement(RT, DG_v)),
HDiv(TensorProductElement(DG_h, CG)))
V = FunctionSpace(mesh, HDiv_ele)
U = FunctionSpace(mesh, "DG", degree)
W = V * U
x, y, z = SpatialCoordinate(mesh)
f = Function(U)
f.interpolate(Expression("(1+38*pi*pi)*sin(x[0]*pi*2)*sin(x[1]*pi*3)*sin(x[2]*pi*5)"))
exact = Function(U)
exact.interpolate(Expression("sin(x[0]*pi*2)*sin(x[1]*pi*3)*sin(x[2]*pi*5)"))
exact.rename("exact")
sigma, u = TrialFunctions(W)
tau, v = TestFunctions(W)
a = dot(sigma, tau)*dx + u*v*dx + div(sigma)*v*dx - div(tau)*u*dx
L = f*v*dx
w = Function(W)
params = {'mat_type': 'matfree',
'ksp_type': 'preonly',
'pc_type': 'python',
'pc_python_type': 'firedrake.HybridizationPC',
'hybridization': {'ksp_type': 'preonly',
'pc_type': 'lu',
'hdiv_residual': {'ksp_type': 'cg',
'ksp_rtol': 1e-14},
'use_reconstructor': True}}
solve(a == L, w, solver_parameters=params)
sigma_h, u_h = w.split()
sigma_h.rename("flux")
u_h.rename("pressure")
print(errornorm(u_h, exact))
File("3D-hybrid.pvd").write(sigma_h, u_h, exact)
run_hybrid_extr_helmholtz(0, 5, quads=False)
|
<commit_before><commit_msg>Add working 3D extruded Helmholtz demo<commit_after>
|
from __future__ import absolute_import, division
from firedrake import *
def run_hybrid_extr_helmholtz(degree, res, quads):
nx = 2 ** res
ny = 2 ** res
nz = 2 ** (res - 1)
h = 0.2 / nz
base = UnitSquareMesh(nx, ny, quadrilateral=quads)
mesh = ExtrudedMesh(base, layers=nz, layer_height=h)
if quads:
RT = FiniteElement("RTCF", quadrilateral, degree + 1)
DG_v = FiniteElement("DG", interval, degree)
DG_h = FiniteElement("DQ", quadrilateral, degree)
CG = FiniteElement("CG", interval, degree + 1)
else:
RT = FiniteElement("RT", triangle, degree + 1)
DG_v = FiniteElement("DG", interval, degree)
DG_h = FiniteElement("DG", triangle, degree)
CG = FiniteElement("CG", interval, degree + 1)
HDiv_ele = EnrichedElement(HDiv(TensorProductElement(RT, DG_v)),
HDiv(TensorProductElement(DG_h, CG)))
V = FunctionSpace(mesh, HDiv_ele)
U = FunctionSpace(mesh, "DG", degree)
W = V * U
x, y, z = SpatialCoordinate(mesh)
f = Function(U)
f.interpolate(Expression("(1+38*pi*pi)*sin(x[0]*pi*2)*sin(x[1]*pi*3)*sin(x[2]*pi*5)"))
exact = Function(U)
exact.interpolate(Expression("sin(x[0]*pi*2)*sin(x[1]*pi*3)*sin(x[2]*pi*5)"))
exact.rename("exact")
sigma, u = TrialFunctions(W)
tau, v = TestFunctions(W)
a = dot(sigma, tau)*dx + u*v*dx + div(sigma)*v*dx - div(tau)*u*dx
L = f*v*dx
w = Function(W)
params = {'mat_type': 'matfree',
'ksp_type': 'preonly',
'pc_type': 'python',
'pc_python_type': 'firedrake.HybridizationPC',
'hybridization': {'ksp_type': 'preonly',
'pc_type': 'lu',
'hdiv_residual': {'ksp_type': 'cg',
'ksp_rtol': 1e-14},
'use_reconstructor': True}}
solve(a == L, w, solver_parameters=params)
sigma_h, u_h = w.split()
sigma_h.rename("flux")
u_h.rename("pressure")
print(errornorm(u_h, exact))
File("3D-hybrid.pvd").write(sigma_h, u_h, exact)
run_hybrid_extr_helmholtz(0, 5, quads=False)
|
Add working 3D extruded Helmholtz demofrom __future__ import absolute_import, division
from firedrake import *
def run_hybrid_extr_helmholtz(degree, res, quads):
nx = 2 ** res
ny = 2 ** res
nz = 2 ** (res - 1)
h = 0.2 / nz
base = UnitSquareMesh(nx, ny, quadrilateral=quads)
mesh = ExtrudedMesh(base, layers=nz, layer_height=h)
if quads:
RT = FiniteElement("RTCF", quadrilateral, degree + 1)
DG_v = FiniteElement("DG", interval, degree)
DG_h = FiniteElement("DQ", quadrilateral, degree)
CG = FiniteElement("CG", interval, degree + 1)
else:
RT = FiniteElement("RT", triangle, degree + 1)
DG_v = FiniteElement("DG", interval, degree)
DG_h = FiniteElement("DG", triangle, degree)
CG = FiniteElement("CG", interval, degree + 1)
HDiv_ele = EnrichedElement(HDiv(TensorProductElement(RT, DG_v)),
HDiv(TensorProductElement(DG_h, CG)))
V = FunctionSpace(mesh, HDiv_ele)
U = FunctionSpace(mesh, "DG", degree)
W = V * U
x, y, z = SpatialCoordinate(mesh)
f = Function(U)
f.interpolate(Expression("(1+38*pi*pi)*sin(x[0]*pi*2)*sin(x[1]*pi*3)*sin(x[2]*pi*5)"))
exact = Function(U)
exact.interpolate(Expression("sin(x[0]*pi*2)*sin(x[1]*pi*3)*sin(x[2]*pi*5)"))
exact.rename("exact")
sigma, u = TrialFunctions(W)
tau, v = TestFunctions(W)
a = dot(sigma, tau)*dx + u*v*dx + div(sigma)*v*dx - div(tau)*u*dx
L = f*v*dx
w = Function(W)
params = {'mat_type': 'matfree',
'ksp_type': 'preonly',
'pc_type': 'python',
'pc_python_type': 'firedrake.HybridizationPC',
'hybridization': {'ksp_type': 'preonly',
'pc_type': 'lu',
'hdiv_residual': {'ksp_type': 'cg',
'ksp_rtol': 1e-14},
'use_reconstructor': True}}
solve(a == L, w, solver_parameters=params)
sigma_h, u_h = w.split()
sigma_h.rename("flux")
u_h.rename("pressure")
print(errornorm(u_h, exact))
File("3D-hybrid.pvd").write(sigma_h, u_h, exact)
run_hybrid_extr_helmholtz(0, 5, quads=False)
|
<commit_before><commit_msg>Add working 3D extruded Helmholtz demo<commit_after>from __future__ import absolute_import, division
from firedrake import *
def run_hybrid_extr_helmholtz(degree, res, quads):
nx = 2 ** res
ny = 2 ** res
nz = 2 ** (res - 1)
h = 0.2 / nz
base = UnitSquareMesh(nx, ny, quadrilateral=quads)
mesh = ExtrudedMesh(base, layers=nz, layer_height=h)
if quads:
RT = FiniteElement("RTCF", quadrilateral, degree + 1)
DG_v = FiniteElement("DG", interval, degree)
DG_h = FiniteElement("DQ", quadrilateral, degree)
CG = FiniteElement("CG", interval, degree + 1)
else:
RT = FiniteElement("RT", triangle, degree + 1)
DG_v = FiniteElement("DG", interval, degree)
DG_h = FiniteElement("DG", triangle, degree)
CG = FiniteElement("CG", interval, degree + 1)
HDiv_ele = EnrichedElement(HDiv(TensorProductElement(RT, DG_v)),
HDiv(TensorProductElement(DG_h, CG)))
V = FunctionSpace(mesh, HDiv_ele)
U = FunctionSpace(mesh, "DG", degree)
W = V * U
x, y, z = SpatialCoordinate(mesh)
f = Function(U)
f.interpolate(Expression("(1+38*pi*pi)*sin(x[0]*pi*2)*sin(x[1]*pi*3)*sin(x[2]*pi*5)"))
exact = Function(U)
exact.interpolate(Expression("sin(x[0]*pi*2)*sin(x[1]*pi*3)*sin(x[2]*pi*5)"))
exact.rename("exact")
sigma, u = TrialFunctions(W)
tau, v = TestFunctions(W)
a = dot(sigma, tau)*dx + u*v*dx + div(sigma)*v*dx - div(tau)*u*dx
L = f*v*dx
w = Function(W)
params = {'mat_type': 'matfree',
'ksp_type': 'preonly',
'pc_type': 'python',
'pc_python_type': 'firedrake.HybridizationPC',
'hybridization': {'ksp_type': 'preonly',
'pc_type': 'lu',
'hdiv_residual': {'ksp_type': 'cg',
'ksp_rtol': 1e-14},
'use_reconstructor': True}}
solve(a == L, w, solver_parameters=params)
sigma_h, u_h = w.split()
sigma_h.rename("flux")
u_h.rename("pressure")
print(errornorm(u_h, exact))
File("3D-hybrid.pvd").write(sigma_h, u_h, exact)
run_hybrid_extr_helmholtz(0, 5, quads=False)
|
|
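To turn the demo above into an actual convergence test, one would have run_hybrid_extr_helmholtz return errornorm(u_h, exact) instead of printing it, run it over several res levels, and inspect successive error ratios; since the mesh spacing halves per level, the observed order is a simple log2. A sketch, with made-up error values in the comment:

import math

def observed_orders(errors):
    # With h halving between refinement levels, the observed
    # convergence order is log2(e_coarse / e_fine) per step.
    return [math.log2(e0 / e1) for e0, e1 in zip(errors, errors[1:])]

# e.g. for errors collected at res = 3, 4, 5:
# observed_orders([2.1e-2, 1.05e-2, 5.2e-3])  ->  roughly [1.0, 1.0]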
d87b22ee69d47d0edf33be36a217ed5d5a6a599b
|
flocker/__init__.py
|
flocker/__init__.py
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
# -*- test-case-name: flocker.test -*-
"""
Flocker is a hypervisor that provides ZFS-based replication and fail-over
functionality to a Linux-based user-space operating system.
"""
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Flocker is a hypervisor that provides ZFS-based replication and fail-over
functionality to a Linux-based user-space operating system.
"""
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
Remove reference to old tests.
|
Remove reference to old tests.
|
Python
|
apache-2.0
|
hackday-profilers/flocker,jml/flocker,wallnerryan/flocker-profiles,LaynePeng/flocker,runcom/flocker,mbrukman/flocker,beni55/flocker,adamtheturtle/flocker,w4ngyi/flocker,LaynePeng/flocker,achanda/flocker,mbrukman/flocker,achanda/flocker,moypray/flocker,lukemarsden/flocker,achanda/flocker,1d4Nf6/flocker,jml/flocker,lukemarsden/flocker,agonzalezro/flocker,agonzalezro/flocker,AndyHuu/flocker,LaynePeng/flocker,AndyHuu/flocker,runcom/flocker,Azulinho/flocker,adamtheturtle/flocker,lukemarsden/flocker,w4ngyi/flocker,hackday-profilers/flocker,agonzalezro/flocker,AndyHuu/flocker,beni55/flocker,adamtheturtle/flocker,w4ngyi/flocker,moypray/flocker,1d4Nf6/flocker,wallnerryan/flocker-profiles,runcom/flocker,hackday-profilers/flocker,beni55/flocker,Azulinho/flocker,mbrukman/flocker,jml/flocker,1d4Nf6/flocker,moypray/flocker,wallnerryan/flocker-profiles,Azulinho/flocker
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
# -*- test-case-name: flocker.test -*-
"""
Flocker is a hypervisor that provides ZFS-based replication and fail-over
functionality to a Linux-based user-space operating system.
"""
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
Remove reference to old tests.
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Flocker is a hypervisor that provides ZFS-based replication and fail-over
functionality to a Linux-based user-space operating system.
"""
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
<commit_before># Copyright Hybrid Logic Ltd. See LICENSE file for details.
# -*- test-case-name: flocker.test -*-
"""
Flocker is a hypervisor that provides ZFS-based replication and fail-over
functionality to a Linux-based user-space operating system.
"""
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
<commit_msg>Remove reference to old tests.<commit_after>
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Flocker is a hypervisor that provides ZFS-based replication and fail-over
functionality to a Linux-based user-space operating system.
"""
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
# -*- test-case-name: flocker.test -*-
"""
Flocker is a hypervisor that provides ZFS-based replication and fail-over
functionality to a Linux-based user-space operating system.
"""
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
Remove reference to old tests.# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Flocker is a hypervisor that provides ZFS-based replication and fail-over
functionality to a Linux-based user-space operating system.
"""
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
<commit_before># Copyright Hybrid Logic Ltd. See LICENSE file for details.
# -*- test-case-name: flocker.test -*-
"""
Flocker is a hypervisor that provides ZFS-based replication and fail-over
functionality to a Linux-based user-space operating system.
"""
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
<commit_msg>Remove reference to old tests.<commit_after># Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Flocker is a hypervisor that provides ZFS-based replication and fail-over
functionality to a Linux-based user-space operating system.
"""
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
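For context, the deleted comment is Twisted's trial hint; a sketch of the convention, which is only useful when the named test module actually exists (flocker.test no longer did):

# -*- test-case-name: mypackage.test.test_module -*-
# The emacs-style tag above tells trial tooling which test module
# exercises this file; a stale target is worse than none.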
c7f1cbf41ef174555386fbf3b4200285bdee19b6
|
numba/tests/test_unsigned_arith.py
|
numba/tests/test_unsigned_arith.py
|
import numpy as np
import unittest
from numba import void, uint32, int32, jit, uint64, int64, typeof, bool_
@jit(void(uint32[:], uint32, uint32))
def prng(X, A, C):
for i in range(X.shape[0]):
for j in range(100):
v = (A * X[i] + C)
X[i] = v
@jit(uint32())
def unsigned_literal():
return abs(0xFFFFFFFF)
@jit(int64())
def unsigned_literal_64():
return 0x100000000
class Test(unittest.TestCase):
def test_prng(self):
N = 100
A = 1664525
C = 1013904223
X0 = np.arange(N, dtype=np.uint32)
X1 = X0.copy()
prng.py_func(X0, A, C)
prng(X1, A, C)
self.assertTrue(np.all(X1 >= 0))
self.assertTrue(np.all(X0 == X1))
def test_unsigned_literal(self):
got = unsigned_literal()
expect = abs(0xFFFFFFFF)
self.assertEqual(expect, got)
def test_unsigned_literal_64(self):
got = unsigned_literal_64()
expect = 0x100000000
self.assertEqual(expect, got)
if __name__ == '__main__':
unittest.main()
|
Add tests for unsigned arithmetic
|
Add tests for unsigned arithmetic
|
Python
|
bsd-2-clause
|
pombredanne/numba,stonebig/numba,gmarkall/numba,GaZ3ll3/numba,IntelLabs/numba,pitrou/numba,numba/numba,gdementen/numba,gdementen/numba,IntelLabs/numba,GaZ3ll3/numba,stuartarchibald/numba,pitrou/numba,IntelLabs/numba,pombredanne/numba,sklam/numba,cpcloud/numba,pombredanne/numba,stuartarchibald/numba,shiquanwang/numba,GaZ3ll3/numba,shiquanwang/numba,jriehl/numba,gmarkall/numba,pombredanne/numba,stonebig/numba,sklam/numba,jriehl/numba,sklam/numba,gmarkall/numba,ssarangi/numba,cpcloud/numba,GaZ3ll3/numba,gmarkall/numba,gmarkall/numba,seibert/numba,GaZ3ll3/numba,pitrou/numba,stefanseefeld/numba,numba/numba,stuartarchibald/numba,seibert/numba,sklam/numba,gdementen/numba,ssarangi/numba,cpcloud/numba,seibert/numba,stuartarchibald/numba,shiquanwang/numba,gdementen/numba,stonebig/numba,numba/numba,seibert/numba,seibert/numba,stuartarchibald/numba,cpcloud/numba,ssarangi/numba,stonebig/numba,numba/numba,sklam/numba,ssarangi/numba,stefanseefeld/numba,numba/numba,cpcloud/numba,stefanseefeld/numba,stonebig/numba,IntelLabs/numba,jriehl/numba,IntelLabs/numba,ssarangi/numba,pombredanne/numba,pitrou/numba,stefanseefeld/numba,pitrou/numba,jriehl/numba,gdementen/numba,stefanseefeld/numba,jriehl/numba
|
Add tests for unsigned arithmetic
|
import numpy as np
import unittest
from numba import void, uint32, int32, jit, uint64, int64, typeof, bool_
@jit(void(uint32[:], uint32, uint32))
def prng(X, A, C):
for i in range(X.shape[0]):
for j in range(100):
v = (A * X[i] + C)
X[i] = v
@jit(uint32())
def unsigned_literal():
return abs(0xFFFFFFFF)
@jit(int64())
def unsigned_literal_64():
return 0x100000000
class Test(unittest.TestCase):
def test_prng(self):
N = 100
A = 1664525
C = 1013904223
X0 = np.arange(N, dtype=np.uint32)
X1 = X0.copy()
prng.py_func(X0, A, C)
prng(X1, A, C)
self.assertTrue(np.all(X1 >= 0))
self.assertTrue(np.all(X0 == X1))
def test_unsigned_literal(self):
got = unsigned_literal()
expect = abs(0xFFFFFFFF)
self.assertEqual(expect, got)
def test_unsigned_literal_64(self):
got = unsigned_literal_64()
expect = 0x100000000
self.assertEqual(expect, got)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add tests for unsigned arithmetic<commit_after>
|
import numpy as np
import unittest
from numba import void, uint32, int32, jit, uint64, int64, typeof, bool_
@jit(void(uint32[:], uint32, uint32))
def prng(X, A, C):
for i in range(X.shape[0]):
for j in range(100):
v = (A * X[i] + C)
X[i] = v
@jit(uint32())
def unsigned_literal():
return abs(0xFFFFFFFF)
@jit(int64())
def unsigned_literal_64():
return 0x100000000
class Test(unittest.TestCase):
def test_prng(self):
N = 100
A = 1664525
C = 1013904223
X0 = np.arange(N, dtype=np.uint32)
X1 = X0.copy()
prng.py_func(X0, A, C)
prng(X1, A, C)
self.assertTrue(np.all(X1 >= 0))
self.assertTrue(np.all(X0 == X1))
def test_unsigned_literal(self):
got = unsigned_literal()
expect = abs(0xFFFFFFFF)
self.assertEqual(expect, got)
def test_unsigned_literal_64(self):
got = unsigned_literal_64()
expect = 0x100000000
self.assertEqual(expect, got)
if __name__ == '__main__':
unittest.main()
|
Add tests for unsigned arithmeticimport numpy as np
import unittest
from numba import void, uint32, int32, jit, uint64, int64, typeof, bool_
@jit(void(uint32[:], uint32, uint32))
def prng(X, A, C):
for i in range(X.shape[0]):
for j in range(100):
v = (A * X[i] + C)
X[i] = v
@jit(uint32())
def unsigned_literal():
return abs(0xFFFFFFFF)
@jit(int64())
def unsigned_literal_64():
return 0x100000000
class Test(unittest.TestCase):
def test_prng(self):
N = 100
A = 1664525
C = 1013904223
X0 = np.arange(N, dtype=np.uint32)
X1 = X0.copy()
prng.py_func(X0, A, C)
prng(X1, A, C)
self.assertTrue(np.all(X1 >= 0))
self.assertTrue(np.all(X0 == X1))
def test_unsigned_literal(self):
got = unsigned_literal()
expect = abs(0xFFFFFFFF)
self.assertEqual(expect, got)
def test_unsigned_literal_64(self):
got = unsigned_literal_64()
expect = 0x100000000
self.assertEqual(expect, got)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add tests for unsigned arithmetic<commit_after>import numpy as np
import unittest
from numba import void, uint32, int32, jit, uint64, int64, typeof, bool_
@jit(void(uint32[:], uint32, uint32))
def prng(X, A, C):
for i in range(X.shape[0]):
for j in range(100):
v = (A * X[i] + C)
X[i] = v
@jit(uint32())
def unsigned_literal():
return abs(0xFFFFFFFF)
@jit(int64())
def unsigned_literal_64():
return 0x100000000
class Test(unittest.TestCase):
def test_prng(self):
N = 100
A = 1664525
C = 1013904223
X0 = np.arange(N, dtype=np.uint32)
X1 = X0.copy()
prng.py_func(X0, A, C)
prng(X1, A, C)
self.assertTrue(np.all(X1 >= 0))
self.assertTrue(np.all(X0 == X1))
def test_unsigned_literal(self):
got = unsigned_literal()
expect = abs(0xFFFFFFFF)
self.assertEqual(expect, got)
def test_unsigned_literal_64(self):
got = unsigned_literal_64()
expect = 0x100000000
self.assertEqual(expect, got)
if __name__ == '__main__':
unittest.main()
|
|
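The prng test leans on uint32 wraparound; plain NumPy reproduces the same modular arithmetic, which is effectively what the compiled function is compared against. A sketch, vectorized over the whole array instead of looping per element:

import numpy as np

def prng_numpy(X, A, C, iters=100):
    A = np.uint32(A)  # keep the arithmetic in uint32 so it wraps mod 2**32
    C = np.uint32(C)
    for _ in range(iters):
        X[:] = A * X + C
    return X

X = np.arange(100, dtype=np.uint32)
prng_numpy(X, 1664525, 1013904223)
assert (X >= 0).all()  # unsigned results never go negative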
c90b44a4bf974f6c3c8726960f84517c2ab1c363
|
modules/api_base.py
|
modules/api_base.py
|
#! /usr/bin/env python2.7
import requests
class API(object):
# This class should be inherited by more unique API specific classes
# See kraken.py for an example
def __init__(self, base_url, default_headers):
self.base_url = base_url
self.headers = default_headers
# Define all HTTP verb calls
# Each return the status of the call (True/False), and the return payload
def get(self, endpoint, headers = None, **kwargs):
if headers:
# Update the base headers with any new headers
# But w/o changing the base ones
headers = self.update_headers(headers)
url = self.base_url + endpoint
try:
data = requests.get(url=url, headers=headers, **kwargs)
data.raise_for_status()
data_decode = data.json()
return True, data_decode
except requests.exceptions.HTTPError:
return False, data
def post(self, endpoint, data, headers = None, **kwargs):
if headers:
headers = self.update_headers(headers)
url = self.base_url + endpoint
try:
data = requests.post(url=url, headers=headers, data=data, **kwargs)
data.raise_for_status()
data_decode = data.json()
return True, data_decode
except requests.exceptions.HTTPError:
return False, data
def put(self, endpoint, data, headers = None, **kwargs):
if headers:
headers = self.update_headers(headers)
url = self.base_url + endpoint
try:
data = requests.put(url=url, headers=headers, data=data, **kwargs)
data.raise_for_status()
data_decode = data.json()
return True, data_decode
except requests.exceptions.HTTPError:
return False, data
# Worker functions
def update_headers(self, headers):
tmp = self.headers.copy()
tmp.update(headers)
return tmp
|
Add simple json api wrapper
|
Add simple json api wrapper
|
Python
|
mit
|
BatedUrGonnaDie/salty_bot
|
Add simple json api wrapper
|
#! /usr/bin/env python2.7
import requests
class API(object):
# This class should be inherited by more unique API specific classes
# See kraken.py for an example
def __init__(self, base_url, default_headers):
self.base_url = base_url
self.headers = default_headers
# Define all HTTP verb calls
# Each return the status of the call (True/False), and the return payload
def get(self, endpoint, headers = None, **kwargs):
if headers:
# Update the base headers with any new headers
# But w/o changing the base ones
headers = self.update_headers(headers)
url = self.base_url + endpoint
try:
data = requests.get(url=url, headers=headers, **kwargs)
data.raise_for_status()
data_decode = data.json()
return True, data_decode
except requests.exceptions.HTTPError:
return False, data
def post(self, endpoint, data, headers = None, **kwargs):
if headers:
headers = self.update_headers(headers)
url = self.base_url + endpoint
try:
data = requests.post(url=url, headers=headers, data=data, **kwargs)
data.raise_for_status()
data_decode = data.json()
return True, data_decode
except requests.exceptions.HTTPError:
return False, data
def put(self, endpoint, data, headers = None, **kwargs):
if headers:
headers = self.update_headers(headers)
url = self.base_url + endpoint
try:
data = requests.put(url=url, headers=headers, data=data, **kwargs)
data.raise_for_status()
data_decode = data.json()
return True, data_decode
except requests.exceptions.HTTPError:
return False, data
# Worker functions
def update_headers(self, headers):
tmp = self.headers.copy()
tmp.update(headers)
return tmp
|
<commit_before><commit_msg>Add simple json api wrapper<commit_after>
|
#! /usr/bin/env python2.7
import requests
class API(object):
# This class should be inherited by more unique API specific classes
# See kraken.py for an example
def __init__(self, base_url, default_headers):
self.base_url = base_url
self.headers = default_headers
# Define all HTTP verb calls
# Each return the status of the call (True/False), and the return payload
def get(self, endpoint, headers = None, **kwargs):
if headers:
# Update the base headers with any new headers
# But w/o changing the base ones
headers = self.update_headers(headers)
url = self.base_url + endpoint
try:
data = requests.get(url=url, headers=headers, **kwargs)
data.raise_for_status()
data_decode = data.json()
return True, data_decode
except requests.exceptions.HTTPError:
return False, data
def post(self, endpoint, data, headers = None, **kwargs):
if headers:
headers = self.update_headers(headers)
url = self.base_url + endpoint
try:
data = requests.post(url=url, headers=headers, data=data, **kwargs)
data.raise_for_status()
data_decode = data.json()
return True, data_decode
except requests.exceptions.HTTPError:
return False, data
def put(self, endpoint, data, headers = None, **kwargs):
if headers:
headers = self.update_headers(headers)
url = self.base_url + endpoint
try:
data = requests.put(url=url, headers=headers, data=data, **kwargs)
data.raise_for_status()
data_decode = data.json()
return True, data_decode
except requests.exceptions.HTTPError:
return False, data
# Worker functions
def update_headers(self, headers):
tmp = self.headers.copy()
tmp.update(headers)
return tmp
|
Add simple json api wrapper#! /usr/bin/env python2.7
import requests
class API(object):
# This class should be inherited by more unique API specific classes
# See kraken.py for an example
def __init__(self, base_url, default_headers):
self.base_url = base_url
self.headers = default_headers
# Define all HTTP verb calls
# Each return the status of the call (True/False), and the return payload
def get(self, endpoint, headers = None, **kwargs):
if headers:
# Update the base headers with any new headers
# But w/o changing the base ones
headers = self.update_headers(headers)
url = self.base_url + endpoint
try:
data = requests.get(url=url, headers=headers, **kwargs)
data.raise_for_status()
data_decode = data.json()
return True, data_decode
except requests.exceptions.HTTPError:
return False, data
def post(self, endpoint, data, headers = None, **kwargs):
if headers:
headers = self.update_headers(headers)
url = self.base_url + endpoint
try:
data = requests.post(url=url, headers=headers, data=data, **kwargs)
data.raise_for_status()
data_decode = data.json()
return True, data_decode
except requests.exceptions.HTTPError:
return False, data
def put(self, endpoint, data, headers = None, **kwargs):
if headers:
headers = self.update_headers(headers)
url = self.base_url + endpoint
try:
data = requests.put(url=url, headers=headers, data=data, **kwargs)
data.raise_for_status()
data_decode = data.json()
return True, data_decode
except requests.exceptions.HTTPError:
return False, data
# Worker functions
def update_headers(self, headers):
tmp = self.headers.copy()
tmp.update(headers)
return tmp
|
<commit_before><commit_msg>Add simple json api wrapper<commit_after>#! /usr/bin/env python2.7
import requests
class API(object):
# This class should be inherited by more unique API specific classes
# See kraken.py for an example
def __init__(self, base_url, default_headers):
self.base_url = base_url
self.headers = default_headers
# Define all HTTP verb calls
# Each return the status of the call (True/False), and the return payload
def get(self, endpoint, headers = None, **kwargs):
if headers:
# Update the base headers with any new headers
# But w/o changing the base ones
headers = self.update_headers(headers)
url = self.base_url + endpoint
try:
data = requests.get(url=url, headers=headers, **kwargs)
data.raise_for_status()
data_decode = data.json()
return True, data_decode
except requests.exceptions.HTTPError:
return False, data
def post(self, endpoint, data, headers = None, **kwargs):
if headers:
headers = self.update_headers(headers)
url = self.base_url + endpoint
try:
data = requests.post(url=url, headers=headers, data=data, **kwargs)
data.raise_for_status()
data_decode = data.json()
return True, data_decode
except requests.exceptions.HTTPError:
return False, data
def put(self, endpoint, data, headers = None, **kwargs):
if headers:
headers = self.update_headers(headers)
url = self.base_url + endpoint
try:
data = requests.put(url=url, headers=headers, data=data, **kwargs)
data.raise_for_status()
data_decode = data.json()
return True, data_decode
except requests.exceptions.HTTPError:
return False, data
    # Worker functions
    def update_headers(self, headers):
        tmp = self.headers.copy()
        # dict.update() returns None, so update the copy first, then return it
        tmp.update(headers)
        return tmp
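A minimal usage sketch for the wrapper above; the base URL, endpoint and header values are hypothetical and not part of the commit:
api = API('https://api.example.com', {'User-Agent': 'my-bot/0.1'})
ok, payload = api.get('/v1/time', headers={'Accept': 'application/json'})
if ok:
    result = payload              # decoded JSON on success
else:
    result = payload.status_code  # raw requests.Response on HTTP error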
|
|
2d4da5b24f8a01b7e3a09ecb9f8f55d695d01a5e
|
apns/tests/test_utils.py
|
apns/tests/test_utils.py
|
from datetime import datetime
from twisted.trial.unittest import TestCase
from apns.utils import datetime_to_timestamp
class DatetimeToTimestampTestCase(TestCase):
def test_datetime(self):
now = datetime.now()
timestamp = datetime_to_timestamp(now)
self.assertEqual(datetime.fromtimestamp(timestamp),
now.replace(microsecond=0))
|
Add tests to utils module
|
Add tests to utils module
|
Python
|
mit
|
operasoftware/twisted-apns,operasoftware/twisted-apns
|
Add tests to utils module
|
from datetime import datetime
from twisted.trial.unittest import TestCase
from apns.utils import datetime_to_timestamp
class DatetimeToTimestampTestCase(TestCase):
def test_datetime(self):
now = datetime.now()
timestamp = datetime_to_timestamp(now)
self.assertEqual(datetime.fromtimestamp(timestamp),
now.replace(microsecond=0))
|
<commit_before><commit_msg>Add tests to utils module<commit_after>
|
from datetime import datetime
from twisted.trial.unittest import TestCase
from apns.utils import datetime_to_timestamp
class DatetimeToTimestampTestCase(TestCase):
def test_datetime(self):
now = datetime.now()
timestamp = datetime_to_timestamp(now)
self.assertEqual(datetime.fromtimestamp(timestamp),
now.replace(microsecond=0))
|
Add tests to utils modulefrom datetime import datetime
from twisted.trial.unittest import TestCase
from apns.utils import datetime_to_timestamp
class DatetimeToTimestampTestCase(TestCase):
def test_datetime(self):
now = datetime.now()
timestamp = datetime_to_timestamp(now)
self.assertEqual(datetime.fromtimestamp(timestamp),
now.replace(microsecond=0))
|
<commit_before><commit_msg>Add tests to utils module<commit_after>from datetime import datetime
from twisted.trial.unittest import TestCase
from apns.utils import datetime_to_timestamp
class DatetimeToTimestampTestCase(TestCase):
def test_datetime(self):
now = datetime.now()
timestamp = datetime_to_timestamp(now)
self.assertEqual(datetime.fromtimestamp(timestamp),
now.replace(microsecond=0))
|
|
fd85f2ef68e91aea6acde2d665950ec105e1d015
|
bluesky/tests/test_progress_bar.py
|
bluesky/tests/test_progress_bar.py
|
from bluesky.utils import ProgressBar, ProgressBarManager
from bluesky.plans import mv
from bluesky import RunEngine
from bluesky.examples import NullStatus, SimpleStatus, Mover
from collections import OrderedDict
import time
def test_status_without_watch():
st = NullStatus()
ProgressBar([st])
def test_status_with_name():
st = SimpleStatus()
pbar = ProgressBar([st])
st._finished()
st = SimpleStatus()
pbar = ProgressBar([st])
assert pbar.delay_draw == 0.2
time.sleep(0.25)
st._finished()
def test_mv_progress(fresh_RE):
RE = fresh_RE
RE.waiting_hook = ProgressBarManager()
motor1 = Mover('motor1', OrderedDict([('motor1', lambda x: x),
('motor1_setpoint', lambda x: x)]),
{'x': 0})
motor2 = Mover('motor2', OrderedDict([('motor2', lambda x: x),
('motor2_setpoint', lambda x: x)]),
{'x': 0})
assert RE.waiting_hook.delay_draw == 0.2
# moving time > delay_draw
motor1._fake_sleep = 0.5
    motor2._fake_sleep = 0.5
RE(mv(motor1, 0, motor2, 0))
# moving time < delay_draw
motor1._fake_sleep = 0.01
    motor2._fake_sleep = 0.01
RE(mv(motor1, 0, motor2, 0))
|
Add test to cover ProgressBar and ProgressBarManager.
|
TST: Add test to cover ProgressBar and ProgressBarManager.
|
Python
|
bsd-3-clause
|
ericdill/bluesky,ericdill/bluesky
|
TST: Add test to cover ProgressBar and ProgressBarManager.
|
from bluesky.utils import ProgressBar, ProgressBarManager
from bluesky.plans import mv
from bluesky import RunEngine
from bluesky.examples import NullStatus, SimpleStatus, Mover
from collections import OrderedDict
import time
def test_status_without_watch():
st = NullStatus()
ProgressBar([st])
def test_status_with_name():
st = SimpleStatus()
pbar = ProgressBar([st])
st._finished()
st = SimpleStatus()
pbar = ProgressBar([st])
assert pbar.delay_draw == 0.2
time.sleep(0.25)
st._finished()
def test_mv_progress(fresh_RE):
RE = fresh_RE
RE.waiting_hook = ProgressBarManager()
motor1 = Mover('motor1', OrderedDict([('motor1', lambda x: x),
('motor1_setpoint', lambda x: x)]),
{'x': 0})
motor2 = Mover('motor2', OrderedDict([('motor2', lambda x: x),
('motor2_setpoint', lambda x: x)]),
{'x': 0})
assert RE.waiting_hook.delay_draw == 0.2
# moving time > delay_draw
motor1._fake_sleep = 0.5
    motor2._fake_sleep = 0.5
RE(mv(motor1, 0, motor2, 0))
# moving time < delay_draw
motor1._fake_sleep = 0.01
    motor2._fake_sleep = 0.01
RE(mv(motor1, 0, motor2, 0))
|
<commit_before><commit_msg>TST: Add test to cover ProgressBar and ProgressBarManager.<commit_after>
|
from bluesky.utils import ProgressBar, ProgressBarManager
from bluesky.plans import mv
from bluesky import RunEngine
from bluesky.examples import NullStatus, SimpleStatus, Mover
from collections import OrderedDict
import time
def test_status_without_watch():
st = NullStatus()
ProgressBar([st])
def test_status_with_name():
st = SimpleStatus()
pbar = ProgressBar([st])
st._finished()
st = SimpleStatus()
pbar = ProgressBar([st])
assert pbar.delay_draw == 0.2
time.sleep(0.25)
st._finished()
def test_mv_progress(fresh_RE):
RE = fresh_RE
RE.waiting_hook = ProgressBarManager()
motor1 = Mover('motor1', OrderedDict([('motor1', lambda x: x),
('motor1_setpoint', lambda x: x)]),
{'x': 0})
motor2 = Mover('motor2', OrderedDict([('motor2', lambda x: x),
('motor2_setpoint', lambda x: x)]),
{'x': 0})
assert RE.waiting_hook.delay_draw == 0.2
# moving time > delay_draw
motor1._fake_sleep = 0.5
    motor2._fake_sleep = 0.5
RE(mv(motor1, 0, motor2, 0))
# moving time < delay_draw
motor1._fake_sleep = 0.01
    motor2._fake_sleep = 0.01
RE(mv(motor1, 0, motor2, 0))
|
TST: Add test to cover ProgressBar and ProgressBarManager.from bluesky.utils import ProgressBar, ProgressBarManager
from bluesky.plans import mv
from bluesky import RunEngine
from bluesky.examples import NullStatus, SimpleStatus, Mover
from collections import OrderedDict
import time
def test_status_without_watch():
st = NullStatus()
ProgressBar([st])
def test_status_with_name():
st = SimpleStatus()
pbar = ProgressBar([st])
st._finished()
st = SimpleStatus()
pbar = ProgressBar([st])
assert pbar.delay_draw == 0.2
time.sleep(0.25)
st._finished()
def test_mv_progress(fresh_RE):
RE = fresh_RE
RE.waiting_hook = ProgressBarManager()
motor1 = Mover('motor1', OrderedDict([('motor1', lambda x: x),
('motor1_setpoint', lambda x: x)]),
{'x': 0})
motor2 = Mover('motor2', OrderedDict([('motor2', lambda x: x),
('motor2_setpoint', lambda x: x)]),
{'x': 0})
assert RE.waiting_hook.delay_draw == 0.2
# moving time > delay_draw
motor1._fake_sleep = 0.5
    motor2._fake_sleep = 0.5
RE(mv(motor1, 0, motor2, 0))
# moving time < delay_draw
motor1._fake_sleep = 0.01
    motor2._fake_sleep = 0.01
RE(mv(motor1, 0, motor2, 0))
|
<commit_before><commit_msg>TST: Add test to cover ProgressBar and ProgressBarManager.<commit_after>from bluesky.utils import ProgressBar, ProgressBarManager
from bluesky.plans import mv
from bluesky import RunEngine
from bluesky.examples import NullStatus, SimpleStatus, Mover
from collections import OrderedDict
import time
def test_status_without_watch():
st = NullStatus()
ProgressBar([st])
def test_status_with_name():
st = SimpleStatus()
pbar = ProgressBar([st])
st._finished()
st = SimpleStatus()
pbar = ProgressBar([st])
assert pbar.delay_draw == 0.2
time.sleep(0.25)
st._finished()
def test_mv_progress(fresh_RE):
RE = fresh_RE
RE.waiting_hook = ProgressBarManager()
motor1 = Mover('motor1', OrderedDict([('motor1', lambda x: x),
('motor1_setpoint', lambda x: x)]),
{'x': 0})
motor2 = Mover('motor2', OrderedDict([('motor2', lambda x: x),
('motor2_setpoint', lambda x: x)]),
{'x': 0})
assert RE.waiting_hook.delay_draw == 0.2
# moving time > delay_draw
motor1._fake_sleep = 0.5
    motor2._fake_sleep = 0.5
RE(mv(motor1, 0, motor2, 0))
# moving time < delay_draw
motor1._fake_sleep = 0.01
    motor2._fake_sleep = 0.01
RE(mv(motor1, 0, motor2, 0))
|
|
516f4ff71a062247bb026bf43257362c7386e403
|
test.py
|
test.py
|
# test for logic
import sys, operator, random
Base1 = sys.argv[1]
Base2 = sys.argv[2]
Base3 = sys.argv[3]
Base4 = sys.argv[4]
Base5 = sys.argv[5]
Base6 = sys.argv[6]
Level = 10
#relation = {1: Base1, 2: Base2, 3: Base3, 4: Base4, 5: Base5, 6: Base6}
relation = [(1, int(Base1)), (2, int(Base2)), (3, int(Base3)), (4, int(Base4)), (5, int(Base5)), (6, int(Base6))]
#sorted_relation = sorted(relation.items(), key=operator.itemgetter(1))
def getKey(item):
return item[1]
new_relation = sorted(relation, key=getKey)
sorted_relation = new_relation[::-1]
print "Starting Stat table\n", sorted_relation
def statup():
StatPoint = Level-1
while StatPoint > 0:
# Stat = random.choice(sorted_relation)
StatI = random.randrange(len(sorted_relation))
print StatI
Stat = sorted_relation[StatI]
print Stat
StatN = int(Stat[0])
print StatN
StatV = int(Stat[1])
print StatV
prevstat = StatI - 1
# if StatI > 0:
# print "error Stat is less than 0!"
# continue
        if StatI == 0:
            print "Highest Stat"
            NewV = StatV + 1
            print NewV
            # store back as a (stat, value) tuple at the same list index
            sorted_relation[StatI] = (StatN, NewV)
            StatPoint -= 1
            continue
        elif StatV > sorted_relation[prevstat][1]:
            # compare against the previous entry's value, not the tuple
            print "Greater than base relation allows"
            continue
        elif StatV <= sorted_relation[prevstat][1]:
            print "Adding a point to", StatN
            NewV = StatV + 1
            print StatV
            print NewV
            sorted_relation[StatI] = (StatN, NewV)
            StatPoint -= 1
            continue
statup()
print "Ending Stat table\n", sorted_relation
|
Test script for logic added
|
Test script for logic added
|
Python
|
apache-2.0
|
Phixia/WildEncounter
|
Test script for logic added
|
# test for logic
import sys, operator, random
Base1 = sys.argv[1]
Base2 = sys.argv[2]
Base3 = sys.argv[3]
Base4 = sys.argv[4]
Base5 = sys.argv[5]
Base6 = sys.argv[6]
Level = 10
#relation = {1: Base1, 2: Base2, 3: Base3, 4: Base4, 5: Base5, 6: Base6}
relation = [(1, int(Base1)), (2, int(Base2)), (3, int(Base3)), (4, int(Base4)), (5, int(Base5)), (6, int(Base6))]
#sorted_relation = sorted(relation.items(), key=operator.itemgetter(1))
def getKey(item):
return item[1]
new_relation = sorted(relation, key=getKey)
sorted_relation = new_relation[::-1]
print "Starting Stat table\n", sorted_relation
def statup():
StatPoint = Level-1
while StatPoint > 0:
# Stat = random.choice(sorted_relation)
StatI = random.randrange(len(sorted_relation))
print StatI
Stat = sorted_relation[StatI]
print Stat
StatN = int(Stat[0])
print StatN
StatV = int(Stat[1])
print StatV
prevstat = StatI - 1
# if StatI > 0:
# print "error Stat is less than 0!"
# continue
        if StatI == 0:
            print "Highest Stat"
            NewV = StatV + 1
            print NewV
            # store back as a (stat, value) tuple at the same list index
            sorted_relation[StatI] = (StatN, NewV)
            StatPoint -= 1
            continue
        elif StatV > sorted_relation[prevstat][1]:
            # compare against the previous entry's value, not the tuple
            print "Greater than base relation allows"
            continue
        elif StatV <= sorted_relation[prevstat][1]:
            print "Adding a point to", StatN
            NewV = StatV + 1
            print StatV
            print NewV
            sorted_relation[StatI] = (StatN, NewV)
            StatPoint -= 1
            continue
statup()
print "Ending Stat table\n", sorted_relation
|
<commit_before><commit_msg>Test script for logic added<commit_after>
|
# test for logic
import sys, operator, random
Base1 = sys.argv[1]
Base2 = sys.argv[2]
Base3 = sys.argv[3]
Base4 = sys.argv[4]
Base5 = sys.argv[5]
Base6 = sys.argv[6]
Level = 10
#relation = {1: Base1, 2: Base2, 3: Base3, 4: Base4, 5: Base5, 6: Base6}
relation = [(1, int(Base1)), (2, int(Base2)), (3, int(Base3)), (4, int(Base4)), (5, int(Base5)), (6, int(Base6))]
#sorted_relation = sorted(relation.items(), key=operator.itemgetter(1))
def getKey(item):
return item[1]
new_relation = sorted(relation, key=getKey)
sorted_relation = new_relation[::-1]
print "Starting Stat table\n", sorted_relation
def statup():
StatPoint = Level-1
while StatPoint > 0:
# Stat = random.choice(sorted_relation)
StatI = random.randrange(len(sorted_relation))
print StatI
Stat = sorted_relation[StatI]
print Stat
StatN = int(Stat[0])
print StatN
StatV = int(Stat[1])
print StatV
prevstat = StatI - 1
# if StatI > 0:
# print "error Stat is less than 0!"
# continue
        if StatI == 0:
            print "Highest Stat"
            NewV = StatV + 1
            print NewV
            # store back as a (stat, value) tuple at the same list index
            sorted_relation[StatI] = (StatN, NewV)
            StatPoint -= 1
            continue
        elif StatV > sorted_relation[prevstat][1]:
            # compare against the previous entry's value, not the tuple
            print "Greater than base relation allows"
            continue
        elif StatV <= sorted_relation[prevstat][1]:
            print "Adding a point to", StatN
            NewV = StatV + 1
            print StatV
            print NewV
            sorted_relation[StatI] = (StatN, NewV)
            StatPoint -= 1
            continue
statup()
print "Ending Stat table\n", sorted_relation
|
Test script for logic added# test for logic
import sys, operator, random
Base1 = sys.argv[1]
Base2 = sys.argv[2]
Base3 = sys.argv[3]
Base4 = sys.argv[4]
Base5 = sys.argv[5]
Base6 = sys.argv[6]
Level = 10
#relation = {1: Base1, 2: Base2, 3: Base3, 4: Base4, 5: Base5, 6: Base6}
relation = [(1, int(Base1)), (2, int(Base2)), (3, int(Base3)), (4, int(Base4)), (5, int(Base5)), (6, int(Base6))]
#sorted_relation = sorted(relation.items(), key=operator.itemgetter(1))
def getKey(item):
return item[1]
new_relation = sorted(relation, key=getKey)
sorted_relation = new_relation[::-1]
print "Starting Stat table\n", sorted_relation
def statup():
StatPoint = Level-1
while StatPoint > 0:
# Stat = random.choice(sorted_relation)
StatI = random.randrange(len(sorted_relation))
print StatI
Stat = sorted_relation[StatI]
print Stat
StatN = int(Stat[0])
print StatN
StatV = int(Stat[1])
print StatV
prevstat = StatI - 1
# if StatI > 0:
# print "error Stat is less than 0!"
# continue
        if StatI == 0:
            print "Highest Stat"
            NewV = StatV + 1
            print NewV
            # store back as a (stat, value) tuple at the same list index
            sorted_relation[StatI] = (StatN, NewV)
            StatPoint -= 1
            continue
        elif StatV > sorted_relation[prevstat][1]:
            # compare against the previous entry's value, not the tuple
            print "Greater than base relation allows"
            continue
        elif StatV <= sorted_relation[prevstat][1]:
            print "Adding a point to", StatN
            NewV = StatV + 1
            print StatV
            print NewV
            sorted_relation[StatI] = (StatN, NewV)
            StatPoint -= 1
            continue
statup()
print "Ending Stat table\n", sorted_relation
|
<commit_before><commit_msg>Test script for logic added<commit_after># test for logic
import sys, operator, random
Base1 = sys.argv[1]
Base2 = sys.argv[2]
Base3 = sys.argv[3]
Base4 = sys.argv[4]
Base5 = sys.argv[5]
Base6 = sys.argv[6]
Level = 10
#relation = {1: Base1, 2: Base2, 3: Base3, 4: Base4, 5: Base5, 6: Base6}
relation = [(1, int(Base1)), (2, int(Base2)), (3, int(Base3)), (4, int(Base4)), (5, int(Base5)), (6, int(Base6))]
#sorted_relation = sorted(relation.items(), key=operator.itemgetter(1))
def getKey(item):
return item[1]
new_relation = sorted(relation, key=getKey)
sorted_relation = new_relation[::-1]
print "Starting Stat table\n", sorted_relation
def statup():
StatPoint = Level-1
while StatPoint > 0:
# Stat = random.choice(sorted_relation)
StatI = random.randrange(len(sorted_relation))
print StatI
Stat = sorted_relation[StatI]
print Stat
StatN = int(Stat[0])
print StatN
StatV = int(Stat[1])
print StatV
prevstat = StatI - 1
# if StatI > 0:
# print "error Stat is less than 0!"
# continue
        if StatI == 0:
            print "Highest Stat"
            NewV = StatV + 1
            print NewV
            # store back as a (stat, value) tuple at the same list index
            sorted_relation[StatI] = (StatN, NewV)
            StatPoint -= 1
            continue
        elif StatV > sorted_relation[prevstat][1]:
            # compare against the previous entry's value, not the tuple
            print "Greater than base relation allows"
            continue
        elif StatV <= sorted_relation[prevstat][1]:
            print "Adding a point to", StatN
            NewV = StatV + 1
            print StatV
            print NewV
            sorted_relation[StatI] = (StatN, NewV)
            StatPoint -= 1
            continue
statup()
print "Ending Stat table\n", sorted_relation
|
|
eb44db8a4dbe3e6c261497e249d984059b1e32fe
|
infinity.py
|
infinity.py
|
try:
from functools import total_ordering
except ImportError:
# Use Python 2.6 port
from total_ordering import total_ordering
@total_ordering
class Infinity(object):
"""
An object that is greater than any other object (except itself).
Inspired by https://pypi.python.org/pypi/Extremes
Examples::
Infinity can be compared to any object:
>>> from infinity import inf
>>> import sys
>>> inf > -sys.maxint
True
>>> inf > None
True
>>> inf > ''
True
    >>> from datetime import datetime
    >>> inf > datetime(2000, 2, 2)
    True
"""
def __init__(self, positive=True):
self.positive = positive
def __neg__(self):
return Infinity(not self.positive)
def __gt__(self, other):
if isinstance(other, self.__class__) and other.positive == self.positive:
return False
return self.positive
def __eq__(self, other):
if isinstance(other, self.__class__) and other.positive == self.positive:
return True
return False
def __ne__(self, other):
return not (self == other)
def __bool__(self):
return self.positive
def __nonzero__(self):
return self.positive
def __str__(self):
return '%sinf' % ('' if self.positive else '-')
inf = Infinity()
|
Add first draft for Infinity class
|
Add first draft for Infinity class
|
Python
|
bsd-3-clause
|
kvesteri/infinity
|
Add first draft for Infinity class
|
try:
from functools import total_ordering
except ImportError:
# Use Python 2.6 port
from total_ordering import total_ordering
@total_ordering
class Infinity(object):
"""
An object that is greater than any other object (except itself).
Inspired by https://pypi.python.org/pypi/Extremes
Examples::
Infinity can be compared to any object:
>>> from infinity import inf
>>> import sys
>>> inf > -sys.maxint
True
>>> inf > None
True
>>> inf > ''
True
    >>> from datetime import datetime
    >>> inf > datetime(2000, 2, 2)
    True
"""
def __init__(self, positive=True):
self.positive = positive
def __neg__(self):
return Infinity(not self.positive)
def __gt__(self, other):
if isinstance(other, self.__class__) and other.positive == self.positive:
return False
return self.positive
def __eq__(self, other):
if isinstance(other, self.__class__) and other.positive == self.positive:
return True
return False
def __ne__(self, other):
return not (self == other)
def __bool__(self):
return self.positive
def __nonzero__(self):
return self.positive
def __str__(self):
return '%sinf' % ('' if self.positive else '-')
inf = Infinity()
|
<commit_before><commit_msg>Add first draft for Infinity class<commit_after>
|
try:
from functools import total_ordering
except ImportError:
# Use Python 2.6 port
from total_ordering import total_ordering
@total_ordering
class Infinity(object):
"""
An object that is greater than any other object (except itself).
Inspired by https://pypi.python.org/pypi/Extremes
Examples::
Infinity can be compared to any object:
>>> from infinity import inf
>>> import sys
>>> inf > -sys.maxint
True
>>> inf > None
True
>>> inf > ''
True
    >>> from datetime import datetime
    >>> inf > datetime(2000, 2, 2)
    True
"""
def __init__(self, positive=True):
self.positive = positive
def __neg__(self):
return Infinity(not self.positive)
def __gt__(self, other):
if isinstance(other, self.__class__) and other.positive == self.positive:
return False
return self.positive
def __eq__(self, other):
if isinstance(other, self.__class__) and other.positive == self.positive:
return True
return False
def __ne__(self, other):
return not (self == other)
def __bool__(self):
return self.positive
def __nonzero__(self):
return self.positive
def __str__(self):
return '%sinf' % ('' if self.positive else '-')
inf = Infinity()
|
Add first draft for Infinity classtry:
from functools import total_ordering
except ImportError:
# Use Python 2.6 port
from total_ordering import total_ordering
@total_ordering
class Infinity(object):
"""
An object that is greater than any other object (except itself).
Inspired by https://pypi.python.org/pypi/Extremes
Examples::
Infinity can be compared to any object:
>>> from infinity import inf
>>> import sys
>>> inf > -sys.maxint
True
>>> inf > None
True
>>> inf > ''
True
    >>> from datetime import datetime
    >>> inf > datetime(2000, 2, 2)
    True
"""
def __init__(self, positive=True):
self.positive = positive
def __neg__(self):
return Infinity(not self.positive)
def __gt__(self, other):
if isinstance(other, self.__class__) and other.positive == self.positive:
return False
return self.positive
def __eq__(self, other):
if isinstance(other, self.__class__) and other.positive == self.positive:
return True
return False
def __ne__(self, other):
return not (self == other)
def __bool__(self):
return self.positive
def __nonzero__(self):
return self.positive
def __str__(self):
return '%sinf' % ('' if self.positive else '-')
inf = Infinity()
|
<commit_before><commit_msg>Add first draft for Infinity class<commit_after>try:
from functools import total_ordering
except ImportError:
# Use Python 2.6 port
from total_ordering import total_ordering
@total_ordering
class Infinity(object):
"""
An object that is greater than any other object (except itself).
Inspired by https://pypi.python.org/pypi/Extremes
Examples::
Infinity can be compared to any object:
>>> from infinity import inf
>>> import sys
>>> inf > -sys.maxint
True
>>> inf > None
True
>>> inf > ''
True
    >>> from datetime import datetime
    >>> inf > datetime(2000, 2, 2)
    True
"""
def __init__(self, positive=True):
self.positive = positive
def __neg__(self):
return Infinity(not self.positive)
def __gt__(self, other):
if isinstance(other, self.__class__) and other.positive == self.positive:
return False
return self.positive
def __eq__(self, other):
if isinstance(other, self.__class__) and other.positive == self.positive:
return True
return False
def __ne__(self, other):
return not (self == other)
def __bool__(self):
return self.positive
def __nonzero__(self):
return self.positive
def __str__(self):
return '%sinf' % ('' if self.positive else '-')
inf = Infinity()
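A short usage sketch for the class above, assuming the module is importable as infinity:
from infinity import inf, Infinity
assert inf > 10 ** 100      # greater than any finite number
assert -inf < ''            # the negated instance is smaller than anything
assert inf == Infinity()    # two positive infinities compare equal
assert str(-inf) == '-inf'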
|
|
20c3a662e276767e3ed36acc15e7f450737d5e80
|
ircutils.py
|
ircutils.py
|
""" Collection of helper functions
"""
def privmsg(to, msg):
return 'PRIVMSG %s :%s' % (to, msg)
def join(channel):
return 'JOIN %s' % channel
def leave(channel, reason=''):
return 'PART %s :%s' % (channel, reason)
def quit(reason=''):
return 'QUIT :%s' % reason
def topic(channel, topic=''):
if topic:
return 'TOPIC %s :%s' % (channel, topic)
else:
return 'TOPIC %s' % channel
def mode(mode, target):
    targets = target.split()
    if len(targets) != 0:
        return 'MODE %s %s' % (len(targets) * mode, ' '.join(targets))
    else:
        # no targets given; formatting the empty list would yield 'MODE []'
        return 'MODE %s' % mode
def op(users):
return mode('+o', users)
def deop(users):
return mode('-o', users)
def voice(users):
return mode('+v', users)
def devoice(users):
return mode('-v', users)
def ban(mask):
return mode('+b', mask)
def unban(mask):
return mode('-b', mask)
|
Add some IRC helper functions
|
Add some IRC helper functions
|
Python
|
mit
|
aalien/mib
|
Add some IRC helper functions
|
""" Collection of helper functions
"""
def privmsg(to, msg):
return 'PRIVMSG %s :%s' % (to, msg)
def join(channel):
return 'JOIN %s' % channel
def leave(channel, reason=''):
return 'PART %s :%s' % (channel, reason)
def quit(reason=''):
return 'QUIT :%s' % reason
def topic(channel, topic=''):
if topic:
return 'TOPIC %s :%s' % (channel, topic)
else:
return 'TOPIC %s' % channel
def mode(mode, target):
    targets = target.split()
    if len(targets) != 0:
        return 'MODE %s %s' % (len(targets) * mode, ' '.join(targets))
    else:
        # no targets given; formatting the empty list would yield 'MODE []'
        return 'MODE %s' % mode
def op(users):
return mode('+o', users)
def deop(users):
return mode('-o', users)
def voice(users):
return mode('+v', users)
def devoice(users):
return mode('-v', users)
def ban(mask):
return mode('+b', mask)
def unban(mask):
return mode('-b', mask)
|
<commit_before><commit_msg>Add some IRC helper functions<commit_after>
|
""" Collection of helper functions
"""
def privmsg(to, msg):
return 'PRIVMSG %s :%s' % (to, msg)
def join(channel):
return 'JOIN %s' % channel
def leave(channel, reason=''):
return 'PART %s :%s' % (channel, reason)
def quit(reason=''):
return 'QUIT :%s' % reason
def topic(channel, topic=''):
if topic:
return 'TOPIC %s :%s' % (channel, topic)
else:
return 'TOPIC %s' % channel
def mode(mode, target):
    targets = target.split()
    if len(targets) != 0:
        return 'MODE %s %s' % (len(targets) * mode, ' '.join(targets))
    else:
        # no targets given; formatting the empty list would yield 'MODE []'
        return 'MODE %s' % mode
def op(users):
return mode('+o', users)
def deop(users):
return mode('-o', users)
def voice(users):
return mode('+v', users)
def devoice(users):
return mode('-v', users)
def ban(mask):
return mode('+b', mask)
def unban(mask):
return mode('-b', mask)
|
Add some IRC helper functions""" Collection of helper functions
"""
def privmsg(to, msg):
return 'PRIVMSG %s :%s' % (to, msg)
def join(channel):
return 'JOIN %s' % channel
def leave(channel, reason=''):
return 'PART %s :%s' % (channel, reason)
def quit(reason=''):
return 'QUIT :%s' % reason
def topic(channel, topic=''):
if topic:
return 'TOPIC %s :%s' % (channel, topic)
else:
return 'TOPIC %s' % channel
def mode(mode, target):
    targets = target.split()
    if len(targets) != 0:
        return 'MODE %s %s' % (len(targets) * mode, ' '.join(targets))
    else:
        # no targets given; formatting the empty list would yield 'MODE []'
        return 'MODE %s' % mode
def op(users):
return mode('+o', users)
def deop(users):
return mode('-o', users)
def voice(users):
return mode('+v', users)
def devoice(users):
return mode('-v', users)
def ban(mask):
return mode('+b', mask)
def unban(mask):
return mode('-b', mask)
|
<commit_before><commit_msg>Add some IRC helper functions<commit_after>""" Collection of helper functions
"""
def privmsg(to, msg):
return 'PRIVMSG %s :%s' % (to, msg)
def join(channel):
return 'JOIN %s' % channel
def leave(channel, reason=''):
return 'PART %s :%s' % (channel, reason)
def quit(reason=''):
return 'QUIT :%s' % reason
def topic(channel, topic=''):
if topic:
return 'TOPIC %s :%s' % (channel, topic)
else:
return 'TOPIC %s' % channel
def mode(mode, target):
    targets = target.split()
    if len(targets) != 0:
        return 'MODE %s %s' % (len(targets) * mode, ' '.join(targets))
    else:
        # no targets given; formatting the empty list would yield 'MODE []'
        return 'MODE %s' % mode
def op(users):
return mode('+o', users)
def deop(users):
return mode('-o', users)
def voice(users):
return mode('+v', users)
def devoice(users):
return mode('-v', users)
def ban(mask):
return mode('+b', mask)
def unban(mask):
return mode('-b', mask)
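A quick illustration of the raw lines these helpers produce (channel and nick names are made up; note that mode() emits no channel argument):
assert privmsg('#example', 'hi all') == 'PRIVMSG #example :hi all'
assert join('#example') == 'JOIN #example'
assert op('alice bob') == 'MODE +o+o alice bob'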
|
|
c4e74be4b508c226b907b27b4a00b197289680e0
|
django/applications/catmaid/control/ajax_templates.py
|
django/applications/catmaid/control/ajax_templates.py
|
from django.template.loader_tags import BlockNode, ExtendsNode
from django.template import loader, Context, RequestContext, TextNode
from django.http import HttpResponse
# Most parts of this code have been taken from this Django snippet:
# http://djangosnippets.org/snippets/942/
def get_template(template):
if isinstance(template, (tuple, list)):
return loader.select_template(template)
return loader.get_template(template)
class BlockNotFound(Exception):
pass
def render_template_block(template, block, context):
"""
Renders a single block from a template. This template should have previously been rendered.
"""
return render_template_block_nodelist(template.nodelist, block, context)
def render_template_block_nodelist(nodelist, block, context):
for node in nodelist:
if isinstance(node, BlockNode) and node.name == block:
return node.render(context)
for key in ('nodelist', 'nodelist_true', 'nodelist_false'):
if hasattr(node, key):
try:
return render_template_block_nodelist(getattr(node, key), block, context)
except:
pass
for node in nodelist:
if isinstance(node, ExtendsNode):
try:
return render_template_block(node.get_parent(context), block, context)
except BlockNotFound:
pass
raise BlockNotFound
def render_block_to_string(template_name, block, dictionary=None, context_instance=None):
"""
Loads the given template_name and renders the given block with the given dictionary as
context. Returns a string.
"""
import re
dictionary = dictionary or {}
t = get_template(template_name)
if context_instance:
context_instance.update(dictionary)
else:
context_instance = Context(dictionary)
template_block = render_template_block(t, block, context_instance)
return re.sub(r'\s+', ' ', template_block)
def direct_block_to_template(request, template, block, extra_context=None, mimetype=None, **kwargs):
"""
Render a given block in a given template with any extra URL parameters in the context as
``{{ params }}``.
"""
if extra_context is None:
extra_context = {}
dictionary = {'params': kwargs}
for key, value in extra_context.items():
if callable(value):
dictionary[key] = value()
else:
dictionary[key] = value
c = RequestContext(request, dictionary)
t = get_template(template)
t.render(c)
return HttpResponse(render_template_block(t, block, c), mimetype=mimetype)
|
Add helper methods to render template blocks independently
|
Add helper methods to render template blocks independently
|
Python
|
agpl-3.0
|
htem/CATMAID,fzadow/CATMAID,fzadow/CATMAID,htem/CATMAID,htem/CATMAID,htem/CATMAID,fzadow/CATMAID,fzadow/CATMAID
|
Add helper methods to render template blocks independently
|
from django.template.loader_tags import BlockNode, ExtendsNode
from django.template import loader, Context, RequestContext, TextNode
from django.http import HttpResponse
# Most parts of this code have been taken from this Django snippet:
# http://djangosnippets.org/snippets/942/
def get_template(template):
if isinstance(template, (tuple, list)):
return loader.select_template(template)
return loader.get_template(template)
class BlockNotFound(Exception):
pass
def render_template_block(template, block, context):
"""
Renders a single block from a template. This template should have previously been rendered.
"""
return render_template_block_nodelist(template.nodelist, block, context)
def render_template_block_nodelist(nodelist, block, context):
for node in nodelist:
if isinstance(node, BlockNode) and node.name == block:
return node.render(context)
for key in ('nodelist', 'nodelist_true', 'nodelist_false'):
if hasattr(node, key):
try:
return render_template_block_nodelist(getattr(node, key), block, context)
except:
pass
for node in nodelist:
if isinstance(node, ExtendsNode):
try:
return render_template_block(node.get_parent(context), block, context)
except BlockNotFound:
pass
raise BlockNotFound
def render_block_to_string(template_name, block, dictionary=None, context_instance=None):
"""
Loads the given template_name and renders the given block with the given dictionary as
context. Returns a string.
"""
import re
dictionary = dictionary or {}
t = get_template(template_name)
if context_instance:
context_instance.update(dictionary)
else:
context_instance = Context(dictionary)
template_block = render_template_block(t, block, context_instance)
return re.sub(r'\s+', ' ', template_block)
def direct_block_to_template(request, template, block, extra_context=None, mimetype=None, **kwargs):
"""
Render a given block in a given template with any extra URL parameters in the context as
``{{ params }}``.
"""
if extra_context is None:
extra_context = {}
dictionary = {'params': kwargs}
for key, value in extra_context.items():
if callable(value):
dictionary[key] = value()
else:
dictionary[key] = value
c = RequestContext(request, dictionary)
t = get_template(template)
t.render(c)
return HttpResponse(render_template_block(t, block, c), mimetype=mimetype)
|
<commit_before><commit_msg>Add helper methods to render template blocks independently<commit_after>
|
from django.template.loader_tags import BlockNode, ExtendsNode
from django.template import loader, Context, RequestContext, TextNode
from django.http import HttpResponse
# Most parts of this code have been taken from this Django snippet:
# http://djangosnippets.org/snippets/942/
def get_template(template):
if isinstance(template, (tuple, list)):
return loader.select_template(template)
return loader.get_template(template)
class BlockNotFound(Exception):
pass
def render_template_block(template, block, context):
"""
Renders a single block from a template. This template should have previously been rendered.
"""
return render_template_block_nodelist(template.nodelist, block, context)
def render_template_block_nodelist(nodelist, block, context):
for node in nodelist:
if isinstance(node, BlockNode) and node.name == block:
return node.render(context)
for key in ('nodelist', 'nodelist_true', 'nodelist_false'):
if hasattr(node, key):
try:
return render_template_block_nodelist(getattr(node, key), block, context)
except:
pass
for node in nodelist:
if isinstance(node, ExtendsNode):
try:
return render_template_block(node.get_parent(context), block, context)
except BlockNotFound:
pass
raise BlockNotFound
def render_block_to_string(template_name, block, dictionary=None, context_instance=None):
"""
Loads the given template_name and renders the given block with the given dictionary as
context. Returns a string.
"""
import re
dictionary = dictionary or {}
t = get_template(template_name)
if context_instance:
context_instance.update(dictionary)
else:
context_instance = Context(dictionary)
template_block = render_template_block(t, block, context_instance)
return re.sub(r'\s+', ' ', template_block)
def direct_block_to_template(request, template, block, extra_context=None, mimetype=None, **kwargs):
"""
Render a given block in a given template with any extra URL parameters in the context as
``{{ params }}``.
"""
if extra_context is None:
extra_context = {}
dictionary = {'params': kwargs}
for key, value in extra_context.items():
if callable(value):
dictionary[key] = value()
else:
dictionary[key] = value
c = RequestContext(request, dictionary)
t = get_template(template)
t.render(c)
return HttpResponse(render_template_block(t, block, c), mimetype=mimetype)
|
Add helper methods to render template blocks independentlyfrom django.template.loader_tags import BlockNode, ExtendsNode
from django.template import loader, Context, RequestContext, TextNode
from django.http import HttpResponse
# Most parts of this code have been taken from this Django snippet:
# http://djangosnippets.org/snippets/942/
def get_template(template):
if isinstance(template, (tuple, list)):
return loader.select_template(template)
return loader.get_template(template)
class BlockNotFound(Exception):
pass
def render_template_block(template, block, context):
"""
Renders a single block from a template. This template should have previously been rendered.
"""
return render_template_block_nodelist(template.nodelist, block, context)
def render_template_block_nodelist(nodelist, block, context):
for node in nodelist:
if isinstance(node, BlockNode) and node.name == block:
return node.render(context)
for key in ('nodelist', 'nodelist_true', 'nodelist_false'):
if hasattr(node, key):
try:
return render_template_block_nodelist(getattr(node, key), block, context)
except:
pass
for node in nodelist:
if isinstance(node, ExtendsNode):
try:
return render_template_block(node.get_parent(context), block, context)
except BlockNotFound:
pass
raise BlockNotFound
def render_block_to_string(template_name, block, dictionary=None, context_instance=None):
"""
Loads the given template_name and renders the given block with the given dictionary as
context. Returns a string.
"""
import re
dictionary = dictionary or {}
t = get_template(template_name)
if context_instance:
context_instance.update(dictionary)
else:
context_instance = Context(dictionary)
template_block = render_template_block(t, block, context_instance)
return re.sub(r'\s+', ' ', template_block)
def direct_block_to_template(request, template, block, extra_context=None, mimetype=None, **kwargs):
"""
Render a given block in a given template with any extra URL parameters in the context as
``{{ params }}``.
"""
if extra_context is None:
extra_context = {}
dictionary = {'params': kwargs}
for key, value in extra_context.items():
if callable(value):
dictionary[key] = value()
else:
dictionary[key] = value
c = RequestContext(request, dictionary)
t = get_template(template)
t.render(c)
return HttpResponse(render_template_block(t, block, c), mimetype=mimetype)
|
<commit_before><commit_msg>Add helper methods to render template blocks independently<commit_after>from django.template.loader_tags import BlockNode, ExtendsNode
from django.template import loader, Context, RequestContext, TextNode
from django.http import HttpResponse
# Most parts of this code have been taken from this Django snippet:
# http://djangosnippets.org/snippets/942/
def get_template(template):
if isinstance(template, (tuple, list)):
return loader.select_template(template)
return loader.get_template(template)
class BlockNotFound(Exception):
pass
def render_template_block(template, block, context):
"""
Renders a single block from a template. This template should have previously been rendered.
"""
return render_template_block_nodelist(template.nodelist, block, context)
def render_template_block_nodelist(nodelist, block, context):
for node in nodelist:
if isinstance(node, BlockNode) and node.name == block:
return node.render(context)
for key in ('nodelist', 'nodelist_true', 'nodelist_false'):
if hasattr(node, key):
try:
return render_template_block_nodelist(getattr(node, key), block, context)
except:
pass
for node in nodelist:
if isinstance(node, ExtendsNode):
try:
return render_template_block(node.get_parent(context), block, context)
except BlockNotFound:
pass
raise BlockNotFound
def render_block_to_string(template_name, block, dictionary=None, context_instance=None):
"""
Loads the given template_name and renders the given block with the given dictionary as
context. Returns a string.
"""
import re
dictionary = dictionary or {}
t = get_template(template_name)
if context_instance:
context_instance.update(dictionary)
else:
context_instance = Context(dictionary)
template_block = render_template_block(t, block, context_instance)
return re.sub(r'\s+', ' ', template_block)
def direct_block_to_template(request, template, block, extra_context=None, mimetype=None, **kwargs):
"""
Render a given block in a given template with any extra URL parameters in the context as
``{{ params }}``.
"""
if extra_context is None:
extra_context = {}
dictionary = {'params': kwargs}
for key, value in extra_context.items():
if callable(value):
dictionary[key] = value()
else:
dictionary[key] = value
c = RequestContext(request, dictionary)
t = get_template(template)
t.render(c)
return HttpResponse(render_template_block(t, block, c), mimetype=mimetype)
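A hedged example of calling render_block_to_string; the template and block names are hypothetical:
# Renders only {% block content %} of detail.html with the given context,
# then collapses runs of whitespace as the function above does.
html = render_block_to_string('detail.html', 'content', {'name': 'neuron-1'})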
|
|
0fc5bc7d4582fc2af65e4c87e1962eeb702b051e
|
indra/sources/eidos/scala_utils.py
|
indra/sources/eidos/scala_utils.py
|
from jnius import autoclass
def get_python_list(scala_list):
"""Return list from elements of scala.collection.immutable.List"""
python_list = []
for i in range(scala_list.length()):
python_list.append(scala_list.apply(i))
return python_list
def get_python_dict(scala_map):
"""Return a dict from entries in a scala.collection.immutable.Map"""
python_dict = {}
keys = get_python_list(scala_map.keys().toList())
for key in keys:
python_dict[key] = scala_map.apply(key)
return python_dict
def get_python_json(scala_json):
"""Return a JSON dict from a org.json4s.JsonAST"""
def convert_node(node):
if node.__class__.__name__ == 'org.json4s.JsonAST$JValue':
values_raw = get_python_dict(node.values())
values = {}
for k, v in values_raw.items():
values[k] = convert_node(v)
return values
'''
entries = []
children = get_python_list(node.children())
for child in children:
entries.append(convert_node(child))
return entries
'''
elif node.__class__.__name__ == 'org.json4s.JsonAST$JObject':
values_raw = get_python_dict(node.values())
values = {}
for k, v in values_raw.items():
values[k] = convert_node(v)
return values
elif node.__class__.__name__ == 'org.json4s.JsonAST$JArray':
entries_raw = get_python_list(node.values())
entries = []
for entry in entries_raw:
entries.append(convert_node(entry))
return entries
elif node.__class__.__name__.startswith('scala.collection.immutable.Map') or \
node.__class__.__name__ == 'scala.collection.immutable.HashMap$HashTrieMap':
values_raw = get_python_dict(node)
values = {}
for k, v in values_raw.items():
values[k] = convert_node(v)
return values
elif node.__class__.__name__ == 'scala.collection.immutable.$colon$colon':
entries_raw = get_python_list(node)
entries = []
for entry in entries_raw:
entries.append(convert_node(entry))
return entries
elif node.__class__.__name__ == 'scala.math.BigInt':
return node.intValue()
elif node.__class__.__name__ == 'scala.None$':
return None
elif node.__class__.__name__ == 'scala.collection.immutable.Nil$':
return []
elif isinstance(node, (str, int, float)):
return node
else:
print(node.__class__.__name__)
return node.__class__.__name__
python_json = convert_node(scala_json)
return python_json
|
Add some Scala/Python conversion utilities
|
Add some Scala/Python conversion utilities
|
Python
|
bsd-2-clause
|
johnbachman/indra,bgyori/indra,sorgerlab/belpy,johnbachman/belpy,johnbachman/belpy,sorgerlab/belpy,sorgerlab/belpy,pvtodorov/indra,pvtodorov/indra,johnbachman/indra,bgyori/indra,sorgerlab/indra,bgyori/indra,pvtodorov/indra,sorgerlab/indra,johnbachman/indra,sorgerlab/indra,johnbachman/belpy,pvtodorov/indra
|
Add some Scala/Python conversion utilities
|
from jnius import autoclass
def get_python_list(scala_list):
"""Return list from elements of scala.collection.immutable.List"""
python_list = []
for i in range(scala_list.length()):
python_list.append(scala_list.apply(i))
return python_list
def get_python_dict(scala_map):
"""Return a dict from entries in a scala.collection.immutable.Map"""
python_dict = {}
keys = get_python_list(scala_map.keys().toList())
for key in keys:
python_dict[key] = scala_map.apply(key)
return python_dict
def get_python_json(scala_json):
"""Return a JSON dict from a org.json4s.JsonAST"""
def convert_node(node):
if node.__class__.__name__ == 'org.json4s.JsonAST$JValue':
values_raw = get_python_dict(node.values())
values = {}
for k, v in values_raw.items():
values[k] = convert_node(v)
return values
'''
entries = []
children = get_python_list(node.children())
for child in children:
entries.append(convert_node(child))
return entries
'''
elif node.__class__.__name__ == 'org.json4s.JsonAST$JObject':
values_raw = get_python_dict(node.values())
values = {}
for k, v in values_raw.items():
values[k] = convert_node(v)
return values
elif node.__class__.__name__ == 'org.json4s.JsonAST$JArray':
entries_raw = get_python_list(node.values())
entries = []
for entry in entries_raw:
entries.append(convert_node(entry))
return entries
elif node.__class__.__name__.startswith('scala.collection.immutable.Map') or \
node.__class__.__name__ == 'scala.collection.immutable.HashMap$HashTrieMap':
values_raw = get_python_dict(node)
values = {}
for k, v in values_raw.items():
values[k] = convert_node(v)
return values
elif node.__class__.__name__ == 'scala.collection.immutable.$colon$colon':
entries_raw = get_python_list(node)
entries = []
for entry in entries_raw:
entries.append(convert_node(entry))
return entries
elif node.__class__.__name__ == 'scala.math.BigInt':
return node.intValue()
elif node.__class__.__name__ == 'scala.None$':
return None
elif node.__class__.__name__ == 'scala.collection.immutable.Nil$':
return []
elif isinstance(node, (str, int, float)):
return node
else:
print(node.__class__.__name__)
return node.__class__.__name__
python_json = convert_node(scala_json)
return python_json
|
<commit_before><commit_msg>Add some Scala/Python conversion utilities<commit_after>
|
from jnius import autoclass
def get_python_list(scala_list):
"""Return list from elements of scala.collection.immutable.List"""
python_list = []
for i in range(scala_list.length()):
python_list.append(scala_list.apply(i))
return python_list
def get_python_dict(scala_map):
"""Return a dict from entries in a scala.collection.immutable.Map"""
python_dict = {}
keys = get_python_list(scala_map.keys().toList())
for key in keys:
python_dict[key] = scala_map.apply(key)
return python_dict
def get_python_json(scala_json):
"""Return a JSON dict from a org.json4s.JsonAST"""
def convert_node(node):
if node.__class__.__name__ == 'org.json4s.JsonAST$JValue':
values_raw = get_python_dict(node.values())
values = {}
for k, v in values_raw.items():
values[k] = convert_node(v)
return values
'''
entries = []
children = get_python_list(node.children())
for child in children:
entries.append(convert_node(child))
return entries
'''
elif node.__class__.__name__ == 'org.json4s.JsonAST$JObject':
values_raw = get_python_dict(node.values())
values = {}
for k, v in values_raw.items():
values[k] = convert_node(v)
return values
elif node.__class__.__name__ == 'org.json4s.JsonAST$JArray':
entries_raw = get_python_list(node.values())
entries = []
for entry in entries_raw:
entries.append(convert_node(entry))
return entries
elif node.__class__.__name__.startswith('scala.collection.immutable.Map') or \
node.__class__.__name__ == 'scala.collection.immutable.HashMap$HashTrieMap':
values_raw = get_python_dict(node)
values = {}
for k, v in values_raw.items():
values[k] = convert_node(v)
return values
elif node.__class__.__name__ == 'scala.collection.immutable.$colon$colon':
entries_raw = get_python_list(node)
entries = []
for entry in entries_raw:
entries.append(convert_node(entry))
return entries
elif node.__class__.__name__ == 'scala.math.BigInt':
return node.intValue()
elif node.__class__.__name__ == 'scala.None$':
return None
elif node.__class__.__name__ == 'scala.collection.immutable.Nil$':
return []
elif isinstance(node, (str, int, float)):
return node
else:
print(node.__class__.__name__)
return node.__class__.__name__
python_json = convert_node(scala_json)
return python_json
|
Add some Scala/Python conversion utilitiesfrom jnius import autoclass
def get_python_list(scala_list):
"""Return list from elements of scala.collection.immutable.List"""
python_list = []
for i in range(scala_list.length()):
python_list.append(scala_list.apply(i))
return python_list
def get_python_dict(scala_map):
"""Return a dict from entries in a scala.collection.immutable.Map"""
python_dict = {}
keys = get_python_list(scala_map.keys().toList())
for key in keys:
python_dict[key] = scala_map.apply(key)
return python_dict
def get_python_json(scala_json):
"""Return a JSON dict from a org.json4s.JsonAST"""
def convert_node(node):
if node.__class__.__name__ == 'org.json4s.JsonAST$JValue':
values_raw = get_python_dict(node.values())
values = {}
for k, v in values_raw.items():
values[k] = convert_node(v)
return values
'''
entries = []
children = get_python_list(node.children())
for child in children:
entries.append(convert_node(child))
return entries
'''
elif node.__class__.__name__ == 'org.json4s.JsonAST$JObject':
values_raw = get_python_dict(node.values())
values = {}
for k, v in values_raw.items():
values[k] = convert_node(v)
return values
elif node.__class__.__name__ == 'org.json4s.JsonAST$JArray':
entries_raw = get_python_list(node.values())
entries = []
for entry in entries_raw:
entries.append(convert_node(entry))
return entries
elif node.__class__.__name__.startswith('scala.collection.immutable.Map') or \
node.__class__.__name__ == 'scala.collection.immutable.HashMap$HashTrieMap':
values_raw = get_python_dict(node)
values = {}
for k, v in values_raw.items():
values[k] = convert_node(v)
return values
elif node.__class__.__name__ == 'scala.collection.immutable.$colon$colon':
entries_raw = get_python_list(node)
entries = []
for entry in entries_raw:
entries.append(convert_node(entry))
return entries
elif node.__class__.__name__ == 'scala.math.BigInt':
return node.intValue()
elif node.__class__.__name__ == 'scala.None$':
return None
elif node.__class__.__name__ == 'scala.collection.immutable.Nil$':
return []
elif isinstance(node, (str, int, float)):
return node
else:
print(node.__class__.__name__)
return node.__class__.__name__
python_json = convert_node(scala_json)
return python_json
|
<commit_before><commit_msg>Add some Scala/Python conversion utilities<commit_after>from jnius import autoclass
def get_python_list(scala_list):
"""Return list from elements of scala.collection.immutable.List"""
python_list = []
for i in range(scala_list.length()):
python_list.append(scala_list.apply(i))
return python_list
def get_python_dict(scala_map):
"""Return a dict from entries in a scala.collection.immutable.Map"""
python_dict = {}
keys = get_python_list(scala_map.keys().toList())
for key in keys:
python_dict[key] = scala_map.apply(key)
return python_dict
def get_python_json(scala_json):
"""Return a JSON dict from a org.json4s.JsonAST"""
def convert_node(node):
if node.__class__.__name__ == 'org.json4s.JsonAST$JValue':
values_raw = get_python_dict(node.values())
values = {}
for k, v in values_raw.items():
values[k] = convert_node(v)
return values
'''
entries = []
children = get_python_list(node.children())
for child in children:
entries.append(convert_node(child))
return entries
'''
elif node.__class__.__name__ == 'org.json4s.JsonAST$JObject':
values_raw = get_python_dict(node.values())
values = {}
for k, v in values_raw.items():
values[k] = convert_node(v)
return values
elif node.__class__.__name__ == 'org.json4s.JsonAST$JArray':
entries_raw = get_python_list(node.values())
entries = []
for entry in entries_raw:
entries.append(convert_node(entry))
return entries
elif node.__class__.__name__.startswith('scala.collection.immutable.Map') or \
node.__class__.__name__ == 'scala.collection.immutable.HashMap$HashTrieMap':
values_raw = get_python_dict(node)
values = {}
for k, v in values_raw.items():
values[k] = convert_node(v)
return values
elif node.__class__.__name__ == 'scala.collection.immutable.$colon$colon':
entries_raw = get_python_list(node)
entries = []
for entry in entries_raw:
entries.append(convert_node(entry))
return entries
elif node.__class__.__name__ == 'scala.math.BigInt':
return node.intValue()
elif node.__class__.__name__ == 'scala.None$':
return None
elif node.__class__.__name__ == 'scala.collection.immutable.Nil$':
return []
elif isinstance(node, (str, int, float)):
return node
else:
print(node.__class__.__name__)
return node.__class__.__name__
python_json = convert_node(scala_json)
return python_json
|
|
39beb7b196d67dd1b8e6c3d450e48f7cea409f22
|
system-tests/test_efu.py
|
system-tests/test_efu.py
|
import sys
from os import path
sys.path.append(path.join(path.dirname(path.dirname(path.abspath(__file__))), "utils", "efushell"))
from verifymetrics import verify_metrics
def test_efu_received_all_sent_udp_packets(docker_compose):
for name, op, value, retval in verify_metrics("localhost", "8889", "efu.mbcaen.receive.packets:2000"):
print("Validation failed for {}: expected {} {}, got {}".format(name, op, value, retval), flush=True)
assert 0
|
Add first simple system test, checks received num of packets
|
Add first simple system test, checks received num of packets
|
Python
|
bsd-2-clause
|
ess-dmsc/event-formation-unit,ess-dmsc/event-formation-unit,ess-dmsc/event-formation-unit,ess-dmsc/event-formation-unit
|
Add first simple system test, checks received num of packets
|
import sys
from os import path
sys.path.append(path.join(path.dirname(path.dirname(path.abspath(__file__))), "utils", "efushell"))
from verifymetrics import verify_metrics
def test_efu_received_all_sent_udp_packets(docker_compose):
for name, op, value, retval in verify_metrics("localhost", "8889", "efu.mbcaen.receive.packets:2000"):
print("Validation failed for {}: expected {} {}, got {}".format(name, op, value, retval), flush=True)
assert 0
|
<commit_before><commit_msg>Add first simple system test, checks received num of packets<commit_after>
|
import sys
from os import path
sys.path.append(path.join(path.dirname(path.dirname(path.abspath(__file__))), "utils", "efushell"))
from verifymetrics import verify_metrics
def test_efu_received_all_sent_udp_packets(docker_compose):
for name, op, value, retval in verify_metrics("localhost", "8889", "efu.mbcaen.receive.packets:2000"):
print("Validation failed for {}: expected {} {}, got {}".format(name, op, value, retval), flush=True)
assert 0
|
Add first simple system test, checks received num of packetsimport sys
from os import path
sys.path.append(path.join(path.dirname(path.dirname(path.abspath(__file__))), "utils", "efushell"))
from verifymetrics import verify_metrics
def test_efu_received_all_sent_udp_packets(docker_compose):
for name, op, value, retval in verify_metrics("localhost", "8889", "efu.mbcaen.receive.packets:2000"):
print("Validation failed for {}: expected {} {}, got {}".format(name, op, value, retval), flush=True)
assert 0
|
<commit_before><commit_msg>Add first simple system test, checks received num of packets<commit_after>import sys
from os import path
sys.path.append(path.join(path.dirname(path.dirname(path.abspath(__file__))), "utils", "efushell"))
from verifymetrics import verify_metrics
def test_efu_received_all_sent_udp_packets(docker_compose):
for name, op, value, retval in verify_metrics("localhost", "8889", "efu.mbcaen.receive.packets:2000"):
print("Validation failed for {}: expected {} {}, got {}".format(name, op, value, retval), flush=True)
assert 0
|
|
85688be0cc1dc7a760783f1bed5c54b5e5e7a44f
|
tests/redirect_bot_tests.py
|
tests/redirect_bot_tests.py
|
# -*- coding: utf-8 -*-
"""Tests for the redirect.py script."""
#
# (C) Pywikibot team, 2017
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
__version__ = '$Id$'
try:
from unittest.mock import Mock, patch
except ImportError:
from mock import Mock, patch
import pywikibot
from pywikibot import site, Page, i18n
from scripts.redirect import RedirectRobot
from tests.aspects import DefaultSiteTestCase
# To make `self.site.logged_in(sysop=True)` always return False
@patch.object(site.APISite, 'logged_in', new=Mock(return_value=False))
@patch.object(Page, 'exists', new=Mock(return_value=True))
class TestGetSDTemplateNoSysop(DefaultSiteTestCase):
"""Test the get_sd_template method of the RedirectRobot class."""
def test_with_delete_and_existing_sdtemplate(self):
"""Test with delete and existing sdtemplate."""
options = {'delete': True, 'sdtemplate': '{{t}}'}
bot = RedirectRobot('broken', None, **options)
self.assertEqual(bot.sdtemplate, '{{t}}')
@patch.object(i18n, 'twhas_key', new=Mock(return_value=True))
@patch.object(i18n, 'twtranslate', new=Mock(return_value='{{sd_title}}'))
def test_with_delete_and_i18n_sd(self):
"""Test with delete and i18n template."""
bot = RedirectRobot('broken', None, delete=True)
self.assertEqual(bot.sdtemplate, '{{sd_title}}')
@patch.object(i18n, 'twhas_key', new=Mock(return_value=False))
def test_with_delete_no_sd_no_i18n(self):
"""Test with delete and no i18n template."""
with patch.object(pywikibot, 'warning') as w:
bot = RedirectRobot('broken', None, delete=True)
w.assert_called_once_with('No speedy deletion template available.')
self.assertEqual(bot.sdtemplate, None)
def test_with_delete_and_non_existing_sdtemplate(self):
"""Test with delete and non-exisitng sdtemplate."""
options = {'delete': True, 'sdtemplate': 'txt {{n|a}} txt'}
with patch.object(Page, 'exists', new=Mock(return_value=False)):
with patch.object(pywikibot, 'warning') as w:
bot = RedirectRobot('broken', None, **options)
w.assert_called_once_with('No speedy deletion template "n" available.')
self.assertEqual(bot.sdtemplate, None)
|
Add a few tests for RedirectRobot.get_sd_template
|
Add a few tests for RedirectRobot.get_sd_template
Change-Id: I79c8ef9129adc4bb623179429495c5bac0ecff78
|
Python
|
mit
|
hasteur/g13bot_tools_new,hasteur/g13bot_tools_new,npdoty/pywikibot,happy5214/pywikibot-core,wikimedia/pywikibot-core,jayvdb/pywikibot-core,PersianWikipedia/pywikibot-core,magul/pywikibot-core,wikimedia/pywikibot-core,jayvdb/pywikibot-core,happy5214/pywikibot-core,magul/pywikibot-core,Darkdadaah/pywikibot-core,npdoty/pywikibot,hasteur/g13bot_tools_new,Darkdadaah/pywikibot-core
|
Add a few tests for RedirectRobot.get_sd_template
Change-Id: I79c8ef9129adc4bb623179429495c5bac0ecff78
|
# -*- coding: utf-8 -*-
"""Tests for the redirect.py script."""
#
# (C) Pywikibot team, 2017
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
__version__ = '$Id$'
try:
from unittest.mock import Mock, patch
except ImportError:
from mock import Mock, patch
import pywikibot
from pywikibot import site, Page, i18n
from scripts.redirect import RedirectRobot
from tests.aspects import DefaultSiteTestCase
# To make `self.site.logged_in(sysop=True)` always return False
@patch.object(site.APISite, 'logged_in', new=Mock(return_value=False))
@patch.object(Page, 'exists', new=Mock(return_value=True))
class TestGetSDTemplateNoSysop(DefaultSiteTestCase):
"""Test the get_sd_template method of the RedirectRobot class."""
def test_with_delete_and_existing_sdtemplate(self):
"""Test with delete and existing sdtemplate."""
options = {'delete': True, 'sdtemplate': '{{t}}'}
bot = RedirectRobot('broken', None, **options)
self.assertEqual(bot.sdtemplate, '{{t}}')
@patch.object(i18n, 'twhas_key', new=Mock(return_value=True))
@patch.object(i18n, 'twtranslate', new=Mock(return_value='{{sd_title}}'))
def test_with_delete_and_i18n_sd(self):
"""Test with delete and i18n template."""
bot = RedirectRobot('broken', None, delete=True)
self.assertEqual(bot.sdtemplate, '{{sd_title}}')
@patch.object(i18n, 'twhas_key', new=Mock(return_value=False))
def test_with_delete_no_sd_no_i18n(self):
"""Test with delete and no i18n template."""
with patch.object(pywikibot, 'warning') as w:
bot = RedirectRobot('broken', None, delete=True)
w.assert_called_once_with('No speedy deletion template available.')
self.assertEqual(bot.sdtemplate, None)
def test_with_delete_and_non_existing_sdtemplate(self):
"""Test with delete and non-exisitng sdtemplate."""
options = {'delete': True, 'sdtemplate': 'txt {{n|a}} txt'}
with patch.object(Page, 'exists', new=Mock(return_value=False)):
with patch.object(pywikibot, 'warning') as w:
bot = RedirectRobot('broken', None, **options)
w.assert_called_once_with('No speedy deletion template "n" available.')
self.assertEqual(bot.sdtemplate, None)
|
<commit_before><commit_msg>Add a few tests for RedirectRobot.get_sd_template
Change-Id: I79c8ef9129adc4bb623179429495c5bac0ecff78<commit_after>
|
# -*- coding: utf-8 -*-
"""Tests for the redirect.py script."""
#
# (C) Pywikibot team, 2017
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
__version__ = '$Id$'
try:
from unittest.mock import Mock, patch
except ImportError:
from mock import Mock, patch
import pywikibot
from pywikibot import site, Page, i18n
from scripts.redirect import RedirectRobot
from tests.aspects import DefaultSiteTestCase
# To make `self.site.logged_in(sysop=True)` always return False
@patch.object(site.APISite, 'logged_in', new=Mock(return_value=False))
@patch.object(Page, 'exists', new=Mock(return_value=True))
class TestGetSDTemplateNoSysop(DefaultSiteTestCase):
"""Test the get_sd_template method of the RedirectRobot class."""
def test_with_delete_and_existing_sdtemplate(self):
"""Test with delete and existing sdtemplate."""
options = {'delete': True, 'sdtemplate': '{{t}}'}
bot = RedirectRobot('broken', None, **options)
self.assertEqual(bot.sdtemplate, '{{t}}')
@patch.object(i18n, 'twhas_key', new=Mock(return_value=True))
@patch.object(i18n, 'twtranslate', new=Mock(return_value='{{sd_title}}'))
def test_with_delete_and_i18n_sd(self):
"""Test with delete and i18n template."""
bot = RedirectRobot('broken', None, delete=True)
self.assertEqual(bot.sdtemplate, '{{sd_title}}')
@patch.object(i18n, 'twhas_key', new=Mock(return_value=False))
def test_with_delete_no_sd_no_i18n(self):
"""Test with delete and no i18n template."""
with patch.object(pywikibot, 'warning') as w:
bot = RedirectRobot('broken', None, delete=True)
w.assert_called_once_with('No speedy deletion template available.')
self.assertEqual(bot.sdtemplate, None)
def test_with_delete_and_non_existing_sdtemplate(self):
"""Test with delete and non-exisitng sdtemplate."""
options = {'delete': True, 'sdtemplate': 'txt {{n|a}} txt'}
with patch.object(Page, 'exists', new=Mock(return_value=False)):
with patch.object(pywikibot, 'warning') as w:
bot = RedirectRobot('broken', None, **options)
w.assert_called_once_with('No speedy deletion template "n" available.')
self.assertEqual(bot.sdtemplate, None)
|
Add a few tests for RedirectRobot.get_sd_template
Change-Id: I79c8ef9129adc4bb623179429495c5bac0ecff78# -*- coding: utf-8 -*-
"""Tests for the redirect.py script."""
#
# (C) Pywikibot team, 2017
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
__version__ = '$Id$'
try:
from unittest.mock import Mock, patch
except ImportError:
from mock import Mock, patch
import pywikibot
from pywikibot import site, Page, i18n
from scripts.redirect import RedirectRobot
from tests.aspects import DefaultSiteTestCase
# To make `self.site.logged_in(sysop=True)` always return False
@patch.object(site.APISite, 'logged_in', new=Mock(return_value=False))
@patch.object(Page, 'exists', new=Mock(return_value=True))
class TestGetSDTemplateNoSysop(DefaultSiteTestCase):
"""Test the get_sd_template method of the RedirectRobot class."""
def test_with_delete_and_existing_sdtemplate(self):
"""Test with delete and existing sdtemplate."""
options = {'delete': True, 'sdtemplate': '{{t}}'}
bot = RedirectRobot('broken', None, **options)
self.assertEqual(bot.sdtemplate, '{{t}}')
@patch.object(i18n, 'twhas_key', new=Mock(return_value=True))
@patch.object(i18n, 'twtranslate', new=Mock(return_value='{{sd_title}}'))
def test_with_delete_and_i18n_sd(self):
"""Test with delete and i18n template."""
bot = RedirectRobot('broken', None, delete=True)
self.assertEqual(bot.sdtemplate, '{{sd_title}}')
@patch.object(i18n, 'twhas_key', new=Mock(return_value=False))
def test_with_delete_no_sd_no_i18n(self):
"""Test with delete and no i18n template."""
with patch.object(pywikibot, 'warning') as w:
bot = RedirectRobot('broken', None, delete=True)
w.assert_called_once_with('No speedy deletion template available.')
self.assertEqual(bot.sdtemplate, None)
def test_with_delete_and_non_existing_sdtemplate(self):
"""Test with delete and non-exisitng sdtemplate."""
options = {'delete': True, 'sdtemplate': 'txt {{n|a}} txt'}
with patch.object(Page, 'exists', new=Mock(return_value=False)):
with patch.object(pywikibot, 'warning') as w:
bot = RedirectRobot('broken', None, **options)
w.assert_called_once_with('No speedy deletion template "n" available.')
self.assertEqual(bot.sdtemplate, None)
|
<commit_before><commit_msg>Add a few tests for RedirectRobot.get_sd_template
Change-Id: I79c8ef9129adc4bb623179429495c5bac0ecff78<commit_after># -*- coding: utf-8 -*-
"""Tests for the redirect.py script."""
#
# (C) Pywikibot team, 2017
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
__version__ = '$Id$'
try:
from unittest.mock import Mock, patch
except ImportError:
from mock import Mock, patch
import pywikibot
from pywikibot import site, Page, i18n
from scripts.redirect import RedirectRobot
from tests.aspects import DefaultSiteTestCase
# To make `self.site.logged_in(sysop=True)` always return False
@patch.object(site.APISite, 'logged_in', new=Mock(return_value=False))
@patch.object(Page, 'exists', new=Mock(return_value=True))
class TestGetSDTemplateNoSysop(DefaultSiteTestCase):
"""Test the get_sd_template method of the RedirectRobot class."""
def test_with_delete_and_existing_sdtemplate(self):
"""Test with delete and existing sdtemplate."""
options = {'delete': True, 'sdtemplate': '{{t}}'}
bot = RedirectRobot('broken', None, **options)
self.assertEqual(bot.sdtemplate, '{{t}}')
@patch.object(i18n, 'twhas_key', new=Mock(return_value=True))
@patch.object(i18n, 'twtranslate', new=Mock(return_value='{{sd_title}}'))
def test_with_delete_and_i18n_sd(self):
"""Test with delete and i18n template."""
bot = RedirectRobot('broken', None, delete=True)
self.assertEqual(bot.sdtemplate, '{{sd_title}}')
@patch.object(i18n, 'twhas_key', new=Mock(return_value=False))
def test_with_delete_no_sd_no_i18n(self):
"""Test with delete and no i18n template."""
with patch.object(pywikibot, 'warning') as w:
bot = RedirectRobot('broken', None, delete=True)
w.assert_called_once_with('No speedy deletion template available.')
self.assertEqual(bot.sdtemplate, None)
def test_with_delete_and_non_existing_sdtemplate(self):
"""Test with delete and non-exisitng sdtemplate."""
options = {'delete': True, 'sdtemplate': 'txt {{n|a}} txt'}
with patch.object(Page, 'exists', new=Mock(return_value=False)):
with patch.object(pywikibot, 'warning') as w:
bot = RedirectRobot('broken', None, **options)
w.assert_called_once_with('No speedy deletion template "n" available.')
self.assertEqual(bot.sdtemplate, None)
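# Side note: the class-level @patch.object pattern used above applies one
# mock to every test method in the decorated class. A minimal self-contained
# sketch of that pattern (Service and PatchDemo are illustrative names, not
# from pywikibot):
from unittest import TestCase
from unittest.mock import Mock, patch

class Service:
    def ping(self):
        return 'real'

# The class decorator patches Service.ping for each test in the class.
@patch.object(Service, 'ping', new=Mock(return_value='fake'))
class PatchDemo(TestCase):
    def test_ping_is_patched(self):
        self.assertEqual(Service().ping(), 'fake')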
|
|
8354df962c9a38270531a76385ea2ecef11e0836
|
examples/reset_return_delay_time.py
|
examples/reset_return_delay_time.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# PyAX-12
# The MIT License
#
# Copyright (c) 2010,2015,2017 Jeremie DECOCK (http://www.jdhp.org)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
A PyAX-12 demo.
"""
from pyax12.connection import Connection
from pyax12.argparse_default import common_argument_parser
import pyax12.packet as pk
from pyax12 import utils
import time
def main():
"""
A PyAX-12 demo.
"""
# Parse options
parser = common_argument_parser(desc=main.__doc__)
args = parser.parse_args()
# Connect to the serial port
serial_connection = Connection(port=args.port,
baudrate=args.baudrate,
timeout=args.timeout,
rpi_gpio=args.rpi)
dynamixel_id = args.dynamixel_id
serial_connection.set_return_delay_time(dynamixel_id, 250)
# Close the serial connection
serial_connection.close()
if __name__ == '__main__':
main()
|
Add a script to reset the return delay time.
|
Add a script to reset the return delay time.
|
Python
|
mit
|
jeremiedecock/pyax12,jeremiedecock/pyax12
|
Add a script to reset the return delay time.
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# PyAX-12
# The MIT License
#
# Copyright (c) 2010,2015,2017 Jeremie DECOCK (http://www.jdhp.org)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
A PyAX-12 demo.
"""
from pyax12.connection import Connection
from pyax12.argparse_default import common_argument_parser
import pyax12.packet as pk
from pyax12 import utils
import time
def main():
"""
A PyAX-12 demo.
"""
# Parse options
parser = common_argument_parser(desc=main.__doc__)
args = parser.parse_args()
# Connect to the serial port
serial_connection = Connection(port=args.port,
baudrate=args.baudrate,
timeout=args.timeout,
rpi_gpio=args.rpi)
dynamixel_id = args.dynamixel_id
serial_connection.set_return_delay_time(dynamixel_id, 250)
# Close the serial connection
serial_connection.close()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a script to reset the return delay time.<commit_after>
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# PyAX-12
# The MIT License
#
# Copyright (c) 2010,2015,2017 Jeremie DECOCK (http://www.jdhp.org)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
A PyAX-12 demo.
"""
from pyax12.connection import Connection
from pyax12.argparse_default import common_argument_parser
import pyax12.packet as pk
from pyax12 import utils
import time
def main():
"""
A PyAX-12 demo.
"""
# Parse options
parser = common_argument_parser(desc=main.__doc__)
args = parser.parse_args()
# Connect to the serial port
serial_connection = Connection(port=args.port,
baudrate=args.baudrate,
timeout=args.timeout,
rpi_gpio=args.rpi)
dynamixel_id = args.dynamixel_id
serial_connection.set_return_delay_time(dynamixel_id, 250)
# Close the serial connection
serial_connection.close()
if __name__ == '__main__':
main()
|
Add a script to reset the return delay time.#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# PyAX-12
# The MIT License
#
# Copyright (c) 2010,2015,2017 Jeremie DECOCK (http://www.jdhp.org)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
A PyAX-12 demo.
"""
from pyax12.connection import Connection
from pyax12.argparse_default import common_argument_parser
import pyax12.packet as pk
from pyax12 import utils
import time
def main():
"""
A PyAX-12 demo.
"""
# Parse options
parser = common_argument_parser(desc=main.__doc__)
args = parser.parse_args()
# Connect to the serial port
serial_connection = Connection(port=args.port,
baudrate=args.baudrate,
timeout=args.timeout,
rpi_gpio=args.rpi)
dynamixel_id = args.dynamixel_id
serial_connection.set_return_delay_time(dynamixel_id, 250)
# Close the serial connection
serial_connection.close()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a script to reset the return delay time.<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# PyAX-12
# The MIT License
#
# Copyright (c) 2010,2015,2017 Jeremie DECOCK (http://www.jdhp.org)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
A PyAX-12 demo.
"""
from pyax12.connection import Connection
from pyax12.argparse_default import common_argument_parser
import pyax12.packet as pk
from pyax12 import utils
import time
def main():
"""
A PyAX-12 demo.
"""
# Parse options
parser = common_argument_parser(desc=main.__doc__)
args = parser.parse_args()
# Connect to the serial port
serial_connection = Connection(port=args.port,
baudrate=args.baudrate,
timeout=args.timeout,
rpi_gpio=args.rpi)
dynamixel_id = args.dynamixel_id
serial_connection.set_return_delay_time(dynamixel_id, 250)
# Close the serial connection
serial_connection.close()
if __name__ == '__main__':
main()
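# Note: per the AX-12 control table, the return delay register counts in
# units of 2 microseconds, so the value 250 written above corresponds to a
# 500 us delay (the factory default). A small conversion sketch follows;
# the helper name is hypothetical and not part of PyAX-12:
def delay_us_to_register(delay_us):
    # AX-12 return delay register: 2 us per unit, valid range 0-254.
    value = delay_us // 2
    if not 0 <= value <= 254:
        raise ValueError('delay must be between 0 and 508 microseconds')
    return value

assert delay_us_to_register(500) == 250  # matches the script above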
|
|
2dced181595cabbe9c19a544ab7cb705a3c89ad5
|
osc_demo.py
|
osc_demo.py
|
import s3g
import serial
import time
import optparse
import OSC
import threading
"""
Control an s3g device (Makerbot, etc) using osc!
Requires these modules:
* pySerial: http://pypi.python.org/pypi/pyserial
* pyOSC: https://trac.v2.nl/wiki/pyOSC
"""
parser = optparse.OptionParser()
parser.add_option("-s", "--serialport", dest="serialportname",
help="serial port (ex: /dev/ttyUSB0)", default="/dev/ttyACM0")
parser.add_option("-p", "--oscport", dest="oscport",
help="OSC port to listen on", default="10000")
(options, args) = parser.parse_args()
print "here!"
r = s3g.Replicator()
print "here!"
r.file = serial.Serial(options.serialportname, 115200)
print "here!"
def move_handler(addr, tags, stuff, source):
print addr, tags, stuff, source
#target = [stuff[0], stuff[1], stuff[2], stuff[3], stuff[4]]
#velocity = stuff[5]
x = (1 - stuff[0]) * 3000
y = stuff[1] * 3000
target = [x, y, 0, 0, 0]
velocity = 400
r.Move(target, velocity)
return
print "starting server"
s = OSC.OSCServer(('192.168.1.162', int(options.oscport)))
s.addDefaultHandlers()
s.addMsgHandler("/move", move_handler)
st = threading.Thread(target=s.serve_forever)
st.start()
try:
while True:
time.sleep(0.1)
except KeyboardInterrupt:
exit(1)
pass
s.close()
st.join()
|
Add Open Sound Controller (OSC) example
|
Add Open Sound Controller (OSC) example
|
Python
|
agpl-3.0
|
makerbot/s3g,makerbot/s3g,makerbot/s3g,Jnesselr/s3g,makerbot/s3g,Jnesselr/s3g
|
Add Open Sound Controller (OSC) example
|
import s3g
import serial
import time
import optparse
import OSC
import threading
"""
Control an s3g device (Makerbot, etc) using osc!
Requires these modules:
* pySerial: http://pypi.python.org/pypi/pyserial
* pyOSC: https://trac.v2.nl/wiki/pyOSC
"""
parser = optparse.OptionParser()
parser.add_option("-s", "--serialport", dest="serialportname",
help="serial port (ex: /dev/ttyUSB0)", default="/dev/ttyACM0")
parser.add_option("-p", "--oscport", dest="oscport",
help="OSC port to listen on", default="10000")
(options, args) = parser.parse_args()
print "here!"
r = s3g.Replicator()
print "here!"
r.file = serial.Serial(options.serialportname, 115200)
print "here!"
def move_handler(addr, tags, stuff, source):
print addr, tags, stuff, source
#target = [stuff[0], stuff[1], stuff[2], stuff[3], stuff[4]]
#velocity = stuff[5]
x = (1 - stuff[0]) * 3000
y = stuff[1] * 3000
target = [x, y, 0, 0, 0]
velocity = 400
r.Move(target, velocity)
return
print "starting server"
s = OSC.OSCServer(('192.168.1.162', int(options.oscport)))
s.addDefaultHandlers()
s.addMsgHandler("/move", move_handler)
st = threading.Thread(target=s.serve_forever)
st.start()
try:
while True:
time.sleep(0.1)
except KeyboardInterrupt:
exit(1)
pass
s.close()
st.join()
|
<commit_before><commit_msg>Add Open Sound Controller (OSC) example<commit_after>
|
import s3g
import serial
import time
import optparse
import OSC
import threading
"""
Control an s3g device (Makerbot, etc) using osc!
Requires these modules:
* pySerial: http://pypi.python.org/pypi/pyserial
* pyOSC: https://trac.v2.nl/wiki/pyOSC
"""
parser = optparse.OptionParser()
parser.add_option("-s", "--serialport", dest="serialportname",
help="serial port (ex: /dev/ttyUSB0)", default="/dev/ttyACM0")
parser.add_option("-p", "--oscport", dest="oscport",
help="OSC port to listen on", default="10000")
(options, args) = parser.parse_args()
print "here!"
r = s3g.Replicator()
print "here!"
r.file = serial.Serial(options.serialportname, 115200)
print "here!"
def move_handler(addr, tags, stuff, source):
print addr, tags, stuff, source
#target = [stuff[0], stuff[1], stuff[2], stuff[3], stuff[4]]
#velocity = stuff[5]
x = (1 - stuff[0]) * 3000
y = stuff[1] * 3000
target = [x, y, 0, 0, 0]
velocity = 400
r.Move(target, velocity)
return
print "starting server"
s = OSC.OSCServer(('192.168.1.162', int(options.oscport)))
s.addDefaultHandlers()
s.addMsgHandler("/move", move_handler)
st = threading.Thread(target=s.serve_forever)
st.start()
try:
while True:
time.sleep(0.1)
except KeyboardInterrupt:
exit(1)
pass
s.close()
st.join()
|
Add Open Sound Controller (OSC) exampleimport s3g
import serial
import time
import optparse
import OSC
import threading
"""
Control an s3g device (Makerbot, etc) using osc!
Requires these modules:
* pySerial: http://pypi.python.org/pypi/pyserial
* pyOSC: https://trac.v2.nl/wiki/pyOSC
"""
parser = optparse.OptionParser()
parser.add_option("-s", "--serialport", dest="serialportname",
help="serial port (ex: /dev/ttyUSB0)", default="/dev/ttyACM0")
parser.add_option("-p", "--oscport", dest="oscport",
help="OSC port to listen on", default="10000")
(options, args) = parser.parse_args()
print "here!"
r = s3g.Replicator()
print "here!"
r.file = serial.Serial(options.serialportname, 115200)
print "here!"
def move_handler(addr, tags, stuff, source):
print addr, tags, stuff, source
#target = [stuff[0], stuff[1], stuff[2], stuff[3], stuff[4]]
#velocity = stuff[5]
x = (1 - stuff[0]) * 3000
y = stuff[1] * 3000
target = [x, y, 0, 0, 0]
velocity = 400
r.Move(target, velocity)
return
print "starting server"
s = OSC.OSCServer(('192.168.1.162', int(options.oscport)))
s.addDefaultHandlers()
s.addMsgHandler("/move", move_handler)
st = threading.Thread(target=s.serve_forever)
st.start()
try:
while True:
time.sleep(0.1)
except KeyboardInterrupt:
exit(1)
pass
s.close()
st.join()
|
<commit_before><commit_msg>Add Open Sound Controller (OSC) example<commit_after>import s3g
import serial
import time
import optparse
import OSC
import threading
"""
Control an s3g device (Makerbot, etc) using osc!
Requires these modules:
* pySerial: http://pypi.python.org/pypi/pyserial
* pyOSC: https://trac.v2.nl/wiki/pyOSC
"""
parser = optparse.OptionParser()
parser.add_option("-s", "--serialport", dest="serialportname",
help="serial port (ex: /dev/ttyUSB0)", default="/dev/ttyACM0")
parser.add_option("-p", "--oscport", dest="oscport",
help="OSC port to listen on", default="10000")
(options, args) = parser.parse_args()
print "here!"
r = s3g.Replicator()
print "here!"
r.file = serial.Serial(options.serialportname, 115200)
print "here!"
def move_handler(addr, tags, stuff, source):
print addr, tags, stuff, source
#target = [stuff[0], stuff[1], stuff[2], stuff[3], stuff[4]]
#velocity = stuff[5]
x = (1 - stuff[0]) * 3000
y = stuff[1] * 3000
target = [x, y, 0, 0, 0]
velocity = 400
r.Move(target, velocity)
return
print "starting server"
s = OSC.OSCServer(('192.168.1.162', int(options.oscport)))
s.addDefaultHandlers()
s.addMsgHandler("/move", move_handler)
st = threading.Thread(target=s.serve_forever)
st.start()
try:
while True:
time.sleep(0.1)
except KeyboardInterrupt:
exit(1)
pass
s.close()
st.join()
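# For testing the /move handler, a minimal companion client sketch using the
# same pyOSC library (host, port and coordinates are illustrative and must
# match the server address and --oscport; run it as a separate script):
client = OSC.OSCClient()
client.connect(('127.0.0.1', 10000))
msg = OSC.OSCMessage()
msg.setAddress('/move')
msg.append(0.25)  # normalized x in [0, 1]
msg.append(0.75)  # normalized y in [0, 1]
client.send(msg)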
|
|
e05854a61052b4d34657db912f93eea007f2dda9
|
read_symbols_rel.py
|
read_symbols_rel.py
|
import struct
import datetime
import argparse
import sys
record_length = 128
def dump_content(filename):
try:
fp = open(filename, 'rb')
except OSError as e:
print("[ERROR] '%s' raised when tried to read the file '%s'" % (e.strerror, filename))
sys.exit(1)
magic = fp.read(4)
while True:
buf = fp.read(record_length)
if len(buf) != record_length:
break
symbol = buf[:12].decode('utf-8')
fields = struct.unpack('<IIIIIdIIIHHIIdddd4Idd', buf[12:])
sym_table = [
'digits',
'index',
None,
'group',
None,
'pointSize',
'spread',
None,
['tickType', lambda x: ['uptick', 'downtick', 'n/a'][x]],
None,
None,
['time', lambda x: datetime.datetime.fromtimestamp(x)],
None,
'bid',
'ask',
'sessionHigh',
'sessionLow',
None,
None,
None,
None,
'bid_2',
'ask_2',
]
out = 'Symbol: {}\n'.format(symbol)
for (i, obj) in enumerate(fields):
handler = sym_table[i]
if handler is None:
continue
if type(handler) is list:
name = handler[0]
val = handler[1](obj)
else:
name = handler
val = obj
out += '{}: {}\n'.format(name, val)
print(out)
if __name__ == '__main__':
# Parse the arguments
argumentParser = argparse.ArgumentParser(add_help=False)
argumentParser.add_argument('-i', '--input-file', action='store', dest='inputFile', help='input file', required=True)
argumentParser.add_argument('-h', '--help', action='help', help='Show this help message and exit')
args = argumentParser.parse_args()
dump_content(args.inputFile)
|
Add a script to dump the .sel files
|
Add a script to dump the .sel files
|
Python
|
mit
|
FX31337/FX-BT-Scripts,FX31337/FX-BT-Scripts,EA31337/FX-BT-Scripts,EA31337/FX-BT-Scripts,EA31337/FX-BT-Scripts,FX31337/FX-BT-Scripts
|
Add a script to dump the .sel files
|
import struct
import datetime
import argparse
import sys
record_length = 128
def dump_content(filename):
try:
fp = open(filename, 'rb')
except OSError as e:
print("[ERROR] '%s' raised when tried to read the file '%s'" % (e.strerror, filename))
sys.exit(1)
magic = fp.read(4)
while True:
buf = fp.read(record_length)
if len(buf) != record_length:
break
symbol = buf[:12].decode('utf-8')
fields = struct.unpack('<IIIIIdIIIHHIIdddd4Idd', buf[12:])
sym_table = [
'digits',
'index',
None,
'group',
None,
'pointSize',
'spread',
None,
['tickType', lambda x: ['uptick', 'downtick', 'n/a'][x]],
None,
None,
['time', lambda x: datetime.datetime.fromtimestamp(x)],
None,
'bid',
'ask',
'sessionHigh',
'sessionLow',
None,
None,
None,
None,
'bid_2',
'ask_2',
]
out = 'Symbol: {}\n'.format(symbol)
for (i, obj) in enumerate(fields):
handler = sym_table[i]
if handler is None:
continue
if type(handler) is list:
name = handler[0]
val = handler[1](obj)
else:
name = handler
val = obj
out += '{}: {}\n'.format(name, val)
print(out)
if __name__ == '__main__':
# Parse the arguments
argumentParser = argparse.ArgumentParser(add_help=False)
argumentParser.add_argument('-i', '--input-file', action='store', dest='inputFile', help='input file', required=True)
argumentParser.add_argument('-h', '--help', action='help', help='Show this help message and exit')
args = argumentParser.parse_args()
dump_content(args.inputFile)
|
<commit_before><commit_msg>Add a script to dump the .sel files<commit_after>
|
import struct
import datetime
import argparse
import sys
record_length = 128
def dump_content(filename):
try:
fp = open(filename, 'rb')
except OSError as e:
print("[ERROR] '%s' raised when tried to read the file '%s'" % (e.strerror, filename))
sys.exit(1)
magic = fp.read(4)
while True:
buf = fp.read(record_length)
if len(buf) != record_length:
break
symbol = buf[:12].decode('utf-8')
fields = struct.unpack('<IIIIIdIIIHHIIdddd4Idd', buf[12:])
sym_table = [
'digits',
'index',
None,
'group',
None,
'pointSize',
'spread',
None,
['tickType', lambda x: ['uptick', 'downtick', 'n/a'][x]],
None,
None,
['time', lambda x: datetime.datetime.fromtimestamp(x)],
None,
'bid',
'ask',
'sessionHigh',
'sessionLow',
None,
None,
None,
None,
'bid_2',
'ask_2',
]
out = 'Symbol: {}\n'.format(symbol)
for (i, obj) in enumerate(fields):
handler = sym_table[i]
if handler is None:
continue
if type(handler) is list:
name = handler[0]
val = handler[1](obj)
else:
name = handler
val = obj
out += '{}: {}\n'.format(name, val)
print(out)
if __name__ == '__main__':
# Parse the arguments
argumentParser = argparse.ArgumentParser(add_help=False)
argumentParser.add_argument('-i', '--input-file', action='store', dest='inputFile', help='input file', required=True)
argumentParser.add_argument('-h', '--help', action='help', help='Show this help message and exit')
args = argumentParser.parse_args()
dump_content(args.inputFile)
|
Add a script to dump the .sel filesimport struct
import datetime
import argparse
import sys
record_length = 128
def dump_content(filename):
try:
fp = open(filename, 'rb')
except OSError as e:
print("[ERROR] '%s' raised when tried to read the file '%s'" % (e.strerror, filename))
sys.exit(1)
magic = fp.read(4)
while True:
buf = fp.read(record_length)
if len(buf) != record_length:
break
symbol = buf[:12].decode('utf-8')
fields = struct.unpack('<IIIIIdIIIHHIIdddd4Idd', buf[12:])
sym_table = [
'digits',
'index',
None,
'group',
None,
'pointSize',
'spread',
None,
['tickType', lambda x: ['uptick', 'downtick', 'n/a'][x]],
None,
None,
['time', lambda x: datetime.datetime.fromtimestamp(x)],
None,
'bid',
'ask',
'sessionHigh',
'sessionLow',
None,
None,
None,
None,
'bid_2',
'ask_2',
]
out = 'Symbol: {}\n'.format(symbol)
for (i, obj) in enumerate(fields):
handler = sym_table[i]
if handler is None:
continue
if type(handler) is list:
name = handler[0]
val = handler[1](obj)
else:
name = handler
val = obj
out += '{}: {}\n'.format(name, val)
print(out)
if __name__ == '__main__':
# Parse the arguments
argumentParser = argparse.ArgumentParser(add_help=False)
argumentParser.add_argument('-i', '--input-file', action='store', dest='inputFile', help='input file', required=True)
argumentParser.add_argument('-h', '--help', action='help', help='Show this help message and exit')
args = argumentParser.parse_args()
dump_content(args.inputFile)
|
<commit_before><commit_msg>Add a script to dump the .sel files<commit_after>import struct
import datetime
import argparse
import sys
record_length = 128
def dump_content(filename):
try:
fp = open(filename, 'rb')
except OSError as e:
print("[ERROR] '%s' raised when tried to read the file '%s'" % (e.strerror, filename))
sys.exit(1)
magic = fp.read(4)
while True:
buf = fp.read(record_length)
if len(buf) != record_length:
break
symbol = buf[:12].decode('utf-8')
fields = struct.unpack('<IIIIIdIIIHHIIdddd4Idd', buf[12:])
sym_table = [
'digits',
'index',
None,
'group',
None,
'pointSize',
'spread',
None,
['tickType', lambda x: ['uptick', 'downtick', 'n/a'][x]],
None,
None,
['time', lambda x: datetime.datetime.fromtimestamp(x)],
None,
'bid',
'ask',
'sessionHigh',
'sessionLow',
None,
None,
None,
None,
'bid_2',
'ask_2',
]
out = 'Symbol: {}\n'.format(symbol)
for (i, obj) in enumerate(fields):
handler = sym_table[i]
if handler is None:
continue
if type(handler) is list:
name = handler[0]
val = handler[1](obj)
else:
name = handler
val = obj
out += '{}: {}\n'.format(name, val)
print(out)
if __name__ == '__main__':
# Parse the arguments
argumentParser = argparse.ArgumentParser(add_help=False)
argumentParser.add_argument('-i', '--input-file', action='store', dest='inputFile', help='input file', required=True)
argumentParser.add_argument('-h', '--help', action='help', help='Show this help message and exit')
args = argumentParser.parse_args()
dump_content(args.inputFile)
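# Quick sanity check (standard library only) that the unpack format above
# accounts for the full 128-byte record once the 12-byte symbol prefix is
# included:
fmt = '<IIIIIdIIIHHIIdddd4Idd'
assert struct.calcsize(fmt) == 116                  # packed payload
assert 12 + struct.calcsize(fmt) == record_length   # symbol + payload = 128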
|
|
cb4d916a23792f92f0929693d58350e7b045fb3d
|
plugins/coinflip.py
|
plugins/coinflip.py
|
import random
from plugin import CommandPlugin, PluginException
class CoinFlip(CommandPlugin):
"""
Flip a coin
"""
max_coin_flips = 1000000
def __init__(self):
CommandPlugin.__init__(self)
self.triggers = ['coin', 'coinflip']
self.short_help = 'Flip a coin'
self.help = 'Flip a coin or number of coins'
self.help_example = ['!coin', '!coinflip 5']
def on_command(self, bot, event, response):
args = event['text']
if not args:
response['text'] = 'A coin is flipped and it is *_%s_*!' % random.choice(['Heads', 'Tails'])
else:
try:
tosses = int(args)
if tosses <= 0:
raise PluginException('Invalid argument! No coins to flip!')
# Avoid taking too long to generate coin flips
if tosses > CoinFlip.max_coin_flips:
raise PluginException(
'Invalid argument! Number of coins to flip is too large! Max flips is `%s`.' % CoinFlip.max_coin_flips)
# zfill restores leading zeros that bin() drops, so every toss is counted
rand_bits = bin(random.getrandbits(tosses))[2:].zfill(tosses)
heads = rand_bits.count('0')
tails = rand_bits.count('1')
response['text'] = '*_%s_* coins are flipped and the result is *_%s Heads_* and *_%s Tails_*!' % (
tosses, heads, tails)
except ValueError:
raise PluginException('Invalid argument! Specify a *number* of coins to flip. E.g. `!coin 5`')
response['mrkdwn_in'] = ['text']
bot.sc.api_call('chat.postMessage', **response)
|
Add CoinFlip plugin for some coin flipping fun
|
Add CoinFlip plugin for some coin flipping fun
|
Python
|
mit
|
Brottweiler/nimbus,itsmartin/nimbus,Plastix/nimbus,bcbwilla/nimbus
|
Add CoinFlip plugin for some coin flipping fun
|
import random
from plugin import CommandPlugin, PluginException
class CoinFlip(CommandPlugin):
"""
Flip a coin
"""
max_coin_flips = 1000000
def __init__(self):
CommandPlugin.__init__(self)
self.triggers = ['coin', 'coinflip']
self.short_help = 'Flip a coin'
self.help = 'Flip a coin or number of coins'
self.help_example = ['!coin', '!coinflip 5']
def on_command(self, bot, event, response):
args = event['text']
if not args:
response['text'] = 'A coin is flipped and it is *_%s_*!' % random.choice(['Heads', 'Tails'])
else:
try:
tosses = int(args)
if tosses <= 0:
raise PluginException('Invalid argument! No coins to flip!')
# Avoid taking too long to generate coin flips
if tosses > CoinFlip.max_coin_flips:
raise PluginException(
'Invalid argument! Number of coins to flip is too large! Max flips is `%s`.' % CoinFlip.max_coin_flips)
# zfill restores leading zeros that bin() drops, so every toss is counted
rand_bits = bin(random.getrandbits(tosses))[2:].zfill(tosses)
heads = rand_bits.count('0')
tails = rand_bits.count('1')
response['text'] = '*_%s_* coins are flipped and the result is *_%s Heads_* and *_%s Tails_*!' % (
tosses, heads, tails)
except ValueError:
raise PluginException('Invalid argument! Specify a *number* of coins to flip. E.g. `!coin 5`')
response['mrkdwn_in'] = ['text']
bot.sc.api_call('chat.postMessage', **response)
|
<commit_before><commit_msg>Add CoinFlip plugin for some coin flipping fun<commit_after>
|
import random
from plugin import CommandPlugin, PluginException
class CoinFlip(CommandPlugin):
"""
Flip a coin
"""
max_coin_flips = 1000000
def __init__(self):
CommandPlugin.__init__(self)
self.triggers = ['coin', 'coinflip']
self.short_help = 'Flip a coin'
self.help = 'Flip a coin or number of coins'
self.help_example = ['!coin', '!coinflip 5']
def on_command(self, bot, event, response):
args = event['text']
if not args:
response['text'] = 'A coin is flipped and it is *_%s_*!' % random.choice(['Heads', 'Tails'])
else:
try:
tosses = int(args)
if tosses <= 0:
raise PluginException('Invalid argument! No coins to flip!')
# Avoid taking too long to generate coin flips
if tosses > CoinFlip.max_coin_flips:
raise PluginException(
'Invalid argument! Number of coins to flip is too large! Max flips is `%s`.' % CoinFlip.max_coin_flips)
# zfill restores leading zeros that bin() drops, so every toss is counted
rand_bits = bin(random.getrandbits(tosses))[2:].zfill(tosses)
heads = rand_bits.count('0')
tails = rand_bits.count('1')
response['text'] = '*_%s_* coins are flipped and the result is *_%s Heads_* and *_%s Tails_*!' % (
tosses, heads, tails)
except ValueError:
raise PluginException('Invalid argument! Specify a *number* of coins to flip. E.g. `!coin 5`')
response['mrkdwn_in'] = ['text']
bot.sc.api_call('chat.postMessage', **response)
|
Add CoinFlip plugin for some coin flipping funimport random
from plugin import CommandPlugin, PluginException
class CoinFlip(CommandPlugin):
"""
Flip a coin
"""
max_coin_flips = 1000000
def __init__(self):
CommandPlugin.__init__(self)
self.triggers = ['coin', 'coinflip']
self.short_help = 'Flip a coin'
self.help = 'Flip a coin or number of coins'
self.help_example = ['!coin', '!coinflip 5']
def on_command(self, bot, event, response):
args = event['text']
if not args:
response['text'] = 'A coin is flipped and it is *_%s_*!' % random.choice(['Heads', 'Tails'])
else:
try:
tosses = int(args)
if tosses <= 0:
raise PluginException('Invalid argument! No coins to flip!')
# Avoid taking too long to generate coin flips
if tosses > CoinFlip.max_coin_flips:
raise PluginException(
'Invalid argument! Number of coins to flip is too large! Max flips is `%s`.' % CoinFlip.max_coin_flips)
# zfill restores leading zeros that bin() drops, so every toss is counted
rand_bits = bin(random.getrandbits(tosses))[2:].zfill(tosses)
heads = rand_bits.count('0')
tails = rand_bits.count('1')
response['text'] = '*_%s_* coins are flipped and the result is *_%s Heads_* and *_%s Tails_*!' % (
tosses, heads, tails)
except ValueError:
raise PluginException('Invalid argument! Specify a *number* of coins to flip. E.g. `!coin 5`')
response['mrkdwn_in'] = ['text']
bot.sc.api_call('chat.postMessage', **response)
|
<commit_before><commit_msg>Add CoinFlip plugin for some coin flipping fun<commit_after>import random
from plugin import CommandPlugin, PluginException
class CoinFlip(CommandPlugin):
"""
Flip a coin
"""
max_coin_flips = 1000000
def __init__(self):
CommandPlugin.__init__(self)
self.triggers = ['coin', 'coinflip']
self.short_help = 'Flip a coin'
self.help = 'Flip a coin or number of coins'
self.help_example = ['!coin', '!coinflip 5']
def on_command(self, bot, event, response):
args = event['text']
if not args:
response['text'] = 'A coin is flipped and it is *_%s_*!' % random.choice(['Heads', 'Tails'])
else:
try:
tosses = int(args)
if tosses <= 0:
raise PluginException('Invalid argument! No coins to flip!')
# Avoid taking too long to generate coin flips
if tosses > CoinFlip.max_coin_flips:
raise PluginException(
'Invalid argument! Number of coins to flip is too large! Max flips is `%s`.' % CoinFlip.max_coin_flips)
# zfill restores leading zeros that bin() drops, so every toss is counted
rand_bits = bin(random.getrandbits(tosses))[2:].zfill(tosses)
heads = rand_bits.count('0')
tails = rand_bits.count('1')
response['text'] = '*_%s_* coins are flipped and the result is *_%s Heads_* and *_%s Tails_*!' % (
tosses, heads, tails)
except ValueError:
raise PluginException('Invalid argument! Specify a *number* of coins to flip. E.g. `!coin 5`')
response['mrkdwn_in'] = ['text']
bot.sc.api_call('chat.postMessage', **response)
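# Why the zfill above matters: bin() drops leading zeros, so without the
# padding the number of counted flips can fall short of the requested
# tosses. A deterministic illustration:
n = 0b00101                      # imagine getrandbits(5) returned this
bits = bin(n)[2:]
assert bits == '101'             # only 3 characters, 2 flips lost
assert bits.zfill(5) == '00101'  # padding restores all 5 flips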
|
|
bebdeaf75891ef83a801e243b8bd57f5aa770a3f
|
parameter_prediction/datasets/timit.py
|
parameter_prediction/datasets/timit.py
|
import numpy as np
import os
from pylearn2.datasets import dense_design_matrix
from pylearn2.utils import string_utils
class TIMIT(dense_design_matrix.DenseDesignMatrix):
def __init__(self, which_set, preprocessor=None):
"""
which_set can be train, test, valid or test_valid.
"""
if which_set == "train":
X, Y = self._load_train()
elif which_set == "test":
X, Y = self._load_test()
elif which_set == "valid":
X, Y = self._load_valid()
elif which_set == "test_valid":
X1, Y1 = self._load_test()
X2, Y3 = self._load_valid()
X = np.concatenate([X1, X2], axis=0)
Y = np.concatenate([Y1, Y2], axis=0)
else:
raise Exception("TIMIT doesn't understand which_set='{}'".format(which_set))
super(TIMIT,self).__init__(X=X, y=Y, axes=('b', 0))
assert not np.any(np.isnan(self.X))
assert not np.any(np.isnan(self.y))
if preprocessor:
preprocessor.apply(self)
def _load_file(self, name):
data = np.load(name)
return data['X'], data['Y']
def _load_train(self):
n_batches = 5
Xs = []
Ys = []
for b in xrange(1, n_batches+1):
fname = os.path.join(
string_utils.preprocess("${PYLEARN2_DATA_PATH}"),
"timit",
"timit_train_b" + str(b) + ".npz")
X, Y = self._load_file(fname)
Xs.append(X)
Ys.append(Y)
X = np.concatenate(Xs, axis=0)
Y = np.concatenate(Ys, axis=0)
return X, Y
def _load_test(self):
fname = os.path.join(
string_utils.preprocess("${PYLEARN2_DATA_PATH}"),
"timit",
"timit_test.npz")
return self._load_file(fname)
def _load_valid(self):
fname = os.path.join(
string_utils.preprocess("${PYLEARN2_DATA_PATH}"),
"timit",
"timit_valid.npz")
return self._load_file(fname)
|
Add TIMIT dataset class for pylearn2.
|
Add TIMIT dataset class for pylearn2.
|
Python
|
mit
|
mdenil/parameter_prediction,mdenil/parameter_prediction,mdenil/parameter_prediction
|
Add TIMIT dataset class for pylearn2.
|
import numpy as np
import os
from pylearn2.datasets import dense_design_matrix
from pylearn2.utils import string_utils
class TIMIT(dense_design_matrix.DenseDesignMatrix):
def __init__(self, which_set, preprocessor=None):
"""
which_set can be train, test, valid or test_valid.
"""
if which_set == "train":
X, Y = self._load_train()
elif which_set == "test":
X, Y = self._load_test()
elif which_set == "valid":
X, Y = self._load_valid()
elif which_set == "test_valid":
X1, Y1 = self._load_test()
X2, Y2 = self._load_valid()
X = np.concatenate([X1, X2], axis=0)
Y = np.concatenate([Y1, Y2], axis=0)
else:
raise Exception("TIMIT doesn't understand which_set='{}'".format(which_set))
super(TIMIT,self).__init__(X=X, y=Y, axes=('b', 0))
assert not np.any(np.isnan(self.X))
assert not np.any(np.isnan(self.y))
if preprocessor:
preprocessor.apply(self)
def _load_file(self, name):
data = np.load(name)
return data['X'], data['Y']
def _load_train(self):
n_batches = 5
Xs = []
Ys = []
for b in xrange(1, n_batches+1):
fname = os.path.join(
string_utils.preprocess("${PYLEARN2_DATA_PATH}"),
"timit",
"timit_train_b" + str(b) + ".npz")
X, Y = self._load_file(fname)
Xs.append(X)
Ys.append(Y)
X = np.concatenate(Xs, axis=0)
Y = np.concatenate(Ys, axis=0)
return X, Y
def _load_test(self):
fname = os.path.join(
string_utils.preprocess("${PYLEARN2_DATA_PATH}"),
"timit",
"timit_test.npz")
return self._load_file(fname)
def _load_valid(self):
fname = os.path.join(
string_utils.preprocess("${PYLEARN2_DATA_PATH}"),
"timit",
"timit_valid.npz")
return self._load_file(fname)
|
<commit_before><commit_msg>Add TIMIT dataset class for pylearn2.<commit_after>
|
import numpy as np
import os
from pylearn2.datasets import dense_design_matrix
from pylearn2.utils import string_utils
class TIMIT(dense_design_matrix.DenseDesignMatrix):
def __init__(self, which_set, preprocessor=None):
"""
which_set can be train, test, valid or test_valid.
"""
if which_set == "train":
X, Y = self._load_train()
elif which_set == "test":
X, Y = self._load_test()
elif which_set == "valid":
X, Y = self._load_valid()
elif which_set == "test_valid":
X1, Y1 = self._load_test()
X2, Y2 = self._load_valid()
X = np.concatenate([X1, X2], axis=0)
Y = np.concatenate([Y1, Y2], axis=0)
else:
raise Exception("TIMIT doesn't understand which_set='{}'".format(which_set))
super(TIMIT,self).__init__(X=X, y=Y, axes=('b', 0))
assert not np.any(np.isnan(self.X))
assert not np.any(np.isnan(self.y))
if preprocessor:
preprocessor.apply(self)
def _load_file(self, name):
data = np.load(name)
return data['X'], data['Y']
def _load_train(self):
n_batches = 5
Xs = []
Ys = []
for b in xrange(1, n_batches+1):
fname = os.path.join(
string_utils.preprocess("${PYLEARN2_DATA_PATH}"),
"timit",
"timit_train_b" + str(b) + ".npz")
X, Y = self._load_file(fname)
Xs.append(X)
Ys.append(Y)
X = np.concatenate(Xs, axis=0)
Y = np.concatenate(Ys, axis=0)
return X, Y
def _load_test(self):
fname = os.path.join(
string_utils.preprocess("${PYLEARN2_DATA_PATH}"),
"timit",
"timit_test.npz")
return self._load_file(fname)
def _load_valid(self):
fname = os.path.join(
string_utils.preprocess("${PYLEARN2_DATA_PATH}"),
"timit",
"timit_valid.npz")
return self._load_file(fname)
|
Add TIMIT dataset class for pylearn2.import numpy as np
import os
from pylearn2.datasets import dense_design_matrix
from pylearn2.utils import string_utils
class TIMIT(dense_design_matrix.DenseDesignMatrix):
def __init__(self, which_set, preprocessor=None):
"""
which_set can be train, test, valid or test_valid.
"""
if which_set == "train":
X, Y = self._load_train()
elif which_set == "test":
X, Y = self._load_test()
elif which_set == "valid":
X, Y = self._load_valid()
elif which_set == "test_valid":
X1, Y1 = self._load_test()
X2, Y2 = self._load_valid()
X = np.concatenate([X1, X2], axis=0)
Y = np.concatenate([Y1, Y2], axis=0)
else:
raise Exception("TIMIT doesn't understand which_set='{}'".format(which_set))
super(TIMIT,self).__init__(X=X, y=Y, axes=('b', 0))
assert not np.any(np.isnan(self.X))
assert not np.any(np.isnan(self.y))
if preprocessor:
preprocessor.apply(self)
def _load_file(self, name):
data = np.load(name)
return data['X'], data['Y']
def _load_train(self):
n_batches = 5
Xs = []
Ys = []
for b in xrange(1, n_batches+1):
fname = os.path.join(
string_utils.preprocess("${PYLEARN2_DATA_PATH}"),
"timit",
"timit_train_b" + str(b) + ".npz")
X, Y = self._load_file(fname)
Xs.append(X)
Ys.append(Y)
X = np.concatenate(Xs, axis=0)
Y = np.concatenate(Ys, axis=0)
return X, Y
def _load_test(self):
fname = os.path.join(
string_utils.preprocess("${PYLEARN2_DATA_PATH}"),
"timit",
"timit_test.npz")
return self._load_file(fname)
def _load_valid(self):
fname = os.path.join(
string_utils.preprocess("${PYLEARN2_DATA_PATH}"),
"timit",
"timit_valid.npz")
return self._load_file(fname)
|
<commit_before><commit_msg>Add TIMIT dataset class for pylearn2.<commit_after>import numpy as np
import os
from pylearn2.datasets import dense_design_matrix
from pylearn2.utils import string_utils
class TIMIT(dense_design_matrix.DenseDesignMatrix):
def __init__(self, which_set, preprocessor=None):
"""
which_set can be train, test, valid or test_valid.
"""
if which_set == "train":
X, Y = self._load_train()
elif which_set == "test":
X, Y = self._load_test()
elif which_set == "valid":
X, Y = self._load_valid()
elif which_set == "test_valid":
X1, Y1 = self._load_test()
X2, Y2 = self._load_valid()
X = np.concatenate([X1, X2], axis=0)
Y = np.concatenate([Y1, Y2], axis=0)
else:
raise Exception("TIMIT doesn't understand which_set='{}'".format(which_set))
super(TIMIT,self).__init__(X=X, y=Y, axes=('b', 0))
assert not np.any(np.isnan(self.X))
assert not np.any(np.isnan(self.y))
if preprocessor:
preprocessor.apply(self)
def _load_file(self, name):
data = np.load(name)
return data['X'], data['Y']
def _load_train(self):
n_batches = 5
Xs = []
Ys = []
for b in xrange(1, n_batches+1):
fname = os.path.join(
string_utils.preprocess("${PYLEARN2_DATA_PATH}"),
"timit",
"timit_train_b" + str(b) + ".npz")
X, Y = self._load_file(fname)
Xs.append(X)
Ys.append(Y)
X = np.concatenate(Xs, axis=0)
Y = np.concatenate(Ys, axis=0)
return X, Y
def _load_test(self):
fname = os.path.join(
string_utils.preprocess("${PYLEARN2_DATA_PATH}"),
"timit",
"timit_test.npz")
return self._load_file(fname)
def _load_valid(self):
fname = os.path.join(
string_utils.preprocess("${PYLEARN2_DATA_PATH}"),
"timit",
"timit_valid.npz")
return self._load_file(fname)
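# Minimal usage sketch, assuming pylearn2 is installed and
# PYLEARN2_DATA_PATH points at a directory holding the timit/*.npz files
# described above (the path below is illustrative):
#
#     import os
#     os.environ.setdefault('PYLEARN2_DATA_PATH', '/data/pylearn2')
#     from parameter_prediction.datasets.timit import TIMIT
#     train = TIMIT(which_set='train')
#     print(train.X.shape)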
|
|
df431d81d6524dc414061a4c39152378fb622eb5
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(name='pymorph',
version='0.88',
description='Image Morphology Toolbox',
author='Luís Pedro Coelho',
author_email='lpc@mcu.edu',
url='http://luispedro.org/pymorph/',
packages=find_packages(),
)
|
Make this into a python package
|
Make this into a python package
git-svn-id: d22b6ed1b13e3e3451c5d6fab7d0adc235b2f27e@51 5128b7d6-d249-4073-bd28-aebf1259cb0a
|
Python
|
bsd-3-clause
|
luispedro/pymorph
|
Make this into a python package
git-svn-id: d22b6ed1b13e3e3451c5d6fab7d0adc235b2f27e@51 5128b7d6-d249-4073-bd28-aebf1259cb0a
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(name='pymorph',
version='0.88',
description='Image Morphology Toolbox',
author='Luís Pedro Coelho',
author_email='lpc@mcu.edu',
url='http://luispedro.org/pymorph/',
packages=find_packages(),
)
|
<commit_before><commit_msg>Make this into a python package
git-svn-id: d22b6ed1b13e3e3451c5d6fab7d0adc235b2f27e@51 5128b7d6-d249-4073-bd28-aebf1259cb0a<commit_after>
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(name='pymorph',
version='0.88',
description='Image Morphology Toolbox',
author='Luís Pedro Coelho',
author_email='lpc@mcu.edu',
url='http://luispedro.org/pymorph/',
packages=find_packages(),
)
|
Make this into a python package
git-svn-id: d22b6ed1b13e3e3451c5d6fab7d0adc235b2f27e@51 5128b7d6-d249-4073-bd28-aebf1259cb0a# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(name='pymorph',
version='0.88',
description='Image Morphology Toolbox',
author='Luís Pedro Coelho',
author_email='lpc@mcu.edu',
url='http://luispedro.org/pymorph/',
packages=find_packages(),
)
|
<commit_before><commit_msg>Make this into a python package
git-svn-id: d22b6ed1b13e3e3451c5d6fab7d0adc235b2f27e@51 5128b7d6-d249-4073-bd28-aebf1259cb0a<commit_after># -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(name='pymorph',
version='0.88',
description='Image Morphology Toolbox',
author='Luís Pedro Coelho',
author_email='lpc@mcu.edu',
url='http://luispedro.org/pymorph/',
packages=find_packages(),
)
|
|
7d6fa5dda38dbac5c1b694f8b0d72901087be6f4
|
candidates/election_specific.py
|
candidates/election_specific.py
|
from django.conf import settings
# This is actually taken from Pombola's country-specific code package
# in pombola/country/__init__.py. You should add to this list anything
# country-specific you want to be available through an import from
# candidates.election_specific
imports_and_defaults = []
# Note that one could do this without the dynamic import and use of
# globals() by switching on country names and importing * from each
# country specific module, as MapIt does. [1] I slightly prefer the
# version here since you can explicitly list the names to be imported,
# and provide a default value.
#
# [1] https://github.com/mysociety/mapit/blob/master/mapit/countries/__init__.py
for name_to_import, default_value in imports_and_defaults:
if settings.ELECTION_APP:
try:
globals()[name_to_import] = \
getattr(
__import__(
settings.ELECTION_APP_FULLY_QUALIFIED + '.lib',
fromlist=[name_to_import]
),
name_to_import
)
except (ImportError, AttributeError):
globals()[name_to_import] = default_value
else:
globals()[name_to_import] = default_value
|
Add a module for election-specific functions
|
Add a module for election-specific functions
This gives us an extra mechanism for customizing behaviour for different
elections.
|
Python
|
agpl-3.0
|
datamade/yournextmp-popit,openstate/yournextrepresentative,openstate/yournextrepresentative,mysociety/yournextrepresentative,neavouli/yournextrepresentative,neavouli/yournextrepresentative,mysociety/yournextmp-popit,datamade/yournextmp-popit,mysociety/yournextrepresentative,openstate/yournextrepresentative,YoQuieroSaber/yournextrepresentative,mysociety/yournextrepresentative,neavouli/yournextrepresentative,YoQuieroSaber/yournextrepresentative,openstate/yournextrepresentative,YoQuieroSaber/yournextrepresentative,mysociety/yournextmp-popit,DemocracyClub/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextmp-popit,mysociety/yournextrepresentative,neavouli/yournextrepresentative,YoQuieroSaber/yournextrepresentative,datamade/yournextmp-popit,mysociety/yournextrepresentative,datamade/yournextmp-popit,YoQuieroSaber/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,openstate/yournextrepresentative,mysociety/yournextmp-popit,datamade/yournextmp-popit,neavouli/yournextrepresentative
|
Add a module for election-specific functions
This gives us an extra mechanism for customizing behaviour for different
elections.
|
from django.conf import settings
# This is actually taken from Pombola's country-specific code package
# in pombola/country/__init__.py. You should add to this list anything
# country-specific you want to be available through an import from
# candidates.election_specific
imports_and_defaults = []
# Note that one could do this without the dynamic import and use of
# globals() by switching on country names and importing * from each
# country specific module, as MapIt does. [1] I slightly prefer the
# version here since you can explicitly list the names to be imported,
# and provide a default value.
#
# [1] https://github.com/mysociety/mapit/blob/master/mapit/countries/__init__.py
for name_to_import, default_value in imports_and_defaults:
if settings.ELECTION_APP:
try:
globals()[name_to_import] = \
getattr(
__import__(
settings.ELECTION_APP_FULLY_QUALIFIED + '.lib',
fromlist=[name_to_import]
),
name_to_import
)
except (ImportError, AttributeError):
globals()[name_to_import] = default_value
else:
globals()[name_to_import] = default_value
|
<commit_before><commit_msg>Add a module for election-specific functions
This gives us an extra mechanism for customizing behaviour for different
elections.<commit_after>
|
from django.conf import settings
# This is actually taken from Pombola's country-specific code package
# in pombola/country/__init__.py. You should add to this list anything
# country-specific you want to be available through an import from
# candidates.election_specific
imports_and_defaults = []
# Note that one could do this without the dynamic import and use of
# globals() by switching on country names and importing * from each
# country specific module, as MapIt does. [1] I slightly prefer the
# version here since you can explicitly list the names to be imported,
# and provide a default value.
#
# [1] https://github.com/mysociety/mapit/blob/master/mapit/countries/__init__.py
for name_to_import, default_value in imports_and_defaults:
if settings.ELECTION_APP:
try:
globals()[name_to_import] = \
getattr(
__import__(
settings.ELECTION_APP_FULLY_QUALIFIED + '.lib',
fromlist=[name_to_import]
),
name_to_import
)
except (ImportError, AttributeError):
globals()[name_to_import] = default_value
else:
globals()[name_to_import] = default_value
|
Add a module for election-specific functions
This gives us an extra mechanism for customizing behaviour for different
elections.from django.conf import settings
# This is actually taken from Pombola's country-specific code package
# in pombola/country/__init__.py. You should add to this list anything
# country-specific you want to be available through an import from
# candidates.election_specific
imports_and_defaults = []
# Note that one could do this without the dynamic import and use of
# globals() by switching on country names and importing * from each
# country specific module, as MapIt does. [1] I slightly prefer the
# version here since you can explicitly list the names to be imported,
# and provide a default value.
#
# [1] https://github.com/mysociety/mapit/blob/master/mapit/countries/__init__.py
for name_to_import, default_value in imports_and_defaults:
if settings.ELECTION_APP:
try:
globals()[name_to_import] = \
getattr(
__import__(
settings.ELECTION_APP_FULLY_QUALIFIED + '.lib',
fromlist=[name_to_import]
),
name_to_import
)
except (ImportError, AttributeError):
globals()[name_to_import] = default_value
else:
globals()[name_to_import] = default_value
|
<commit_before><commit_msg>Add a module for election-specific functions
This gives us an extra mechanism for customizing behaviour for different
elections.<commit_after>from django.conf import settings
# This is actually taken from Pombola's country-specific code package
# in pombola/country/__init__.py. You should add to this list anything
# country-specific you want to be available through an import from
# candidates.election_specific
imports_and_defaults = []
# Note that one could do this without the dynamic import and use of
# globals() by switching on country names and importing * from each
# country specific module, as MapIt does. [1] I slightly prefer the
# version here since you can explicitly list the names to be imported,
# and provide a default value.
#
# [1] https://github.com/mysociety/mapit/blob/master/mapit/countries/__init__.py
for name_to_import, default_value in imports_and_defaults:
if settings.ELECTION_APP:
try:
globals()[name_to_import] = \
getattr(
__import__(
settings.ELECTION_APP_FULLY_QUALIFIED + '.lib',
fromlist=[name_to_import]
),
name_to_import
)
except (ImportError, AttributeError):
globals()[name_to_import] = default_value
else:
globals()[name_to_import] = default_value
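# As an illustration of the mechanism (the name, fallback and module
# contents below are hypothetical), a populated entry and the matching
# country module could look like:
#
#     # candidates/election_specific.py
#     imports_and_defaults = [
#         ('AREA_POST_DATA', None),  # name with a None fallback
#     ]
#
#     # <ELECTION_APP>/lib.py -- the module the dynamic import targets
#     AREA_POST_DATA = {'WMC': 'Member of Parliament'}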
|
|
370cc8fb0b22c122e17ef23cd5c71c4531352609
|
migrations/versions/62d4d27afcf_.py
|
migrations/versions/62d4d27afcf_.py
|
"""empty message
Revision ID: 62d4d27afcf
Revises: 453729c2bc21
Create Date: 2014-11-03 14:52:53.884479
"""
# revision identifiers, used by Alembic.
revision = '62d4d27afcf'
down_revision = '453729c2bc21'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('dlannotations', sa.Column('selected_texts_json', sa.Text(convert_unicode=True), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('dlannotations', 'selected_texts_json')
### end Alembic commands ###
|
Add migration to add selected_texts_json column.
|
Add migration to add selected_texts_json column.
|
Python
|
bsd-3-clause
|
dlarochelle/extractor_train,dlarochelle/extractor_train,dlarochelle/extractor_train
|
Add migration to add selected_texts_json column.
|
"""empty message
Revision ID: 62d4d27afcf
Revises: 453729c2bc21
Create Date: 2014-11-03 14:52:53.884479
"""
# revision identifiers, used by Alembic.
revision = '62d4d27afcf'
down_revision = '453729c2bc21'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('dlannotations', sa.Column('selected_texts_json', sa.Text(convert_unicode=True), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('dlannotations', 'selected_texts_json')
### end Alembic commands ###
|
<commit_before><commit_msg>Add migration to add selected_texts_json column.<commit_after>
|
"""empty message
Revision ID: 62d4d27afcf
Revises: 453729c2bc21
Create Date: 2014-11-03 14:52:53.884479
"""
# revision identifiers, used by Alembic.
revision = '62d4d27afcf'
down_revision = '453729c2bc21'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('dlannotations', sa.Column('selected_texts_json', sa.Text(convert_unicode=True), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('dlannotations', 'selected_texts_json')
### end Alembic commands ###
|
Add migration to add selected_texts_json column."""empty message
Revision ID: 62d4d27afcf
Revises: 453729c2bc21
Create Date: 2014-11-03 14:52:53.884479
"""
# revision identifiers, used by Alembic.
revision = '62d4d27afcf'
down_revision = '453729c2bc21'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('dlannotations', sa.Column('selected_texts_json', sa.Text(convert_unicode=True), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('dlannotations', 'selected_texts_json')
### end Alembic commands ###
|
<commit_before><commit_msg>Add migration to add selected_texts_json column.<commit_after>"""empty message
Revision ID: 62d4d27afcf
Revises: 453729c2bc21
Create Date: 2014-11-03 14:52:53.884479
"""
# revision identifiers, used by Alembic.
revision = '62d4d27afcf'
down_revision = '453729c2bc21'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('dlannotations', sa.Column('selected_texts_json', sa.Text(convert_unicode=True), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('dlannotations', 'selected_texts_json')
### end Alembic commands ###
|
|
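The Alembic operations above only touch the database schema. A hedged sketch of the ORM declaration such a migration typically pairs with (the class name and declarative Base setup are guesses from the table name, not from the repository):

# Assumed model-side counterpart of the 62d4d27afcf migration.
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class DLAnnotation(Base):  # hypothetical class name
    __tablename__ = 'dlannotations'
    id = sa.Column(sa.Integer, primary_key=True)
    selected_texts_json = sa.Column(sa.Text(convert_unicode=True), nullable=True)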
da97cd5378e849de9c0bfc959186db5564e04cfa
|
osbrain/tests/test_agentaddress.py
|
osbrain/tests/test_agentaddress.py
|
import pytest
import zmq
from osbrain.core import AgentAddressKind
from osbrain.core import AgentAddressRole
def twin_list(elements):
result = elements[:]
result[1::2] = elements[::2]
result[::2] = elements[1::2]
return result
def test_kind():
"""
This test aims to cover basic AgentAddressKind operations: initialization,
equivalence and basic methods.
"""
strings = ['REQ', 'REP', 'PUSH', 'PULL', 'PUB', 'SUB']
zmqints = [zmq.REQ, zmq.REP, zmq.PUSH, zmq.PULL, zmq.PUB, zmq.SUB]
handlers = [False, True, False, True, False, True]
strtwins = twin_list(strings)
zmqtwins = twin_list(zmqints)
configurations = list(zip(strings, strtwins, zmqints, zmqtwins, handlers))
# Make sure there are no missing values (list() so this check does not exhaust the zip)
assert len(configurations) == len(strings)
for string, strtwin, zmqint, zmqtwin, handler in configurations:
# Initialization and equivalence
kind = AgentAddressKind(string)
assert kind == zmqint
assert kind == string
assert kind == AgentAddressKind(zmqint)
assert kind == AgentAddressKind(kind)
# Basic methods
assert isinstance(kind.twin(), AgentAddressKind)
assert kind.twin() == strtwin
assert kind.requires_handler() == handler
def test_role():
"""
This test aims to cover basic AgentAddressRole operations: initialization,
equivalence and basic methods.
"""
values = ['server', 'client']
twins = twin_list(values)
for value, twin in zip(values, twins):
# Initialization and equivalence
role = AgentAddressRole(value)
assert role == value
assert role == AgentAddressRole(role)
# Basic methods
assert isinstance(role.twin(), AgentAddressRole)
assert role.twin() == twin
|
Add tests for AgentAddressRole and AgentAddressKind classes
|
Add tests for AgentAddressRole and AgentAddressKind classes
|
Python
|
apache-2.0
|
opensistemas-hub/osbrain
|
Add tests for AgentAddressRole and AgentAddressKind classes
|
import pytest
import zmq
from osbrain.core import AgentAddressKind
from osbrain.core import AgentAddressRole
def twin_list(elements):
result = elements[:]
result[1::2] = elements[::2]
result[::2] = elements[1::2]
return result
def test_kind():
"""
This test aims to cover basic AgentAddressKind operations: initialization,
equivalence and basic methods.
"""
strings = ['REQ', 'REP', 'PUSH', 'PULL', 'PUB', 'SUB']
zmqints = [zmq.REQ, zmq.REP, zmq.PUSH, zmq.PULL, zmq.PUB, zmq.SUB]
handlers = [False, True, False, True, False, True]
strtwins = twin_list(strings)
zmqtwins = twin_list(zmqints)
configurations = list(zip(strings, strtwins, zmqints, zmqtwins, handlers))
# Make sure there are no missing values (list() so this check does not exhaust the zip)
assert len(configurations) == len(strings)
for string, strtwin, zmqint, zmqtwin, handler in configurations:
# Initialization and equivalence
kind = AgentAddressKind(string)
assert kind == zmqint
assert kind == string
assert kind == AgentAddressKind(zmqint)
assert kind == AgentAddressKind(kind)
# Basic methods
assert isinstance(kind.twin(), AgentAddressKind)
assert kind.twin() == strtwin
assert kind.requires_handler() == handler
def test_role():
"""
This test aims to cover basic AgentAddressRole operations: initialization,
equivalence and basic methods.
"""
values = ['server', 'client']
twins = twin_list(values)
for value, twin in zip(values, twins):
# Initialization and equivalence
role = AgentAddressRole(value)
assert role == value
assert role == AgentAddressRole(role)
# Basic methods
assert isinstance(role.twin(), AgentAddressRole)
assert role.twin() == twin
|
<commit_before><commit_msg>Add tests for AgentAddressRole and AgentAddressKind classes<commit_after>
|
import pytest
import zmq
from osbrain.core import AgentAddressKind
from osbrain.core import AgentAddressRole
def twin_list(elements):
result = elements[:]
result[1::2] = elements[::2]
result[::2] = elements[1::2]
return result
def test_kind():
"""
This test aims to cover basic AgentAddressKind operations: initialization,
equivalence and basic methods.
"""
strings = ['REQ', 'REP', 'PUSH', 'PULL', 'PUB', 'SUB']
zmqints = [zmq.REQ, zmq.REP, zmq.PUSH, zmq.PULL, zmq.PUB, zmq.SUB]
handlers = [False, True, False, True, False, True]
strtwins = twin_list(strings)
zmqtwins = twin_list(zmqints)
configurations = list(zip(strings, strtwins, zmqints, zmqtwins, handlers))
# Make sure there are no missing values (list() so this check does not exhaust the zip)
assert len(configurations) == len(strings)
for string, strtwin, zmqint, zmqtwin, handler in configurations:
# Initialization and equivalence
kind = AgentAddressKind(string)
assert kind == zmqint
assert kind == string
assert kind == AgentAddressKind(zmqint)
assert kind == AgentAddressKind(kind)
# Basic methods
assert isinstance(kind.twin(), AgentAddressKind)
assert kind.twin() == strtwin
assert kind.requires_handler() == handler
def test_role():
"""
This test aims to cover basic AgentAddressRole operations: initialization,
equivalence and basic methods.
"""
values = ['server', 'client']
twins = twin_list(values)
for value, twin in zip(values, twins):
# Initialization and equivalence
role = AgentAddressRole(value)
assert role == value
assert role == AgentAddressRole(role)
# Basic methods
assert isinstance(role.twin(), AgentAddressRole)
assert role.twin() == twin
|
Add tests for AgentAddressRole and AgentAddressKind classesimport pytest
import zmq
from osbrain.core import AgentAddressKind
from osbrain.core import AgentAddressRole
def twin_list(elements):
result = elements[:]
result[1::2] = elements[::2]
result[::2] = elements[1::2]
return result
def test_kind():
"""
This test aims to cover basic AgentAddressKind operations: initialization,
equivalence and basic methods.
"""
strings = ['REQ', 'REP', 'PUSH', 'PULL', 'PUB', 'SUB']
zmqints = [zmq.REQ, zmq.REP, zmq.PUSH, zmq.PULL, zmq.PUB, zmq.SUB]
handlers = [False, True, False, True, False, True]
strtwins = twin_list(strings)
zmqtwins = twin_list(zmqints)
configurations = list(zip(strings, strtwins, zmqints, zmqtwins, handlers))
# Make sure there are no missing values (list() so this check does not exhaust the zip)
assert len(configurations) == len(strings)
for string, strtwin, zmqint, zmqtwin, handler in configurations:
# Initialization and equivalence
kind = AgentAddressKind(string)
assert kind == zmqint
assert kind == string
assert kind == AgentAddressKind(zmqint)
assert kind == AgentAddressKind(kind)
# Basic methods
assert isinstance(kind.twin(), AgentAddressKind)
assert kind.twin() == strtwin
assert kind.requires_handler() == handler
def test_role():
"""
This test aims to cover basic AgentAddressRole operations: initialization,
equivalence and basic methods.
"""
values = ['server', 'client']
twins = twin_list(values)
for value, twin in zip(values, twins):
# Initialization and equivalence
role = AgentAddressRole(value)
assert role == value
assert role == AgentAddressRole(role)
# Basic methods
assert isinstance(role.twin(), AgentAddressRole)
assert role.twin() == twin
|
<commit_before><commit_msg>Add tests for AgentAddressRole and AgentAddressKind classes<commit_after>import pytest
import zmq
from osbrain.core import AgentAddressKind
from osbrain.core import AgentAddressRole
def twin_list(elements):
result = elements[:]
result[1::2] = elements[::2]
result[::2] = elements[1::2]
return result
def test_kind():
"""
This test aims to cover basic AgentAddressKind operations: initialization,
equivalence and basic methods.
"""
strings = ['REQ', 'REP', 'PUSH', 'PULL', 'PUB', 'SUB']
zmqints = [zmq.REQ, zmq.REP, zmq.PUSH, zmq.PULL, zmq.PUB, zmq.SUB]
handlers = [False, True, False, True, False, True]
strtwins = twin_list(strings)
zmqtwins = twin_list(zmqints)
configurations = list(zip(strings, strtwins, zmqints, zmqtwins, handlers))
# Make sure there are no missing values (list() so this check does not exhaust the zip)
assert len(configurations) == len(strings)
for string, strtwin, zmqint, zmqtwin, handler in configurations:
# Initialization and equivalence
kind = AgentAddressKind(string)
assert kind == zmqint
assert kind == string
assert kind == AgentAddressKind(zmqint)
assert kind == AgentAddressKind(kind)
# Basic methods
assert isinstance(kind.twin(), AgentAddressKind)
assert kind.twin() == strtwin
assert kind.requires_handler() == handler
def test_role():
"""
This test aims to cover basic AgentAddressRole operations: initialization,
equivalence and basic methods.
"""
values = ['server', 'client']
twins = twin_list(values)
for value, twin in zip(values, twins):
# Initialization and equivalence
role = AgentAddressRole(value)
assert role == value
assert role == AgentAddressRole(role)
# Basic methods
assert isinstance(role.twin(), AgentAddressRole)
assert role.twin() == twin
|
|
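The twin_list helper in the tests above swaps adjacent pairs, which is how the expected counterpart of each socket kind (REQ/REP, PUSH/PULL, PUB/SUB) and role (server/client) is derived purely from the ordering of the input list. A standalone illustration, not part of the test suite:

def twin_list(elements):
    result = elements[:]
    result[1::2] = elements[::2]   # odd positions take the even-indexed items
    result[::2] = elements[1::2]   # even positions take the odd-indexed items
    return result

print(twin_list(['REQ', 'REP', 'PUSH', 'PULL', 'PUB', 'SUB']))
# -> ['REP', 'REQ', 'PULL', 'PUSH', 'SUB', 'PUB']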
7d9b6a55ddcccc2449831f081247d697773a9aa6
|
common/djangoapps/student/management/commands/transfer_students.py
|
common/djangoapps/student/management/commands/transfer_students.py
|
from optparse import make_option
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from student.models import CourseEnrollment
from shoppingcart.models import CertificateItem
class Command(BaseCommand):
help = """
This command takes two course ids as input and transfers
all students enrolled in one course into the other. This will
remove them from the first class and enroll them in the second
class in the same mode as the first one, e.g. honor, verified,
audit.
example:
# Transfer students from the old demoX class to a new one.
manage.py ... transfer_students -f edX/Open_DemoX/edx_demo_course -t edX/Open_DemoX/new_demoX
"""
option_list = BaseCommand.option_list + (
make_option('-f', '--from',
metavar='SOURCE_COURSE',
dest='source_course',
help='The course to transfer students from.'),
make_option('-t', '--to',
metavar='DEST_COURSE',
dest='dest_course',
help='The new course to enroll the student into.'),
)
def handle(self, *args, **options):
source = options['source_course']
dest = options['dest_course']
source_students = User.objects.filter(
courseenrollment__course_id=source,
courseenrollment__is_active=True)
for user in source_students:
print("Moving {}.".format(user.username))
# Find the old enrollment.
enrollment = CourseEnrollment.objects.get(user=user,
course_id=source)
# Move the Student between the classes.
mode = enrollment.mode
CourseEnrollment.unenroll(user, source)
CourseEnrollment.enroll(user, dest, mode=mode)
if mode == 'verified':
try:
certificate_item = CertificateItem.objects.get(
course_id=source,
course_enrollment=enrollment)
except CertificateItem.DoesNotExist:
print("No certificate for {}".format(user))
continue
new_enrollment = CourseEnrollment.objects.get(user=user,
course_id=dest)
certificate_item.course_id = dest
certificate_item.course_enrollment = new_enrollment
certificate_item.save()
|
Add a command to transfer students from one course to another.
|
Add a command to transfer students from one course to another.
Move only active students and also update cert item for verified students.
|
Python
|
agpl-3.0
|
peterm-itr/edx-platform,SravanthiSinha/edx-platform,Edraak/edx-platform,eestay/edx-platform,LICEF/edx-platform,ahmedaljazzar/edx-platform,TeachAtTUM/edx-platform,cselis86/edx-platform,openfun/edx-platform,bitifirefly/edx-platform,dsajkl/reqiop,jazztpt/edx-platform,ak2703/edx-platform,kursitet/edx-platform,motion2015/edx-platform,10clouds/edx-platform,zofuthan/edx-platform,jazkarta/edx-platform-for-isc,eemirtekin/edx-platform,cyanna/edx-platform,cyanna/edx-platform,torchingloom/edx-platform,rhndg/openedx,B-MOOC/edx-platform,wwj718/ANALYSE,chauhanhardik/populo,benpatterson/edx-platform,BehavioralInsightsTeam/edx-platform,edx-solutions/edx-platform,halvertoluke/edx-platform,pomegranited/edx-platform,ferabra/edx-platform,a-parhom/edx-platform,JCBarahona/edX,tiagochiavericosta/edx-platform,DNFcode/edx-platform,4eek/edx-platform,tanmaykm/edx-platform,arifsetiawan/edx-platform,dsajkl/123,WatanabeYasumasa/edx-platform,nanolearningllc/edx-platform-cypress-2,CredoReference/edx-platform,abdoosh00/edraak,pabloborrego93/edx-platform,edx-solutions/edx-platform,morenopc/edx-platform,Ayub-Khan/edx-platform,AkA84/edx-platform,auferack08/edx-platform,appsembler/edx-platform,yokose-ks/edx-platform,jruiperezv/ANALYSE,rue89-tech/edx-platform,deepsrijit1105/edx-platform,motion2015/a3,nttks/edx-platform,solashirai/edx-platform,antonve/s4-project-mooc,torchingloom/edx-platform,louyihua/edx-platform,jswope00/GAI,nikolas/edx-platform,kxliugang/edx-platform,cecep-edu/edx-platform,analyseuc3m/ANALYSE-v1,xingyepei/edx-platform,Shrhawk/edx-platform,jamiefolsom/edx-platform,inares/edx-platform,jzoldak/edx-platform,bigdatauniversity/edx-platform,franosincic/edx-platform,mjg2203/edx-platform-seas,nanolearning/edx-platform,analyseuc3m/ANALYSE-v1,IndonesiaX/edx-platform,nanolearningllc/edx-platform-cypress,Edraak/edx-platform,CourseTalk/edx-platform,Semi-global/edx-platform,beni55/edx-platform,dcosentino/edx-platform,chrisndodge/edx-platform,vismartltd/edx-platform,pepeportela/edx-platform,dcosentino/edx-platform,hkawasaki/kawasaki-aio8-0,wwj718/edx-platform,bdero/edx-platform,xingyepei/edx-platform,ferabra/edx-platform,devs1991/test_edx_docmode,UOMx/edx-platform,lduarte1991/edx-platform,simbs/edx-platform,ahmedaljazzar/edx-platform,abdoosh00/edraak,atsolakid/edx-platform,doganov/edx-platform,chudaol/edx-platform,arifsetiawan/edx-platform,jelugbo/tundex,doganov/edx-platform,kamalx/edx-platform,nttks/jenkins-test,Softmotions/edx-platform,xuxiao19910803/edx-platform,devs1991/test_edx_docmode,tiagochiavericosta/edx-platform,ampax/edx-platform-backup,msegado/edx-platform,jelugbo/tundex,chauhanhardik/populo_2,xuxiao19910803/edx,AkA84/edx-platform,bigdatauniversity/edx-platform,ferabra/edx-platform,benpatterson/edx-platform,synergeticsedx/deployment-wipro,raccoongang/edx-platform,dcosentino/edx-platform,y12uc231/edx-platform,CourseTalk/edx-platform,cognitiveclass/edx-platform,dsajkl/reqiop,IONISx/edx-platform,jbassen/edx-platform,shashank971/edx-platform,marcore/edx-platform,ahmadiga/min_edx,hamzehd/edx-platform,IONISx/edx-platform,Edraak/circleci-edx-platform,pepeportela/edx-platform,eduNEXT/edx-platform,eduNEXT/edunext-platform,nanolearningllc/edx-platform-cypress-2,ak2703/edx-platform,naresh21/synergetics-edx-platform,shubhdev/edx-platform,jamesblunt/edx-platform,leansoft/edx-platform,antonve/s4-project-mooc,IndonesiaX/edx-platform,defance/edx-platform,Endika/edx-platform,xuxiao19910803/edx-platform,zubair-arbi/edx-platform,wwj718/edx-platform,nikolas/edx-platform,unicri/edx-platform,naresh21/synergetics-edx-platform,ahmadio/ed
x-platform,knehez/edx-platform,chauhanhardik/populo,playm2mboy/edx-platform,peterm-itr/edx-platform,polimediaupv/edx-platform,rismalrv/edx-platform,Unow/edx-platform,DNFcode/edx-platform,etzhou/edx-platform,xuxiao19910803/edx,don-github/edx-platform,chand3040/cloud_that,hmcmooc/muddx-platform,adoosii/edx-platform,jamesblunt/edx-platform,kamalx/edx-platform,Softmotions/edx-platform,OmarIthawi/edx-platform,gsehub/edx-platform,IndonesiaX/edx-platform,B-MOOC/edx-platform,jbzdak/edx-platform,hmcmooc/muddx-platform,SivilTaram/edx-platform,y12uc231/edx-platform,pku9104038/edx-platform,Stanford-Online/edx-platform,pku9104038/edx-platform,hmcmooc/muddx-platform,cselis86/edx-platform,shubhdev/edxOnBaadal,miptliot/edx-platform,edx-solutions/edx-platform,xinjiguaike/edx-platform,atsolakid/edx-platform,JioEducation/edx-platform,Edraak/edraak-platform,cselis86/edx-platform,unicri/edx-platform,stvstnfrd/edx-platform,mahendra-r/edx-platform,xuxiao19910803/edx,etzhou/edx-platform,leansoft/edx-platform,rue89-tech/edx-platform,J861449197/edx-platform,dsajkl/123,valtech-mooc/edx-platform,JioEducation/edx-platform,olexiim/edx-platform,iivic/BoiseStateX,shubhdev/edx-platform,bitifirefly/edx-platform,BehavioralInsightsTeam/edx-platform,OmarIthawi/edx-platform,louyihua/edx-platform,gsehub/edx-platform,nanolearningllc/edx-platform-cypress,sudheerchintala/LearnEraPlatForm,morenopc/edx-platform,edx/edx-platform,waheedahmed/edx-platform,adoosii/edx-platform,martynovp/edx-platform,10clouds/edx-platform,tiagochiavericosta/edx-platform,benpatterson/edx-platform,RPI-OPENEDX/edx-platform,alu042/edx-platform,mbareta/edx-platform-ft,devs1991/test_edx_docmode,playm2mboy/edx-platform,synergeticsedx/deployment-wipro,devs1991/test_edx_docmode,AkA84/edx-platform,shurihell/testasia,shurihell/testasia,Kalyzee/edx-platform,ovnicraft/edx-platform,shashank971/edx-platform,hkawasaki/kawasaki-aio8-1,olexiim/edx-platform,MSOpenTech/edx-platform,UOMx/edx-platform,LICEF/edx-platform,rhndg/openedx,etzhou/edx-platform,MSOpenTech/edx-platform,devs1991/test_edx_docmode,nanolearningllc/edx-platform-cypress-2,shubhdev/edx-platform,beni55/edx-platform,leansoft/edx-platform,jazztpt/edx-platform,LearnEra/LearnEraPlaftform,IndonesiaX/edx-platform,zofuthan/edx-platform,Edraak/edx-platform,ak2703/edx-platform,MSOpenTech/edx-platform,jonathan-beard/edx-platform,zerobatu/edx-platform,4eek/edx-platform,dsajkl/reqiop,hamzehd/edx-platform,xinjiguaike/edx-platform,AkA84/edx-platform,jolyonb/edx-platform,synergeticsedx/deployment-wipro,martynovp/edx-platform,DefyVentures/edx-platform,pomegranited/edx-platform,waheedahmed/edx-platform,vismartltd/edx-platform,pepeportela/edx-platform,ubc/edx-platform,shubhdev/openedx,waheedahmed/edx-platform,jamiefolsom/edx-platform,dkarakats/edx-platform,openfun/edx-platform,jolyonb/edx-platform,hastexo/edx-platform,chand3040/cloud_that,jswope00/griffinx,motion2015/edx-platform,Edraak/circleci-edx-platform,philanthropy-u/edx-platform,ampax/edx-platform,chrisndodge/edx-platform,alu042/edx-platform,prarthitm/edxplatform,TeachAtTUM/edx-platform,hastexo/edx-platform,jazztpt/edx-platform,antoviaque/edx-platform,prarthitm/edxplatform,zofuthan/edx-platform,atsolakid/edx-platform,ZLLab-Mooc/edx-platform,ak2703/edx-platform,Unow/edx-platform,cpennington/edx-platform,yokose-ks/edx-platform,Shrhawk/edx-platform,peterm-itr/edx-platform,valtech-mooc/edx-platform,jonathan-beard/edx-platform,cecep-edu/edx-platform,sameetb-cuelogic/edx-platform-test,MakeHer/edx-platform,angelapper/edx-platform,doismellburning/edx-platform,Lektorium-LLC/edx-p
latform,knehez/edx-platform,yokose-ks/edx-platform,knehez/edx-platform,mjg2203/edx-platform-seas,rhndg/openedx,kxliugang/edx-platform,ESOedX/edx-platform,kamalx/edx-platform,Kalyzee/edx-platform,OmarIthawi/edx-platform,wwj718/ANALYSE,vasyarv/edx-platform,zofuthan/edx-platform,y12uc231/edx-platform,xuxiao19910803/edx-platform,antoviaque/edx-platform,philanthropy-u/edx-platform,Unow/edx-platform,angelapper/edx-platform,Lektorium-LLC/edx-platform,rismalrv/edx-platform,nttks/edx-platform,jazkarta/edx-platform-for-isc,mbareta/edx-platform-ft,procangroup/edx-platform,jbassen/edx-platform,LICEF/edx-platform,wwj718/edx-platform,teltek/edx-platform,LearnEra/LearnEraPlaftform,mjg2203/edx-platform-seas,Stanford-Online/edx-platform,zubair-arbi/edx-platform,Stanford-Online/edx-platform,openfun/edx-platform,martynovp/edx-platform,arbrandes/edx-platform,shabab12/edx-platform,dkarakats/edx-platform,alu042/edx-platform,B-MOOC/edx-platform,Edraak/circleci-edx-platform,motion2015/edx-platform,mtlchun/edx,procangroup/edx-platform,pelikanchik/edx-platform,mtlchun/edx,morenopc/edx-platform,polimediaupv/edx-platform,jazkarta/edx-platform,vasyarv/edx-platform,shubhdev/edxOnBaadal,WatanabeYasumasa/edx-platform,miptliot/edx-platform,eemirtekin/edx-platform,jonathan-beard/edx-platform,kmoocdev2/edx-platform,fly19890211/edx-platform,ubc/edx-platform,jazkarta/edx-platform-for-isc,inares/edx-platform,jbassen/edx-platform,nanolearning/edx-platform,simbs/edx-platform,romain-li/edx-platform,Ayub-Khan/edx-platform,jbzdak/edx-platform,arbrandes/edx-platform,UXE/local-edx,unicri/edx-platform,utecuy/edx-platform,Edraak/circleci-edx-platform,vikas1885/test1,hastexo/edx-platform,teltek/edx-platform,deepsrijit1105/edx-platform,Edraak/edx-platform,vasyarv/edx-platform,hkawasaki/kawasaki-aio8-2,tanmaykm/edx-platform,edx/edx-platform,gymnasium/edx-platform,xingyepei/edx-platform,franosincic/edx-platform,JioEducation/edx-platform,beni55/edx-platform,jjmiranda/edx-platform,unicri/edx-platform,chrisndodge/edx-platform,Livit/Livit.Learn.EdX,eduNEXT/edunext-platform,solashirai/edx-platform,J861449197/edx-platform,SravanthiSinha/edx-platform,torchingloom/edx-platform,B-MOOC/edx-platform,appliedx/edx-platform,mcgachey/edx-platform,jzoldak/edx-platform,xinjiguaike/edx-platform,shashank971/edx-platform,proversity-org/edx-platform,eduNEXT/edx-platform,bdero/edx-platform,fintech-circle/edx-platform,chauhanhardik/populo,edx/edx-platform,jazkarta/edx-platform,kmoocdev2/edx-platform,chauhanhardik/populo_2,ubc/edx-platform,edry/edx-platform,nikolas/edx-platform,vikas1885/test1,amir-qayyum-khan/edx-platform,appliedx/edx-platform,CredoReference/edx-platform,procangroup/edx-platform,ampax/edx-platform-backup,antoviaque/edx-platform,hkawasaki/kawasaki-aio8-0,shubhdev/openedx,mbareta/edx-platform-ft,franosincic/edx-platform,edry/edx-platform,jruiperezv/ANALYSE,cselis86/edx-platform,cognitiveclass/edx-platform,jruiperezv/ANALYSE,alexthered/kienhoc-platform,ampax/edx-platform-backup,pomegranited/edx-platform,solashirai/edx-platform,Endika/edx-platform,iivic/BoiseStateX,zhenzhai/edx-platform,sameetb-cuelogic/edx-platform-test,synergeticsedx/deployment-wipro,eemirtekin/edx-platform,JCBarahona/edX,kmoocdev2/edx-platform,nanolearningllc/edx-platform-cypress,RPI-OPENEDX/edx-platform,chauhanhardik/populo_2,auferack08/edx-platform,tiagochiavericosta/edx-platform,torchingloom/edx-platform,zadgroup/edx-platform,kxliugang/edx-platform,nagyistoce/edx-platform,pomegranited/edx-platform,msegado/edx-platform,beni55/edx-platform,beacloudgenius/edx-platform,Semi-global/
edx-platform,Ayub-Khan/edx-platform,sudheerchintala/LearnEraPlatForm,openfun/edx-platform,kxliugang/edx-platform,jolyonb/edx-platform,kursitet/edx-platform,jruiperezv/ANALYSE,doganov/edx-platform,Endika/edx-platform,bigdatauniversity/edx-platform,dsajkl/123,appsembler/edx-platform,Softmotions/edx-platform,louyihua/edx-platform,jbassen/edx-platform,Edraak/edx-platform,RPI-OPENEDX/edx-platform,nttks/jenkins-test,MakeHer/edx-platform,eduNEXT/edunext-platform,philanthropy-u/edx-platform,procangroup/edx-platform,bitifirefly/edx-platform,shashank971/edx-platform,utecuy/edx-platform,ahmadio/edx-platform,LearnEra/LearnEraPlaftform,nttks/edx-platform,fly19890211/edx-platform,chudaol/edx-platform,alu042/edx-platform,pku9104038/edx-platform,beacloudgenius/edx-platform,rismalrv/edx-platform,miptliot/edx-platform,mtlchun/edx,y12uc231/edx-platform,ampax/edx-platform,arbrandes/edx-platform,utecuy/edx-platform,unicri/edx-platform,stvstnfrd/edx-platform,cpennington/edx-platform,itsjeyd/edx-platform,zubair-arbi/edx-platform,knehez/edx-platform,martynovp/edx-platform,BehavioralInsightsTeam/edx-platform,cyanna/edx-platform,chudaol/edx-platform,a-parhom/edx-platform,Shrhawk/edx-platform,Ayub-Khan/edx-platform,zerobatu/edx-platform,pepeportela/edx-platform,mahendra-r/edx-platform,sudheerchintala/LearnEraPlatForm,waheedahmed/edx-platform,carsongee/edx-platform,mjirayu/sit_academy,eduNEXT/edunext-platform,nanolearningllc/edx-platform-cypress-2,Lektorium-LLC/edx-platform,vikas1885/test1,eestay/edx-platform,shubhdev/edx-platform,dkarakats/edx-platform,ZLLab-Mooc/edx-platform,xuxiao19910803/edx,zubair-arbi/edx-platform,DNFcode/edx-platform,sudheerchintala/LearnEraPlatForm,fly19890211/edx-platform,iivic/BoiseStateX,ahmedaljazzar/edx-platform,ubc/edx-platform,caesar2164/edx-platform,kmoocdev/edx-platform,beacloudgenius/edx-platform,4eek/edx-platform,hkawasaki/kawasaki-aio8-1,jamesblunt/edx-platform,gymnasium/edx-platform,nagyistoce/edx-platform,zhenzhai/edx-platform,openfun/edx-platform,jswope00/GAI,EDUlib/edx-platform,mushtaqak/edx-platform,JCBarahona/edX,analyseuc3m/ANALYSE-v1,dkarakats/edx-platform,zadgroup/edx-platform,yokose-ks/edx-platform,Edraak/circleci-edx-platform,nttks/jenkins-test,cpennington/edx-platform,playm2mboy/edx-platform,ampax/edx-platform-backup,xuxiao19910803/edx,jelugbo/tundex,mahendra-r/edx-platform,wwj718/edx-platform,mushtaqak/edx-platform,nanolearningllc/edx-platform-cypress-2,alexthered/kienhoc-platform,chudaol/edx-platform,fintech-circle/edx-platform,shurihell/testasia,gymnasium/edx-platform,shubhdev/openedx,mushtaqak/edx-platform,JCBarahona/edX,defance/edx-platform,vismartltd/edx-platform,andyzsf/edx,jazkarta/edx-platform,eemirtekin/edx-platform,bitifirefly/edx-platform,alexthered/kienhoc-platform,mjirayu/sit_academy,4eek/edx-platform,Edraak/edraak-platform,doganov/edx-platform,eestay/edx-platform,RPI-OPENEDX/edx-platform,cecep-edu/edx-platform,pelikanchik/edx-platform,chrisndodge/edx-platform,msegado/edx-platform,andyzsf/edx,bigdatauniversity/edx-platform,edry/edx-platform,proversity-org/edx-platform,jamiefolsom/edx-platform,valtech-mooc/edx-platform,stvstnfrd/edx-platform,mushtaqak/edx-platform,chudaol/edx-platform,romain-li/edx-platform,itsjeyd/edx-platform,sameetb-cuelogic/edx-platform-test,jolyonb/edx-platform,cyanna/edx-platform,romain-li/edx-platform,pelikanchik/edx-platform,hkawasaki/kawasaki-aio8-2,ak2703/edx-platform,lduarte1991/edx-platform,10clouds/edx-platform,Kalyzee/edx-platform,zerobatu/edx-platform,jamesblunt/edx-platform,Kalyzee/edx-platform,nikolas/edx-platform,10clouds/
edx-platform,LearnEra/LearnEraPlaftform,caesar2164/edx-platform,devs1991/test_edx_docmode,kamalx/edx-platform,caesar2164/edx-platform,jswope00/griffinx,IONISx/edx-platform,mushtaqak/edx-platform,rismalrv/edx-platform,LICEF/edx-platform,angelapper/edx-platform,nttks/jenkins-test,SravanthiSinha/edx-platform,doismellburning/edx-platform,martynovp/edx-platform,JCBarahona/edX,cecep-edu/edx-platform,zerobatu/edx-platform,vikas1885/test1,TeachAtTUM/edx-platform,chand3040/cloud_that,alexthered/kienhoc-platform,doismellburning/edx-platform,hkawasaki/kawasaki-aio8-2,jazkarta/edx-platform,don-github/edx-platform,antonve/s4-project-mooc,jbzdak/edx-platform,DefyVentures/edx-platform,AkA84/edx-platform,IONISx/edx-platform,proversity-org/edx-platform,jjmiranda/edx-platform,jazkarta/edx-platform-for-isc,Stanford-Online/edx-platform,longmen21/edx-platform,hkawasaki/kawasaki-aio8-0,bdero/edx-platform,don-github/edx-platform,jbassen/edx-platform,Endika/edx-platform,shurihell/testasia,MakeHer/edx-platform,Livit/Livit.Learn.EdX,mahendra-r/edx-platform,utecuy/edx-platform,jswope00/GAI,DNFcode/edx-platform,abdoosh00/edraak,mcgachey/edx-platform,pabloborrego93/edx-platform,eestay/edx-platform,itsjeyd/edx-platform,beacloudgenius/edx-platform,simbs/edx-platform,Livit/Livit.Learn.EdX,pku9104038/edx-platform,polimediaupv/edx-platform,dcosentino/edx-platform,ampax/edx-platform,jjmiranda/edx-platform,kmoocdev/edx-platform,motion2015/a3,jazztpt/edx-platform,jswope00/griffinx,carsongee/edx-platform,gsehub/edx-platform,cognitiveclass/edx-platform,appsembler/edx-platform,ubc/edx-platform,EDUlib/edx-platform,naresh21/synergetics-edx-platform,zofuthan/edx-platform,amir-qayyum-khan/edx-platform,prarthitm/edxplatform,nanolearning/edx-platform,DefyVentures/edx-platform,zhenzhai/edx-platform,longmen21/edx-platform,hkawasaki/kawasaki-aio8-1,arifsetiawan/edx-platform,arbrandes/edx-platform,hkawasaki/kawasaki-aio8-2,simbs/edx-platform,edx-solutions/edx-platform,cyanna/edx-platform,IndonesiaX/edx-platform,DefyVentures/edx-platform,gymnasium/edx-platform,hamzehd/edx-platform,motion2015/a3,nanolearningllc/edx-platform-cypress,chand3040/cloud_that,4eek/edx-platform,andyzsf/edx,bigdatauniversity/edx-platform,longmen21/edx-platform,mjirayu/sit_academy,louyihua/edx-platform,marcore/edx-platform,jswope00/griffinx,olexiim/edx-platform,xuxiao19910803/edx-platform,CourseTalk/edx-platform,cpennington/edx-platform,zadgroup/edx-platform,Ayub-Khan/edx-platform,auferack08/edx-platform,leansoft/edx-platform,kmoocdev2/edx-platform,rue89-tech/edx-platform,mjirayu/sit_academy,motion2015/a3,shubhdev/openedx,jbzdak/edx-platform,romain-li/edx-platform,bitifirefly/edx-platform,sameetb-cuelogic/edx-platform-test,ferabra/edx-platform,msegado/edx-platform,Kalyzee/edx-platform,SivilTaram/edx-platform,cselis86/edx-platform,zhenzhai/edx-platform,tanmaykm/edx-platform,analyseuc3m/ANALYSE-v1,benpatterson/edx-platform,valtech-mooc/edx-platform,JioEducation/edx-platform,andyzsf/edx,mjirayu/sit_academy,jazkarta/edx-platform-for-isc,polimediaupv/edx-platform,prarthitm/edxplatform,adoosii/edx-platform,ampax/edx-platform,bdero/edx-platform,franosincic/edx-platform,appliedx/edx-platform,don-github/edx-platform,franosincic/edx-platform,gsehub/edx-platform,iivic/BoiseStateX,mcgachey/edx-platform,rismalrv/edx-platform,msegado/edx-platform,ESOedX/edx-platform,shurihell/testasia,B-MOOC/edx-platform,inares/edx-platform,nagyistoce/edx-platform,jamesblunt/edx-platform,xinjiguaike/edx-platform,chauhanhardik/populo_2,UOMx/edx-platform,dsajkl/123,olexiim/edx-platform,motion2015/edx
-platform,shabab12/edx-platform,atsolakid/edx-platform,hamzehd/edx-platform,carsongee/edx-platform,simbs/edx-platform,jazztpt/edx-platform,DefyVentures/edx-platform,EDUlib/edx-platform,fly19890211/edx-platform,kmoocdev2/edx-platform,olexiim/edx-platform,amir-qayyum-khan/edx-platform,ampax/edx-platform-backup,nttks/edx-platform,jazkarta/edx-platform,TeachAtTUM/edx-platform,beacloudgenius/edx-platform,itsjeyd/edx-platform,halvertoluke/edx-platform,ovnicraft/edx-platform,don-github/edx-platform,amir-qayyum-khan/edx-platform,J861449197/edx-platform,mtlchun/edx,raccoongang/edx-platform,shabab12/edx-platform,ovnicraft/edx-platform,defance/edx-platform,pelikanchik/edx-platform,jelugbo/tundex,ovnicraft/edx-platform,proversity-org/edx-platform,a-parhom/edx-platform,alexthered/kienhoc-platform,ZLLab-Mooc/edx-platform,rhndg/openedx,mcgachey/edx-platform,zadgroup/edx-platform,nanolearning/edx-platform,ferabra/edx-platform,atsolakid/edx-platform,longmen21/edx-platform,defance/edx-platform,lduarte1991/edx-platform,CourseTalk/edx-platform,dsajkl/123,etzhou/edx-platform,mitocw/edx-platform,rue89-tech/edx-platform,chauhanhardik/populo,vasyarv/edx-platform,edx/edx-platform,Softmotions/edx-platform,angelapper/edx-platform,tanmaykm/edx-platform,DNFcode/edx-platform,pabloborrego93/edx-platform,mitocw/edx-platform,motion2015/edx-platform,doismellburning/edx-platform,zubair-arbi/edx-platform,nanolearningllc/edx-platform-cypress,Semi-global/edx-platform,LICEF/edx-platform,chand3040/cloud_that,naresh21/synergetics-edx-platform,arifsetiawan/edx-platform,wwj718/ANALYSE,UXE/local-edx,mtlchun/edx,mahendra-r/edx-platform,polimediaupv/edx-platform,Edraak/edraak-platform,RPI-OPENEDX/edx-platform,devs1991/test_edx_docmode,kmoocdev/edx-platform,halvertoluke/edx-platform,IONISx/edx-platform,ahmadiga/min_edx,waheedahmed/edx-platform,stvstnfrd/edx-platform,Shrhawk/edx-platform,appliedx/edx-platform,ahmadio/edx-platform,Semi-global/edx-platform,ESOedX/edx-platform,jbzdak/edx-platform,nagyistoce/edx-platform,J861449197/edx-platform,jelugbo/tundex,wwj718/ANALYSE,halvertoluke/edx-platform,mbareta/edx-platform-ft,cecep-edu/edx-platform,kursitet/edx-platform,leansoft/edx-platform,antonve/s4-project-mooc,xinjiguaike/edx-platform,y12uc231/edx-platform,jruiperezv/ANALYSE,jonathan-beard/edx-platform,vismartltd/edx-platform,shabab12/edx-platform,miptliot/edx-platform,adoosii/edx-platform,dsajkl/reqiop,appsembler/edx-platform,peterm-itr/edx-platform,edry/edx-platform,carsongee/edx-platform,antoviaque/edx-platform,marcore/edx-platform,nanolearning/edx-platform,pabloborrego93/edx-platform,inares/edx-platform,a-parhom/edx-platform,ahmadio/edx-platform,mitocw/edx-platform,shubhdev/edxOnBaadal,morenopc/edx-platform,MSOpenTech/edx-platform,hamzehd/edx-platform,ahmadiga/min_edx,doismellburning/edx-platform,nagyistoce/edx-platform,kursitet/edx-platform,motion2015/a3,philanthropy-u/edx-platform,raccoongang/edx-platform,fintech-circle/edx-platform,kamalx/edx-platform,romain-li/edx-platform,sameetb-cuelogic/edx-platform-test,mitocw/edx-platform,WatanabeYasumasa/edx-platform,xingyepei/edx-platform,knehez/edx-platform,mjg2203/edx-platform-seas,xuxiao19910803/edx-platform,appliedx/edx-platform,dkarakats/edx-platform,kursitet/edx-platform,beni55/edx-platform,iivic/BoiseStateX,jzoldak/edx-platform,nikolas/edx-platform,eestay/edx-platform,SivilTaram/edx-platform,chauhanhardik/populo,hkawasaki/kawasaki-aio8-1,Livit/Livit.Learn.EdX,EDUlib/edx-platform,Unow/edx-platform,pomegranited/edx-platform,teltek/edx-platform,UXE/local-edx,ahmadio/edx-platform,ovnicraf
t/edx-platform,cognitiveclass/edx-platform,SivilTaram/edx-platform,hastexo/edx-platform,fintech-circle/edx-platform,jamiefolsom/edx-platform,shubhdev/edx-platform,caesar2164/edx-platform,eemirtekin/edx-platform,devs1991/test_edx_docmode,antonve/s4-project-mooc,eduNEXT/edx-platform,marcore/edx-platform,nttks/jenkins-test,solashirai/edx-platform,jjmiranda/edx-platform,halvertoluke/edx-platform,rhndg/openedx,solashirai/edx-platform,jswope00/griffinx,jonathan-beard/edx-platform,CredoReference/edx-platform,cognitiveclass/edx-platform,hmcmooc/muddx-platform,dcosentino/edx-platform,Semi-global/edx-platform,shubhdev/openedx,deepsrijit1105/edx-platform,hkawasaki/kawasaki-aio8-0,fly19890211/edx-platform,benpatterson/edx-platform,mcgachey/edx-platform,lduarte1991/edx-platform,MakeHer/edx-platform,adoosii/edx-platform,ahmedaljazzar/edx-platform,teltek/edx-platform,Lektorium-LLC/edx-platform,vasyarv/edx-platform,zerobatu/edx-platform,SravanthiSinha/edx-platform,morenopc/edx-platform,eduNEXT/edx-platform,ZLLab-Mooc/edx-platform,SivilTaram/edx-platform,MSOpenTech/edx-platform,BehavioralInsightsTeam/edx-platform,MakeHer/edx-platform,jswope00/GAI,deepsrijit1105/edx-platform,UOMx/edx-platform,CredoReference/edx-platform,longmen21/edx-platform,Edraak/edraak-platform,xingyepei/edx-platform,vikas1885/test1,torchingloom/edx-platform,abdoosh00/edraak,ZLLab-Mooc/edx-platform,kxliugang/edx-platform,doganov/edx-platform,playm2mboy/edx-platform,ESOedX/edx-platform,wwj718/edx-platform,valtech-mooc/edx-platform,chauhanhardik/populo_2,jamiefolsom/edx-platform,kmoocdev/edx-platform,Shrhawk/edx-platform,Softmotions/edx-platform,edry/edx-platform,zadgroup/edx-platform,etzhou/edx-platform,zhenzhai/edx-platform,auferack08/edx-platform,rue89-tech/edx-platform,WatanabeYasumasa/edx-platform,nttks/edx-platform,yokose-ks/edx-platform,raccoongang/edx-platform,utecuy/edx-platform,arifsetiawan/edx-platform,SravanthiSinha/edx-platform,shashank971/edx-platform,shubhdev/edxOnBaadal,playm2mboy/edx-platform,OmarIthawi/edx-platform,J861449197/edx-platform,inares/edx-platform,tiagochiavericosta/edx-platform,ahmadiga/min_edx,UXE/local-edx,shubhdev/edxOnBaadal,jzoldak/edx-platform,wwj718/ANALYSE,kmoocdev/edx-platform,vismartltd/edx-platform,ahmadiga/min_edx
|
Add a command to transfer students from one course to another.
Move only active students and also update cert item for verified students.
|
from optparse import make_option
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from student.models import CourseEnrollment
from shoppingcart.models import CertificateItem
class Command(BaseCommand):
help = """
This command takes two course ids as input and transfers
all students enrolled in one course into the other. This will
remove them from the first class and enroll them in the second
class in the same mode as the first one, e.g. honor, verified,
audit.
example:
# Transfer students from the old demoX class to a new one.
manage.py ... transfer_students -f edX/Open_DemoX/edx_demo_course -t edX/Open_DemoX/new_demoX
"""
option_list = BaseCommand.option_list + (
make_option('-f', '--from',
metavar='SOURCE_COURSE',
dest='source_course',
help='The course to transfer students from.'),
make_option('-t', '--to',
metavar='DEST_COURSE',
dest='dest_course',
help='The new course to enroll the student into.'),
)
def handle(self, *args, **options):
source = options['source_course']
dest = options['dest_course']
source_students = User.objects.filter(
courseenrollment__course_id=source,
courseenrollment__is_active=True)
for user in source_students:
print("Moving {}.".format(user.username))
# Find the old enrollment.
enrollment = CourseEnrollment.objects.get(user=user,
course_id=source)
# Move the Student between the classes.
mode = enrollment.mode
CourseEnrollment.unenroll(user, source)
CourseEnrollment.enroll(user, dest, mode=mode)
if mode == 'verified':
try:
certificate_item = CertificateItem.objects.get(
course_id=source,
course_enrollment=enrollment)
except CertificateItem.DoesNotExist:
print("No certificate for {}".format(user))
continue
new_enrollment = CourseEnrollment.objects.get(user=user,
course_id=dest)
certificate_item.course_id = dest
certificate_item.course_enrollment = new_enrollment
certificate_item.save()
|
<commit_before><commit_msg>Add a command to transfer students from one course to another.
Move only active students and also update cert item for verified students.<commit_after>
|
from optparse import make_option
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from student.models import CourseEnrollment
from shoppingcart.models import CertificateItem
class Command(BaseCommand):
help = """
This command takes two course ids as input and transfers
all students enrolled in one course into the other. This will
remove them from the first class and enroll them in the second
class in the same mode as the first one, e.g. honor, verified,
audit.
example:
# Transfer students from the old demoX class to a new one.
manage.py ... transfer_students -f edX/Open_DemoX/edx_demo_course -t edX/Open_DemoX/new_demoX
"""
option_list = BaseCommand.option_list + (
make_option('-f', '--from',
metavar='SOURCE_COURSE',
dest='source_course',
help='The course to transfer students from.'),
make_option('-t', '--to',
metavar='DEST_COURSE',
dest='dest_course',
help='The new course to enroll the student into.'),
)
def handle(self, *args, **options):
source = options['source_course']
dest = options['dest_course']
source_students = User.objects.filter(
courseenrollment__course_id=source,
courseenrollment__is_active=True)
for user in source_students:
print("Moving {}.".format(user.username))
# Find the old enrollment.
enrollment = CourseEnrollment.objects.get(user=user,
course_id=source)
# Move the Student between the classes.
mode = enrollment.mode
CourseEnrollment.unenroll(user, source)
CourseEnrollment.enroll(user, dest, mode=mode)
if mode == 'verified':
try:
certificate_item = CertificateItem.objects.get(
course_id=source,
course_enrollment=enrollment)
except CertificateItem.DoesNotExist:
print("No certificate for {}".format(user))
continue
new_enrollment = CourseEnrollment.objects.get(user=user,
course_id=dest)
certificate_item.course_id = dest
certificate_item.course_enrollment = new_enrollment
certificate_item.save()
|
Add a command to transfer students from one course to another.
Move only active students and also update cert item for verified students.from optparse import make_option
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from student.models import CourseEnrollment
from shoppingcart.models import CertificateItem
class Command(BaseCommand):
help = """
This command takes two course ids as input and transfers
all students enrolled in one course into the other. This will
remove them from the first class and enroll them in the second
class in the same mode as the first one, e.g. honor, verified,
audit.
example:
# Transfer students from the old demoX class to a new one.
manage.py ... transfer_students -f edX/Open_DemoX/edx_demo_course -t edX/Open_DemoX/new_demoX
"""
option_list = BaseCommand.option_list + (
make_option('-f', '--from',
metavar='SOURCE_COURSE',
dest='source_course',
help='The course to transfer students from.'),
make_option('-t', '--to',
metavar='DEST_COURSE',
dest='dest_course',
help='The new course to enroll the student into.'),
)
def handle(self, *args, **options):
source = options['source_course']
dest = options['dest_course']
source_students = User.objects.filter(
courseenrollment__course_id=source,
courseenrollment__is_active=True)
for user in source_students:
print("Moving {}.".format(user.username))
# Find the old enrollment.
enrollment = CourseEnrollment.objects.get(user=user,
course_id=source)
# Move the Student between the classes.
mode = enrollment.mode
CourseEnrollment.unenroll(user, source)
CourseEnrollment.enroll(user, dest, mode=mode)
if mode == 'verified':
try:
certificate_item = CertificateItem.objects.get(
course_id=source,
course_enrollment=enrollment)
except CertificateItem.DoesNotExist:
print("No certificate for {}".format(user))
continue
new_enrollment = CourseEnrollment.objects.get(user=user,
course_id=dest)
certificate_item.course_id = dest
certificate_item.course_enrollment = new_enrollment
certificate_item.save()
|
<commit_before><commit_msg>Add a command to transfer students from one course to another.
Move only active students and also update cert item for verified students.<commit_after>from optparse import make_option
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from student.models import CourseEnrollment
from shoppingcart.models import CertificateItem
class Command(BaseCommand):
help = """
This command takes two course ids as input and transfers
all students enrolled in one course into the other. This will
remove them from the first class and enroll them in the second
class in the same mode as the first one, e.g. honor, verified,
audit.
example:
# Transfer students from the old demoX class to a new one.
manage.py ... transfer_students -f edX/Open_DemoX/edx_demo_course -t edX/Open_DemoX/new_demoX
"""
option_list = BaseCommand.option_list + (
make_option('-f', '--from',
metavar='SOURCE_COURSE',
dest='source_course',
help='The course to transfer students from.'),
make_option('-t', '--to',
metavar='DEST_COURSE',
dest='dest_course',
help='The new course to enroll the student into.'),
)
def handle(self, *args, **options):
source = options['source_course']
dest = options['dest_course']
source_students = User.objects.filter(
courseenrollment__course_id=source,
courseenrollment__is_active=True)
for user in source_students:
print("Moving {}.".format(user.username))
# Find the old enrollment.
enrollment = CourseEnrollment.objects.get(user=user,
course_id=source)
# Move the Student between the classes.
mode = enrollment.mode
CourseEnrollment.unenroll(user, source)
CourseEnrollment.enroll(user, dest, mode=mode)
if mode == 'verified':
try:
certificate_item = CertificateItem.objects.get(
course_id=source,
course_enrollment=enrollment)
except CertificateItem.DoesNotExist:
print("No certificate for {}".format(user))
continue
new_enrollment = CourseEnrollment.objects.get(user=user,
course_id=dest)
certificate_item.course_id = dest
certificate_item.course_enrollment = new_enrollment
certificate_item.save()
|
|
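Besides the manage.py invocation shown in the command's help text, the same transfer can be driven programmatically. A sketch under the assumption that the command is installed as above and that Django's standard call_command accepts the option dest names as keyword arguments; the course ids are the ones from the help text, not real data:

from django.core.management import call_command

# Equivalent to:
#   manage.py transfer_students -f edX/Open_DemoX/edx_demo_course -t edX/Open_DemoX/new_demoX
call_command('transfer_students',
             source_course='edX/Open_DemoX/edx_demo_course',
             dest_course='edX/Open_DemoX/new_demoX')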
ccdcb20abc5f0f33ab60d4fa976a8b4bd856bb0e
|
datascripts/get_PHX_city.py
|
datascripts/get_PHX_city.py
|
import json
import pymysql
def GetReviewsByID(business_id):
conn = pymysql.Connect(host='localhost', user='root', passwd='',charset='utf8', db='yelpdb')
cursor = conn.cursor()
cursor.execute( "SELECT stars, date FROM review WHERE business_id = %s", business_id )
conn.commit()
result = cursor.fetchall()
return result
def GetAZResturants():
conn = pymysql.Connect(host='localhost', user='root', passwd='',charset='utf8', db='yelpdb')
cursor = conn.cursor()
cursor.execute( "SELECT business_id, name, latitude, longitude, stars FROM business where city='Phoenix' and review_count > 5" )
conn.commit()
results = cursor.fetchall()
conn.close()
return results
if __name__ == '__main__':
r = GetAZResturants()
ids = []
finalResults = {}
for i in r:
finalResults[i[0]] = GetReviewsByID(i[0])
f = open('az100.json','w')
f.write(json.dumps(r, sort_keys=True, indent=4))
f.close()
f = open('reviews.json','w')
f.write(json.dumps(finalResults, sort_keys=True, indent=4))
f.close()
|
Add script to get all Phoenix City data
|
Add script to get all Phoenix City data
|
Python
|
mit
|
zhewang/restauranthunter,zhewang/restauranthunter,zhewang/restauranthunter
|
Add script to get all Phoenix City data
|
import json
import pymysql
def GetReviewsByID(business_id):
conn = pymysql.Connect(host='localhost', user='root', passwd='',charset='utf8', db='yelpdb')
cursor = conn.cursor()
cursor.execute( "SELECT stars, date FROM review WHERE business_id = %s", business_id )
conn.commit()
result = cursor.fetchall()
return result
def GetAZResturants():
conn = pymysql.Connect(host='localhost', user='root', passwd='',charset='utf8', db='yelpdb')
cursor = conn.cursor()
cursor.execute( "SELECT business_id, name, latitude, longitude, stars FROM business where city='Phoenix' and review_count > 5" )
conn.commit()
results = cursor.fetchall()
conn.close()
return results
if __name__ == '__main__':
r = GetAZResturants()
ids = []
finalResults = {}
for i in r:
finalResults[i[0]] = GetReviewsByID(i[0])
f = open('az100.json','w')
f.write(json.dumps(r, sort_keys=True, indent=4))
f.close()
f = open('reviews.json','w')
f.write(json.dumps(finalResults, sort_keys=True, indent=4))
f.close()
|
<commit_before><commit_msg>Add script to get all Phoenix City data<commit_after>
|
import json
import pymysql
def GetReviewsByID(business_id):
conn = pymysql.Connect(host='localhost', user='root', passwd='',charset='utf8', db='yelpdb')
cursor = conn.cursor()
cursor.execute( "SELECT stars, date FROM review WHERE business_id = %s", business_id )
conn.commit()
result = cursor.fetchall()
return result
def GetAZResturants():
conn = pymysql.Connect(host='localhost', user='root', passwd='',charset='utf8', db='yelpdb')
cursor = conn.cursor()
cursor.execute( "SELECT business_id, name, latitude, longitude, stars FROM business where city='Phoenix' and review_count > 5" )
conn.commit()
results = cursor.fetchall()
conn.close()
return results
if __name__ == '__main__':
r = GetAZResturants()
ids = []
finalResults = {}
for i in r:
finalResults[i[0]] = GetReviewsByID(i[0])
f = open('az100.json','w')
f.write(json.dumps(r, sort_keys=True, indent=4))
f.close()
f = open('reviews.json','w')
f.write(json.dumps(finalResults, sort_keys=True, indent=4))
f.close()
|
Add script to get all Phoenix City dataimport json
import pymysql
def GetReviewsByID(business_id):
conn = pymysql.Connect(host='localhost', user='root', passwd='',charset='utf8', db='yelpdb')
cursor = conn.cursor()
cursor.execute( "SELECT stars, date FROM review WHERE business_id = %s", business_id )
conn.commit()
result = cursor.fetchall()
return result
def GetAZResturants():
conn = pymysql.Connect(host='localhost', user='root', passwd='',charset='utf8', db='yelpdb')
cursor = conn.cursor()
cursor.execute( "SELECT business_id, name, latitude, longitude, stars FROM business where city='Phoenix' and review_count > 5" )
conn.commit()
results = cursor.fetchall()
conn.close()
return results
if __name__ == '__main__':
r = GetAZResturants()
ids = []
finalResults = {}
for i in r:
finalResults[i[0]] = GetReviewsByID(i[0])
f = open('az100.json','w')
f.write(json.dumps(r, sort_keys=True, indent=4))
f.close()
f = open('reviews.json','w')
f.write(json.dumps(finalResults, sort_keys=True, indent=4))
f.close()
|
<commit_before><commit_msg>Add script to get all Phoenix City data<commit_after>import json
import pymysql
def GetReviewsByID(business_id):
conn = pymysql.Connect(host='localhost', user='root', passwd='',charset='utf8', db='yelpdb')
cursor = conn.cursor()
cursor.execute( "SELECT stars, date FROM review WHERE business_id = %s", business_id )
conn.commit()
result = cursor.fetchall()
return result
def GetAZResturants():
conn = pymysql.Connect(host='localhost', user='root', passwd='',charset='utf8', db='yelpdb')
cursor = conn.cursor()
cursor.execute( "SELECT business_id, name, latitude, longitude, stars FROM business where city='Phoenix' and review_count > 5" )
conn.commit()
results = cursor.fetchall()
conn.close()
return results
if __name__ == '__main__':
r = GetAZResturants()
ids = []
finalResults = {}
for i in r:
finalResults[i[0]] = GetReviewsByID(i[0])
f = open('az100.json','w')
f.write(json.dumps(r, sort_keys=True, indent=4))
f.close()
f = open('reviews.json','w')
f.write(json.dumps(finalResults, sort_keys=True, indent=4))
f.close()
|
|
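One design note on the script above: GetReviewsByID opens and closes a fresh MySQL connection for every business, so the main loop pays a connection round-trip per restaurant. A sketch of the same lookup reusing a single connection (same schema assumed; the function name is invented):

import pymysql

def get_reviews_bulk(business_ids):
    # One connection for the whole batch instead of one per business_id.
    conn = pymysql.Connect(host='localhost', user='root', passwd='',
                           charset='utf8', db='yelpdb')
    cursor = conn.cursor()
    reviews = {}
    for business_id in business_ids:
        cursor.execute(
            "SELECT stars, date FROM review WHERE business_id = %s",
            (business_id,))
        reviews[business_id] = cursor.fetchall()
    conn.close()
    return reviews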
110ce926fa0e460640f3e674623475144f847dbe
|
forge.py
|
forge.py
|
# Generate known SiRF binary data for testing purposes
import sys, struct, math
def nul(n): return chr(0) * n
def frame(mid, body):
payload = chr(mid) + body
n = len(payload)
assert(n < (1 << 11))
xsum = sum(ord(b) for b in payload) & 0xFFFF
header = '\xA0\xA2' + chr(n >> 8) + chr(n & 0xFF)
footer = chr(xsum >> 8) + chr(xsum & 0xFF) + '\xB0\xB3'
return header + payload + footer
# Parameters are standard Python floats
def mid41(lat, lon):
ilat = int(lat * 1e7)
ilon = int(lon * 1e7)
coord_bytes = struct.pack('>ii', ilat, ilon)
msg = nul(20) + coord_bytes + nul(90 - 28)
return frame(41, msg)
def mid66(pdop, hdop, vdop):
data_bytes = struct.pack('>HHH', pdop, hdop, vdop)
msg = nul(6) + data_bytes + nul(2)
return frame(66, msg)
if __name__ == '__main__':
out = sys.stdout
for n in range(30):
lat = n * math.pi / 30
lon = -lat
out.write( mid41(lat, lon) )
out.write( mid66(n, n + 1, n + 5) )
out.write( mid41(200.5, 100.5) )
out.flush()
|
Add script to generate SiRF data for testing
|
Add script to generate SiRF data for testing
|
Python
|
apache-2.0
|
tryan/sirf,tryan/sirf
|
Add script to generate SiRF data for testing
|
# Generate known SiRF binary data for testing purposes
import sys, struct, math
def nul(n): return chr(0) * n
def frame(mid, body):
payload = chr(mid) + body
n = len(payload)
assert(n < (1 << 11))
xsum = sum(ord(b) for b in payload) & 0xFFFF
header = '\xA0\xA2' + chr(n >> 8) + chr(n & 0xFF)
footer = chr(xsum >> 8) + chr(xsum & 0xFF) + '\xB0\xB3'
return header + payload + footer
# Parameters are standard Python floats
def mid41(lat, lon):
ilat = int(lat * 1e7)
ilon = int(lon * 1e7)
coord_bytes = struct.pack('>ii', ilat, ilon)
msg = nul(20) + coord_bytes + nul(90 - 28)
return frame(41, msg)
def mid66(pdop, hdop, vdop):
data_bytes = struct.pack('>HHH', pdop, hdop, vdop)
msg = nul(6) + data_bytes + nul(2)
return frame(66, msg)
if __name__ == '__main__':
out = sys.stdout
for n in range(30):
lat = n * math.pi / 30
lon = -lat
out.write( mid41(lat, lon) )
out.write( mid66(n, n + 1, n + 5) )
out.write( mid41(200.5, 100.5) )
out.flush()
|
<commit_before><commit_msg>Add script to generate SiRF data for testing<commit_after>
|
# Generate known SiRF binary data for testing purposes
import sys, struct, math
def nul(n): return chr(0) * n
def frame(mid, body):
payload = chr(mid) + body
n = len(payload)
assert(n < (1 << 11))
xsum = sum(ord(b) for b in payload) & 0xFFFF
header = '\xA0\xA2' + chr(n >> 8) + chr(n & 0xFF)
footer = chr(xsum >> 8) + chr(xsum & 0xFF) + '\xB0\xB3'
return header + payload + footer
# Parameters are standard Python floats
def mid41(lat, lon):
ilat = int(lat * 1e7)
ilon = int(lon * 1e7)
coord_bytes = struct.pack('>ii', ilat, ilon)
msg = nul(20) + coord_bytes + nul(90 - 28)
return frame(41, msg)
def mid66(pdop, hdop, vdop):
data_bytes = struct.pack('>HHH', pdop, hdop, vdop)
msg = nul(6) + data_bytes + nul(2)
return frame(66, msg)
if __name__ == '__main__':
out = sys.stdout
for n in range(30):
lat = n * math.pi / 30
lon = -lat
out.write( mid41(lat, lon) )
out.write( mid66(n, n + 1, n + 5) )
out.write( mid41(200.5, 100.5) )
out.flush()
|
Add script to generate SiRF data for testing# Generate known SiRF binary data for testing purposes
import sys, struct, math
def nul(n): return chr(0) * n
def frame(mid, body):
payload = chr(mid) + body
n = len(payload)
assert(n < (1 << 11))
xsum = sum(ord(b) for b in payload) & 0xFFFF
header = '\xA0\xA2' + chr(n >> 8) + chr(n & 0xFF)
footer = chr(xsum >> 8) + chr(xsum & 0xFF) + '\xB0\xB3'
return header + payload + footer
# Parameters are standard Python floats
def mid41(lat, lon):
ilat = int(lat * 1e7)
ilon = int(lon * 1e7)
coord_bytes = struct.pack('>ii', ilat, ilon)
msg = nul(20) + coord_bytes + nul(90 - 28)
return frame(41, msg)
def mid66(pdop, hdop, vdop):
data_bytes = struct.pack('>HHH', pdop, hdop, vdop)
msg = nul(6) + data_bytes + nul(2)
return frame(66, msg)
if __name__ == '__main__':
out = sys.stdout
for n in range(30):
lat = n * math.pi / 30
lon = -lat
out.write( mid41(lat, lon) )
out.write( mid66(n, n + 1, n + 5) )
out.write( mid41(200.5, 100.5) )
out.flush()
|
<commit_before><commit_msg>Add script to generate SiRF data for testing<commit_after># Generate known SiRF binary data for testing purposes
import sys, struct, math
def nul(n): return chr(0) * n
def frame(mid, body):
payload = chr(mid) + body
n = len(payload)
assert(n < (1 << 11))
xsum = sum(ord(b) for b in payload) & 0xFFFF
header = '\xA0\xA2' + chr(n >> 8) + chr(n & 0xFF)
footer = chr(xsum >> 8) + chr(xsum & 0xFF) + '\xB0\xB3'
return header + payload + footer
# Parameters are standard Python floats
def mid41(lat, lon):
ilat = int(lat * 1e7)
ilon = int(lon * 1e7)
coord_bytes = struct.pack('>ii', ilat, ilon)
msg = nul(20) + coord_bytes + nul(90 - 28)
return frame(41, msg)
def mid66(pdop, hdop, vdop):
data_bytes = struct.pack('>HHH', pdop, hdop, vdop)
msg = nul(6) + data_bytes + nul(2)
return frame(66, msg)
if __name__ == '__main__':
out = sys.stdout
for n in range(30):
lat = n * math.pi / 30
lon = -lat
out.write( mid41(lat, lon) )
out.write( mid66(n, n + 1, n + 5) )
out.write( mid41(200.5, 100.5) )
out.flush()
|
|
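To sanity-check what forge.py writes, a frame checker can mirror frame() byte for byte. The sketch below makes the same Python 2 string assumptions as the generator and is not from the repository. One hedged note: SiRF protocol documentation describes the checksum as truncated to 15 bits (& 0x7FFF), whereas frame() uses & 0xFFFF; the two agree here because these small, mostly-zero test payloads never sum past 0x8000.

# Python 2 sketch mirroring frame() above.
def check_frame(data):
    assert data[:2] == '\xA0\xA2' and data[-2:] == '\xB0\xB3'
    n = (ord(data[2]) << 8) | ord(data[3])
    payload = data[4:4 + n]
    xsum = (ord(data[4 + n]) << 8) | ord(data[4 + n + 1])
    assert xsum == sum(ord(b) for b in payload) & 0xFFFF
    return ord(payload[0]), payload[1:]  # (mid, body)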
df5ba98be707df7c3ec48fae986cf702b9d5a66e
|
bayespy/inference/vmp/nodes/tests/test_gaussian.py
|
bayespy/inference/vmp/nodes/tests/test_gaussian.py
|
######################################################################
# Copyright (C) 2013 Jaakko Luttinen
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
######################################################################
######################################################################
# This file is part of BayesPy.
#
# BayesPy is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# BayesPy is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BayesPy. If not, see <http://www.gnu.org/licenses/>.
######################################################################
"""
Unit tests for `gaussian` module.
"""
import unittest
import numpy as np
import scipy
from numpy import testing
from ..gaussian import GaussianArrayARD#, Gaussian
#from ..normal import Normal
from ...vmp import VB
from bayespy.utils import utils
from bayespy.utils import linalg
from bayespy.utils import random
from bayespy.utils.utils import TestCase
class TestGaussianArrayARD(TestCase):
def test_parent_validity(self):
"""
Test that the parent nodes are validated properly in the constructor
"""
# Create from constant parents
GaussianArrayARD(0,
1)
GaussianArrayARD(np.ones((2,)),
np.ones((2,)))
|
Add a simple test for Gaussian arrays node (GaussianArrayARD).
|
TST: Add a simple test for Gaussian arrays node (GaussianArrayARD).
|
Python
|
mit
|
bayespy/bayespy,fivejjs/bayespy,jluttine/bayespy,SalemAmeen/bayespy
|
TST: Add a simple test for Gaussian arrays node (GaussianArrayARD).
|
######################################################################
# Copyright (C) 2013 Jaakko Luttinen
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
######################################################################
######################################################################
# This file is part of BayesPy.
#
# BayesPy is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# BayesPy is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BayesPy. If not, see <http://www.gnu.org/licenses/>.
######################################################################
"""
Unit tests for `gaussian` module.
"""
import unittest
import numpy as np
import scipy
from numpy import testing
from ..gaussian import GaussianArrayARD#, Gaussian
#from ..normal import Normal
from ...vmp import VB
from bayespy.utils import utils
from bayespy.utils import linalg
from bayespy.utils import random
from bayespy.utils.utils import TestCase
class TestGaussianArrayARD(TestCase):
def test_parent_validity(self):
"""
Test that the parent nodes are validated properly in the constructor
"""
# Create from constant parents
GaussianArrayARD(0,
1)
GaussianArrayARD(np.ones((2,)),
np.ones((2,)))
|
<commit_before><commit_msg>TST: Add a simple test for Gaussian arrays node (GaussianArrayARD).<commit_after>
|
######################################################################
# Copyright (C) 2013 Jaakko Luttinen
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
######################################################################
######################################################################
# This file is part of BayesPy.
#
# BayesPy is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# BayesPy is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BayesPy. If not, see <http://www.gnu.org/licenses/>.
######################################################################
"""
Unit tests for `gaussian` module.
"""
import unittest
import numpy as np
import scipy
from numpy import testing
from ..gaussian import GaussianArrayARD#, Gaussian
#from ..normal import Normal
from ...vmp import VB
from bayespy.utils import utils
from bayespy.utils import linalg
from bayespy.utils import random
from bayespy.utils.utils import TestCase
class TestGaussianArrayARD(TestCase):
def test_parent_validity(self):
"""
Test that the parent nodes are validated properly in the constructor
"""
# Create from constant parents
GaussianArrayARD(0,
1)
GaussianArrayARD(np.ones((2,)),
np.ones((2,)))
|
TST: Add a simple test for Gaussian arrays node (GaussianArrayARD).######################################################################
# Copyright (C) 2013 Jaakko Luttinen
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
######################################################################
######################################################################
# This file is part of BayesPy.
#
# BayesPy is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# BayesPy is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BayesPy. If not, see <http://www.gnu.org/licenses/>.
######################################################################
"""
Unit tests for `gaussian` module.
"""
import unittest
import numpy as np
import scipy
from numpy import testing
from ..gaussian import GaussianArrayARD#, Gaussian
#from ..normal import Normal
from ...vmp import VB
from bayespy.utils import utils
from bayespy.utils import linalg
from bayespy.utils import random
from bayespy.utils.utils import TestCase
class TestGaussianArrayARD(TestCase):
def test_parent_validity(self):
"""
Test that the parent nodes are validated properly in the constructor
"""
# Create from constant parents
GaussianArrayARD(0,
1)
GaussianArrayARD(np.ones((2,)),
np.ones((2,)))
|
<commit_before><commit_msg>TST: Add a simple test for Gaussian arrays node (GaussianArrayARD).<commit_after>######################################################################
# Copyright (C) 2013 Jaakko Luttinen
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
######################################################################
######################################################################
# This file is part of BayesPy.
#
# BayesPy is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# BayesPy is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BayesPy. If not, see <http://www.gnu.org/licenses/>.
######################################################################
"""
Unit tests for `gaussian` module.
"""
import unittest
import numpy as np
import scipy
from numpy import testing
from ..gaussian import GaussianArrayARD#, Gaussian
#from ..normal import Normal
from ...vmp import VB
from bayespy.utils import utils
from bayespy.utils import linalg
from bayespy.utils import random
from bayespy.utils.utils import TestCase
class TestGaussianArrayARD(TestCase):
def test_parent_validity(self):
"""
Test that the parent nodes are validated properly in the constructor
"""
# Create from constant parents
GaussianArrayARD(0,
1)
GaussianArrayARD(np.ones((2,)),
np.ones((2,)))
|
|
23a70b0c91aa0438027efb1907cd56eae008c0f6
|
comics/comics/kalscartoon.py
|
comics/comics/kalscartoon.py
|
from dateutil.parser import parse
from comics.aggregator.crawler import CrawlerBase, CrawlerResult
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = "KAL's Cartoon"
language = 'en'
url = 'http://www.economist.com'
start_date = '2006-01-05'
rights = 'Kevin Kallaugher'
class Crawler(CrawlerBase):
history_capable_days = 1000
schedule = 'Th'
def crawl(self, pub_date):
article_list = self.parse_page('http://www.economist.com/research/articlesBySubject/display.cfm?id=8717275&startRow=1&endrow=500')
article_list.remove('.web-only')
for block in article_list.root.cssselect('.article-list .block'):
date = block.cssselect('.date')[0]
if pub_date != parse(date.text_content()).date():
continue
            anchor = block.cssselect('h2 a')[0]
if "KAL's cartoon" not in anchor.text_content():
continue
page = self.parse_page(anchor.get('href'))
return CrawlerResult(page.src('.content-image-full img'))
|
Add crawler for KAL's cartoon
|
Add crawler for KAL's cartoon
|
Python
|
agpl-3.0
|
jodal/comics,datagutten/comics,jodal/comics,datagutten/comics,jodal/comics,datagutten/comics,klette/comics,klette/comics,datagutten/comics,jodal/comics,klette/comics
|
Add crawler for KAL's cartoon
|
from dateutil.parser import parse
from comics.aggregator.crawler import CrawlerBase, CrawlerResult
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = "KAL's Cartoon"
language = 'en'
url = 'http://www.economist.com'
start_date = '2006-01-05'
rights = 'Kevin Kallaugher'
class Crawler(CrawlerBase):
history_capable_days = 1000
schedule = 'Th'
def crawl(self, pub_date):
article_list = self.parse_page('http://www.economist.com/research/articlesBySubject/display.cfm?id=8717275&startRow=1&endrow=500')
article_list.remove('.web-only')
for block in article_list.root.cssselect('.article-list .block'):
date = block.cssselect('.date')[0]
if pub_date != parse(date.text_content()).date():
continue
            anchor = block.cssselect('h2 a')[0]
if "KAL's cartoon" not in anchor.text_content():
continue
page = self.parse_page(anchor.get('href'))
return CrawlerResult(page.src('.content-image-full img'))
|
<commit_before><commit_msg>Add crawler for KAL's cartoon<commit_after>
|
from dateutil.parser import parse
from comics.aggregator.crawler import CrawlerBase, CrawlerResult
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = "KAL's Cartoon"
language = 'en'
url = 'http://www.economist.com'
start_date = '2006-01-05'
rights = 'Kevin Kallaugher'
class Crawler(CrawlerBase):
history_capable_days = 1000
schedule = 'Th'
def crawl(self, pub_date):
article_list = self.parse_page('http://www.economist.com/research/articlesBySubject/display.cfm?id=8717275&startRow=1&endrow=500')
article_list.remove('.web-only')
for block in article_list.root.cssselect('.article-list .block'):
date = block.cssselect('.date')[0]
if pub_date != parse(date.text_content()).date():
continue
            anchor = block.cssselect('h2 a')[0]
if "KAL's cartoon" not in anchor.text_content():
continue
page = self.parse_page(anchor.get('href'))
return CrawlerResult(page.src('.content-image-full img'))
|
Add crawler for KAL's cartoonfrom dateutil.parser import parse
from comics.aggregator.crawler import CrawlerBase, CrawlerResult
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = "KAL's Cartoon"
language = 'en'
url = 'http://www.economist.com'
start_date = '2006-01-05'
rights = 'Kevin Kallaugher'
class Crawler(CrawlerBase):
history_capable_days = 1000
schedule = 'Th'
def crawl(self, pub_date):
article_list = self.parse_page('http://www.economist.com/research/articlesBySubject/display.cfm?id=8717275&startRow=1&endrow=500')
article_list.remove('.web-only')
for block in article_list.root.cssselect('.article-list .block'):
date = block.cssselect('.date')[0]
if pub_date != parse(date.text_content()).date():
continue
            anchor = block.cssselect('h2 a')[0]
if "KAL's cartoon" not in anchor.text_content():
continue
page = self.parse_page(anchor.get('href'))
return CrawlerResult(page.src('.content-image-full img'))
|
<commit_before><commit_msg>Add crawler for KAL's cartoon<commit_after>from dateutil.parser import parse
from comics.aggregator.crawler import CrawlerBase, CrawlerResult
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = "KAL's Cartoon"
language = 'en'
url = 'http://www.economist.com'
start_date = '2006-01-05'
rights = 'Kevin Kallaugher'
class Crawler(CrawlerBase):
history_capable_days = 1000
schedule = 'Th'
def crawl(self, pub_date):
article_list = self.parse_page('http://www.economist.com/research/articlesBySubject/display.cfm?id=8717275&startRow=1&endrow=500')
article_list.remove('.web-only')
for block in article_list.root.cssselect('.article-list .block'):
date = block.cssselect('.date')[0]
if pub_date != parse(date.text_content()).date():
continue
            anchor = block.cssselect('h2 a')[0]
if "KAL's cartoon" not in anchor.text_content():
continue
page = self.parse_page(anchor.get('href'))
return CrawlerResult(page.src('.content-image-full img'))
|
|
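The block-matching logic in crawl() can be exercised outside the crawler framework with plain lxml — a sketch assuming the cssselect package is available; the markup below is hypothetical, not the real Economist page:
# Sketch: date/title matching against hypothetical listing markup.
from datetime import date
from dateutil.parser import parse
import lxml.html
html = ('<div class="article-list">'
        '<div class="block"><span class="date">Jan 5th 2006</span>'
        '<h2><a href="/node/1">KAL\'s cartoon</a></h2></div></div>')
doc = lxml.html.fromstring(html)
for block in doc.cssselect('.article-list .block'):
    when = parse(block.cssselect('.date')[0].text_content()).date()
    anchor = block.cssselect('h2 a')[0]
    if when == date(2006, 1, 5) and "KAL's cartoon" in anchor.text_content():
        print(anchor.get('href'))  # -> /node/1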
6679bc398dc65b161895a1d1c5556eca38c398ba
|
mozillians/users/migrations/0040_auto_20180920_0954.py
|
mozillians/users/migrations/0040_auto_20180920_0954.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-09-20 16:54
from __future__ import unicode_literals
from django.db import migrations
def set_primary_auth0_user_id(apps, schema_editor):
IdpProfile = apps.get_model('users', 'IdpProfile')
for idp in IdpProfile.objects.all():
if idp.primary:
idp.profile.auth0_user_id = idp.auth0_user_id
idp.profile.save()
def backwards(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('users', '0039_userprofile_auth0_user_id'),
]
operations = [
migrations.RunPython(set_primary_auth0_user_id, backwards),
]
|
Migrate Auth0 data from Idp to UserProfiles.
|
Migrate Auth0 data from Idp to UserProfiles.
|
Python
|
bsd-3-clause
|
mozilla/mozillians,akatsoulas/mozillians,mozilla/mozillians,mozilla/mozillians,mozilla/mozillians,akatsoulas/mozillians,akatsoulas/mozillians,akatsoulas/mozillians
|
Migrate Auth0 data from Idp to UserProfiles.
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-09-20 16:54
from __future__ import unicode_literals
from django.db import migrations
def set_primary_auth0_user_id(apps, schema_editor):
IdpProfile = apps.get_model('users', 'IdpProfile')
for idp in IdpProfile.objects.all():
if idp.primary:
idp.profile.auth0_user_id = idp.auth0_user_id
idp.profile.save()
def backwards(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('users', '0039_userprofile_auth0_user_id'),
]
operations = [
migrations.RunPython(set_primary_auth0_user_id, backwards),
]
|
<commit_before><commit_msg>Migrate Auth0 data from Idp to UserProfiles.<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-09-20 16:54
from __future__ import unicode_literals
from django.db import migrations
def set_primary_auth0_user_id(apps, schema_editor):
IdpProfile = apps.get_model('users', 'IdpProfile')
for idp in IdpProfile.objects.all():
if idp.primary:
idp.profile.auth0_user_id = idp.auth0_user_id
idp.profile.save()
def backwards(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('users', '0039_userprofile_auth0_user_id'),
]
operations = [
migrations.RunPython(set_primary_auth0_user_id, backwards),
]
|
Migrate Auth0 data from Idp to UserProfiles.# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-09-20 16:54
from __future__ import unicode_literals
from django.db import migrations
def set_primary_auth0_user_id(apps, schema_editor):
IdpProfile = apps.get_model('users', 'IdpProfile')
for idp in IdpProfile.objects.all():
if idp.primary:
idp.profile.auth0_user_id = idp.auth0_user_id
idp.profile.save()
def backwards(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('users', '0039_userprofile_auth0_user_id'),
]
operations = [
migrations.RunPython(set_primary_auth0_user_id, backwards),
]
|
<commit_before><commit_msg>Migrate Auth0 data from Idp to UserProfiles.<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-09-20 16:54
from __future__ import unicode_literals
from django.db import migrations
def set_primary_auth0_user_id(apps, schema_editor):
IdpProfile = apps.get_model('users', 'IdpProfile')
for idp in IdpProfile.objects.all():
if idp.primary:
idp.profile.auth0_user_id = idp.auth0_user_id
idp.profile.save()
def backwards(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('users', '0039_userprofile_auth0_user_id'),
]
operations = [
migrations.RunPython(set_primary_auth0_user_id, backwards),
]
|
|
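Since only rows with primary=True ever write anything, the same backfill could be expressed with a filtered queryset — a sketch, not the committed migration, assuming profile is a foreign key on IdpProfile as the loop implies:
# Sketch: equivalent backfill that filters at the database instead of in Python.
def set_primary_auth0_user_id(apps, schema_editor):
    IdpProfile = apps.get_model('users', 'IdpProfile')
    for idp in IdpProfile.objects.filter(primary=True).select_related('profile'):
        idp.profile.auth0_user_id = idp.auth0_user_id
        idp.profile.save()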
fd8dcaa25fe2f9a14bfe64851554903cfb6976e1
|
pdc/apps/release/migrations/0007_add_release_variant_type_module.py
|
pdc/apps/release/migrations/0007_add_release_variant_type_module.py
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
from __future__ import unicode_literals
from django.db import models, migrations
def create_variant_type_module(apps, schema_editor):
VariantType = apps.get_model('release', 'VariantType')
VariantType.objects.create(name='module')
class Migration(migrations.Migration):
dependencies = [
('release', '0006_auto_20160512_0515'),
]
operations = [
migrations.RunPython(create_variant_type_module),
]
|
Add release variant type 'module'
|
Add release variant type 'module'
Signed-off-by: Nils Philippsen <ad3fa8d847df2b57853a376ad688e4be8041ecd4@redhat.com>
|
Python
|
mit
|
release-engineering/product-definition-center,product-definition-center/product-definition-center,product-definition-center/product-definition-center,product-definition-center/product-definition-center,product-definition-center/product-definition-center,release-engineering/product-definition-center,release-engineering/product-definition-center,release-engineering/product-definition-center
|
Add release variant type 'module'
Signed-off-by: Nils Philippsen <ad3fa8d847df2b57853a376ad688e4be8041ecd4@redhat.com>
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
from __future__ import unicode_literals
from django.db import models, migrations
def create_variant_type_module(apps, schema_editor):
VariantType = apps.get_model('release', 'VariantType')
VariantType.objects.create(name='module')
class Migration(migrations.Migration):
dependencies = [
('release', '0006_auto_20160512_0515'),
]
operations = [
migrations.RunPython(create_variant_type_module),
]
|
<commit_before><commit_msg>Add release variant type 'module'
Signed-off-by: Nils Philippsen <ad3fa8d847df2b57853a376ad688e4be8041ecd4@redhat.com><commit_after>
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
from __future__ import unicode_literals
from django.db import models, migrations
def create_variant_type_module(apps, schema_editor):
VariantType = apps.get_model('release', 'VariantType')
VariantType.objects.create(name='module')
class Migration(migrations.Migration):
dependencies = [
('release', '0006_auto_20160512_0515'),
]
operations = [
migrations.RunPython(create_variant_type_module),
]
|
Add release variant type 'module'
Signed-off-by: Nils Philippsen <ad3fa8d847df2b57853a376ad688e4be8041ecd4@redhat.com># -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
from __future__ import unicode_literals
from django.db import models, migrations
def create_variant_type_module(apps, schema_editor):
VariantType = apps.get_model('release', 'VariantType')
VariantType.objects.create(name='module')
class Migration(migrations.Migration):
dependencies = [
('release', '0006_auto_20160512_0515'),
]
operations = [
migrations.RunPython(create_variant_type_module),
]
|
<commit_before><commit_msg>Add release variant type 'module'
Signed-off-by: Nils Philippsen <ad3fa8d847df2b57853a376ad688e4be8041ecd4@redhat.com><commit_after># -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
from __future__ import unicode_literals
from django.db import models, migrations
def create_variant_type_module(apps, schema_editor):
VariantType = apps.get_model('release', 'VariantType')
VariantType.objects.create(name='module')
class Migration(migrations.Migration):
dependencies = [
('release', '0006_auto_20160512_0515'),
]
operations = [
migrations.RunPython(create_variant_type_module),
]
|
|
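The committed migration is forward-only; a reversible variant would pair the RunPython with a delete, roughly as follows (a sketch, not part of the commit):
# Sketch: reverse operation for a reversible version of this data migration.
def remove_variant_type_module(apps, schema_editor):
    VariantType = apps.get_model('release', 'VariantType')
    VariantType.objects.filter(name='module').delete()
# ...and in Migration.operations:
#     migrations.RunPython(create_variant_type_module, remove_variant_type_module)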
7229e8fa6bea9cf748d1e171d50aeaab69919922
|
misc/decode-mirax.py
|
misc/decode-mirax.py
|
#!/usr/bin/python
import struct, sys
f = open(sys.argv[1], 'rb')
HEADER_OFFSET = 37
f.seek(HEADER_OFFSET)
try:
while True:
n = struct.unpack("<i", f.read(4))[0]
possible_lineno = (n - HEADER_OFFSET) / 4.0
if possible_lineno < 0 or int(possible_lineno) != possible_lineno:
print "%11d" % (n)
else:
print "%11d %10d" % (n, possible_lineno)
except:
pass
|
Add a little script to help figure out MIRAX
|
Add a little script to help figure out MIRAX
|
Python
|
lgpl-2.1
|
openslide/openslide,openslide/openslide,openslide/openslide,openslide/openslide
|
Add a little script to help figure out MIRAX
|
#!/usr/bin/python
import struct, sys
f = open(sys.argv[1], 'rb')
HEADER_OFFSET = 37
f.seek(HEADER_OFFSET)
try:
while True:
n = struct.unpack("<i", f.read(4))[0]
possible_lineno = (n - HEADER_OFFSET) / 4.0
if possible_lineno < 0 or int(possible_lineno) != possible_lineno:
print "%11d" % (n)
else:
print "%11d %10d" % (n, possible_lineno)
except:
pass
|
<commit_before><commit_msg>Add a little script to help figure out MIRAX<commit_after>
|
#!/usr/bin/python
import struct, sys
f = open(sys.argv[1], 'rb')
HEADER_OFFSET = 37
f.seek(HEADER_OFFSET)
try:
while True:
n = struct.unpack("<i", f.read(4))[0]
possible_lineno = (n - HEADER_OFFSET) / 4.0
if possible_lineno < 0 or int(possible_lineno) != possible_lineno:
print "%11d" % (n)
else:
print "%11d %10d" % (n, possible_lineno)
except:
pass
|
Add a little script to help figure out MIRAX#!/usr/bin/python
import struct, sys
f = open(sys.argv[1], 'rb')
HEADER_OFFSET = 37
f.seek(HEADER_OFFSET)
try:
while True:
n = struct.unpack("<i", f.read(4))[0]
possible_lineno = (n - HEADER_OFFSET) / 4.0
if possible_lineno < 0 or int(possible_lineno) != possible_lineno:
print "%11d" % (n)
else:
print "%11d %10d" % (n, possible_lineno)
except:
pass
|
<commit_before><commit_msg>Add a little script to help figure out MIRAX<commit_after>#!/usr/bin/python
import struct, sys
f = open(sys.argv[1], 'rb')
HEADER_OFFSET = 37
f.seek(HEADER_OFFSET)
try:
while True:
n = struct.unpack("<i", f.read(4))[0]
possible_lineno = (n - HEADER_OFFSET) / 4.0
if possible_lineno < 0 or int(possible_lineno) != possible_lineno:
print "%11d" % (n)
else:
print "%11d %10d" % (n, possible_lineno)
except:
pass
|
|
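The heuristic above flags a value n as a plausible record index only when (n - 37) is a non-negative multiple of 4, i.e. when n could itself be the byte offset of another 4-byte record in the same file; the inverse mapping, for reference:
# Sketch: record index <-> byte offset relation assumed by the heuristic.
HEADER_OFFSET = 37
def offset_of(record_index):
    return HEADER_OFFSET + 4 * record_index  # record i starts at byte 37 + 4*i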
96280e7c1d6b8f705d6dfbc1fac3d379acbd0b04
|
utils.py
|
utils.py
|
import numpy as np
def overwrite_labels(y):
classes = np.unique(y)
y[y==classes[0]] = -1
y[y==classes[1]] = 1
return y
|
Add utility functions for tests
|
Add utility functions for tests
|
Python
|
mit
|
IshitaTakeshi/SCW
|
Add utility functions for tests
|
import numpy as np
def overwrite_labels(y):
classes = np.unique(y)
y[y==classes[0]] = -1
y[y==classes[1]] = 1
return y
|
<commit_before><commit_msg>Add utility functions for tests<commit_after>
|
import numpy as np
def overwrite_labels(y):
classes = np.unique(y)
y[y==classes[0]] = -1
y[y==classes[1]] = 1
return y
|
Add utility functions for testsimport numpy as np
def overwrite_labels(y):
classes = np.unique(y)
y[y==classes[0]] = -1
y[y==classes[1]] = 1
return y
|
<commit_before><commit_msg>Add utility functions for tests<commit_after>import numpy as np
def overwrite_labels(y):
classes = np.unique(y)
y[y==classes[0]] = -1
y[y==classes[1]] = 1
return y
|
|
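A quick usage check — note the function rewrites the array in place as well as returning it, and it assumes exactly two distinct classes (it can misfire if -1 is itself one of the original labels):
# Sketch: expected behavior on a two-class label vector.
import numpy as np
from utils import overwrite_labels
y = np.array([3, 7, 3, 7])
print(overwrite_labels(y))  # -> [-1  1 -1  1]; y itself is modified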
8ddca17d5b0e4b8b683b596af2f99938d0977401
|
demo_tests.py
|
demo_tests.py
|
# This is an example unit test suite for creating an AI
from game import GameRunner, Board, Player
from demo import BattleshipsAI
from unittest import TestCase
class TestPlayer(Player):
""" Mock Player which records ships placed and shots taken """
def __init__(self):
super(TestPlayer, self).__init__()
self.placed_ships = []
self.shots_taken = 0
def place_ship(self, size, x, y, direction):
self.placed_ships.append(size)
super(TestPlayer, self).place_ship(size, x, y, direction)
def take_shot(self, x, y):
self.shots_taken += 1
class TestBattleshipsAI(TestCase):
def setUp(self):
self.ai = BattleshipsAI()
def test_places_all_ships(self):
""" Test that all ships are placed. """
player = TestPlayer()
self.ai.place_ships(player)
sorted_ships = sorted(player.placed_ships)
self.assertEqual([2, 3, 3, 4, 5], sorted_ships)
def test_takes_shot(self):
""" Test that the AI takes a shot. """
player = TestPlayer()
self.ai.take_shot(player)
self.assertEqual(1, player.shots_taken)
|
Add very simple demo unit tests
|
Add very simple demo unit tests
|
Python
|
mit
|
southampton-code-dojo/battleships,southampton-code-dojo/battleships,southampton-code-dojo/battleships,southampton-code-dojo/battleships
|
Add very simple demo unit tests
|
# This is an example unit test suite for creating an AI
from game import GameRunner, Board, Player
from demo import BattleshipsAI
from unittest import TestCase
class TestPlayer(Player):
""" Mock Player which records ships placed and shots taken """
def __init__(self):
super(TestPlayer, self).__init__()
self.placed_ships = []
self.shots_taken = 0
def place_ship(self, size, x, y, direction):
self.placed_ships.append(size)
super(TestPlayer, self).place_ship(size, x, y, direction)
def take_shot(self, x, y):
self.shots_taken += 1
class TestBattleshipsAI(TestCase):
def setUp(self):
self.ai = BattleshipsAI()
def test_places_all_ships(self):
""" Test that all ships are placed. """
player = TestPlayer()
self.ai.place_ships(player)
sorted_ships = sorted(player.placed_ships)
self.assertEqual([2, 3, 3, 4, 5], sorted_ships)
def test_takes_shot(self):
""" Test that the AI takes a shot. """
player = TestPlayer()
self.ai.take_shot(player)
self.assertEqual(1, player.shots_taken)
|
<commit_before><commit_msg>Add very simple demo unit tests<commit_after>
|
# This is an example unit test suite for creating an AI
from game import GameRunner, Board, Player
from demo import BattleshipsAI
from unittest import TestCase
class TestPlayer(Player):
""" Mock Player which records ships placed and shots taken """
def __init__(self):
super(TestPlayer, self).__init__()
self.placed_ships = []
self.shots_taken = 0
def place_ship(self, size, x, y, direction):
self.placed_ships.append(size)
super(TestPlayer, self).place_ship(size, x, y, direction)
def take_shot(self, x, y):
self.shots_taken += 1
class TestBattleshipsAI(TestCase):
def setUp(self):
self.ai = BattleshipsAI()
def test_places_all_ships(self):
""" Test that all ships are placed. """
player = TestPlayer()
self.ai.place_ships(player)
sorted_ships = sorted(player.placed_ships)
self.assertEqual([2, 3, 3, 4, 5], sorted_ships)
def test_takes_shot(self):
""" Test that the AI takes a shot. """
player = TestPlayer()
self.ai.take_shot(player)
self.assertEqual(1, player.shots_taken)
|
Add very simple demo unit tests# This is an example unit test suite for creating an AI
from game import GameRunner, Board, Player
from demo import BattleshipsAI
from unittest import TestCase
class TestPlayer(Player):
""" Mock Player which records ships placed and shots taken """
def __init__(self):
super(TestPlayer, self).__init__()
self.placed_ships = []
self.shots_taken = 0
def place_ship(self, size, x, y, direction):
self.placed_ships.append(size)
super(TestPlayer, self).place_ship(size, x, y, direction)
def take_shot(self, x, y):
self.shots_taken += 1
class TestBattleshipsAI(TestCase):
def setUp(self):
self.ai = BattleshipsAI()
def test_places_all_ships(self):
""" Test that all ships are placed. """
player = TestPlayer()
self.ai.place_ships(player)
sorted_ships = sorted(player.placed_ships)
self.assertEqual([2, 3, 3, 4, 5], sorted_ships)
def test_takes_shot(self):
""" Test that the AI takes a shot. """
player = TestPlayer()
self.ai.take_shot(player)
self.assertEqual(1, player.shots_taken)
|
<commit_before><commit_msg>Add very simple demo unit tests<commit_after># This is an example unit test suite for creating an AI
from game import GameRunner, Board, Player
from demo import BattleshipsAI
from unittest import TestCase
class TestPlayer(Player):
""" Mock Player which records ships placed and shots taken """
def __init__(self):
super(TestPlayer, self).__init__()
self.placed_ships = []
self.shots_taken = 0
def place_ship(self, size, x, y, direction):
self.placed_ships.append(size)
super(TestPlayer, self).place_ship(size, x, y, direction)
def take_shot(self, x, y):
self.shots_taken += 1
class TestBattleshipsAI(TestCase):
def setUp(self):
self.ai = BattleshipsAI()
def test_places_all_ships(self):
""" Test that all ships are placed. """
player = TestPlayer()
self.ai.place_ships(player)
sorted_ships = sorted(player.placed_ships)
self.assertEqual([2, 3, 3, 4, 5], sorted_ships)
def test_takes_shot(self):
""" Test that the AI takes a shot. """
player = TestPlayer()
self.ai.take_shot(player)
self.assertEqual(1, player.shots_taken)
|
|
df494797db7b4c9d62a7ef64fea74c7a231f35bd
|
func_utils.py
|
func_utils.py
|
'''
Function helper functions.
by Craig Buchek
These took a lot of trial and error to get right, but seem to be working well now.
I read through a lot of various other attempts to implement these (separately).
I don't claim to understand Python decorators well - just well enough to get these to work like I wanted.
'''
class memoized_property(object):
'''
Simple memoization (caching) decorator for a method that takes no arguments and is used as a property.
NOTE: This works only for read-only properties; it does not handle getters, setters, or deleters like the built-in @property decorator.
Usage:
class MyClass(object):
@memoized_property
def name_of_method(self):
                value = something_that_takes_a_while_to_compute_or_has_side_effects()
return value
my_obj = MyClass()
my_obj.name_of_method
'''
def __init__(self, func):
self.func = func
def __call__(self, obj):
if not hasattr(obj, '_memoization_cache'):
obj._memoization_cache = {}
if not obj._memoization_cache.has_key(self.func.__name__):
obj._memoization_cache[self.func.__name__] = self.func(obj)
return obj._memoization_cache[self.func.__name__]
def __get__(self, obj, objtype):
return self.__call__(obj)
class memoized_class_property(classmethod):
'''
Simple memoization (caching) decorator for a class method that takes no arguments and is used as a property.
NOTE: This works only for read-only properties; it does not handle getters, setters, or deleters like the built-in @property decorator.
Usage:
class MyClass(object):
@memoized_class_property
def name_of_method(cls):
                value = something_that_takes_a_while_to_compute_or_has_side_effects()
return value
MyClass.name_of_method
'''
def __init__(self, func):
self.func = func
def __call__(self, obj):
if not hasattr(obj, '_memoization_cache'):
obj._memoization_cache = {}
if not obj._memoization_cache.has_key(self.func.__name__):
obj._memoization_cache[self.func.__name__] = self.func(obj)
return obj._memoization_cache[self.func.__name__]
def __get__(self, obj, objtype):
return self.__call__(objtype)
|
Add @memoized_property and @memoized_class_property decorators.
|
Add @memoized_property and @memoized_class_property decorators.
|
Python
|
mit
|
booch/python-utils
|
Add @memoized_property and @memoized_class_property decorators.
|
'''
Function helper functions.
by Craig Buchek
These took a lot of trial and error to get right, but seem to be working well now.
I read through a lot of various other attempts to implement these (separately).
I don't claim to understand Python decorators well - just well enough to get these to work like I wanted.
'''
class memoized_property(object):
'''
Simple memoization (caching) decorator for a method that takes no arguments and is used as a property.
NOTE: This works only for read-only properties; it does not handle getters, setters, or deleters like the built-in @property decorator.
Usage:
class MyClass(object):
@memoized_property
def name_of_method(self):
                value = something_that_takes_a_while_to_compute_or_has_side_effects()
return value
my_obj = MyClass()
my_obj.name_of_method
'''
def __init__(self, func):
self.func = func
def __call__(self, obj):
if not hasattr(obj, '_memoization_cache'):
obj._memoization_cache = {}
if not obj._memoization_cache.has_key(self.func.__name__):
obj._memoization_cache[self.func.__name__] = self.func(obj)
return obj._memoization_cache[self.func.__name__]
def __get__(self, obj, objtype):
return self.__call__(obj)
class memoized_class_property(classmethod):
'''
Simple memoization (caching) decorator for a class method that takes no arguments and is used as a property.
NOTE: This works only for read-only properties; it does not handle getters, setters, or deleters like the built-in @property decorator.
Usage:
class MyClass(object):
@memoized_class_property
def name_of_method(cls):
                value = something_that_takes_a_while_to_compute_or_has_side_effects()
return value
MyClass.name_of_method
'''
def __init__(self, func):
self.func = func
def __call__(self, obj):
if not hasattr(obj, '_memoization_cache'):
obj._memoization_cache = {}
if not obj._memoization_cache.has_key(self.func.__name__):
obj._memoization_cache[self.func.__name__] = self.func(obj)
return obj._memoization_cache[self.func.__name__]
def __get__(self, obj, objtype):
return self.__call__(objtype)
|
<commit_before><commit_msg>Add @memoized_property and @memoized_class_property decorators.<commit_after>
|
'''
Function helper functions.
by Craig Buchek
These took a lot of trial and error to get right, but seem to be working well now.
I read through a lot of various other attempts to implement these (separately).
I don't claim to understand Python decorators well - just well enough to get these to work like I wanted.
'''
class memoized_property(object):
'''
Simple memoization (caching) decorator for a method that takes no arguments and is used as a property.
NOTE: This works only for read-only properties; it does not handle getters, setters, or deleters like the built-in @property decorator.
Usage:
class MyClass(object):
@memoized_property
def name_of_method(self):
                value = something_that_takes_a_while_to_compute_or_has_side_effects()
return value
my_obj = MyClass()
my_obj.name_of_method
'''
def __init__(self, func):
self.func = func
def __call__(self, obj):
if not hasattr(obj, '_memoization_cache'):
obj._memoization_cache = {}
if not obj._memoization_cache.has_key(self.func.__name__):
obj._memoization_cache[self.func.__name__] = self.func(obj)
return obj._memoization_cache[self.func.__name__]
def __get__(self, obj, objtype):
return self.__call__(obj)
class memoized_class_property(classmethod):
'''
Simple memoization (caching) decorator for a class method that takes no arguments and is used as a property.
NOTE: This works only for read-only properties; it does not handle getters, setters, or deleters like the built-in @property decorator.
Usage:
class MyClass(object):
@memoized_class_property
def name_of_method(cls):
                value = something_that_takes_a_while_to_compute_or_has_side_effects()
return value
MyClass.name_of_method
'''
def __init__(self, func):
self.func = func
def __call__(self, obj):
if not hasattr(obj, '_memoization_cache'):
obj._memoization_cache = {}
if not obj._memoization_cache.has_key(self.func.__name__):
obj._memoization_cache[self.func.__name__] = self.func(obj)
return obj._memoization_cache[self.func.__name__]
def __get__(self, obj, objtype):
return self.__call__(objtype)
|
Add @memoized_property and @memoized_class_property decorators.'''
Function helper functions.
by Craig Buchek
These took a lot of trial and error to get right, but seem to be working well now.
I read through a lot of various other attempts to implement these (separately).
I don't claim to understand Python decorators well - just well enough to get these to work like I wanted.
'''
class memoized_property(object):
'''
Simple memoization (caching) decorator for a method that takes no arguments and is used as a property.
NOTE: This works only for read-only properties; it does not handle getters, setters, or deleters like the built-in @property decorator.
Usage:
class MyClass(object):
@memoized_property
def name_of_method(self):
                value = something_that_takes_a_while_to_compute_or_has_side_effects()
return value
my_obj = MyClass()
my_obj.name_of_method
'''
def __init__(self, func):
self.func = func
def __call__(self, obj):
if not hasattr(obj, '_memoization_cache'):
obj._memoization_cache = {}
if not obj._memoization_cache.has_key(self.func.__name__):
obj._memoization_cache[self.func.__name__] = self.func(obj)
return obj._memoization_cache[self.func.__name__]
def __get__(self, obj, objtype):
return self.__call__(obj)
class memoized_class_property(classmethod):
'''
Simple memoization (caching) decorator for a class method that takes no arguments and is used as a property.
NOTE: This works only for read-only properties; it does not handle getters, setters, or deleters like the built-in @property decorator.
Usage:
class MyClass(object):
@memoized_class_property
def name_of_method(cls):
                value = something_that_takes_a_while_to_compute_or_has_side_effects()
return value
MyClass.name_of_method
'''
def __init__(self, func):
self.func = func
def __call__(self, obj):
if not hasattr(obj, '_memoization_cache'):
obj._memoization_cache = {}
if not obj._memoization_cache.has_key(self.func.__name__):
obj._memoization_cache[self.func.__name__] = self.func(obj)
return obj._memoization_cache[self.func.__name__]
def __get__(self, obj, objtype):
return self.__call__(objtype)
|
<commit_before><commit_msg>Add @memoized_property and @memoized_class_property decorators.<commit_after>'''
Function helper functions.
by Craig Buchek
These took a lot of trial and error to get right, but seem to be working well now.
I read through a lot of various other attempts to implement these (separately).
I don't claim to understand Python decorators well - just well enough to get these to work like I wanted.
'''
class memoized_property(object):
'''
Simple memoization (caching) decorator for a method that takes no arguments and is used as a property.
NOTE: This works only for read-only properties; it does not handle getters, setters, or deleters like the built-in @property decorator.
Usage:
class MyClass(object):
@memoized_property
def name_of_method(self):
                value = something_that_takes_a_while_to_compute_or_has_side_effects()
return value
my_obj = MyClass()
my_obj.name_of_method
'''
def __init__(self, func):
self.func = func
def __call__(self, obj):
if not hasattr(obj, '_memoization_cache'):
obj._memoization_cache = {}
if not obj._memoization_cache.has_key(self.func.__name__):
obj._memoization_cache[self.func.__name__] = self.func(obj)
return obj._memoization_cache[self.func.__name__]
def __get__(self, obj, objtype):
return self.__call__(obj)
class memoized_class_property(classmethod):
'''
Simple memoization (caching) decorator for a class method that takes no arguments and is used as a property.
NOTE: This works only for read-only properties; it does not handle getters, setters, or deleters like the built-in @property decorator.
Usage:
class MyClass(object):
@memoized_class_property
def name_of_method(cls):
                value = something_that_takes_a_while_to_compute_or_has_side_effects()
return value
MyClass.name_of_method
'''
def __init__(self, func):
self.func = func
def __call__(self, obj):
if not hasattr(obj, '_memoization_cache'):
obj._memoization_cache = {}
if not obj._memoization_cache.has_key(self.func.__name__):
obj._memoization_cache[self.func.__name__] = self.func(obj)
return obj._memoization_cache[self.func.__name__]
def __get__(self, obj, objtype):
return self.__call__(objtype)
|
|
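A usage sketch showing that the wrapped body runs once per instance — the counter is illustrative, not from the original file (Python 2, since the decorators use has_key):
# Sketch: the decorated body executes only once per instance.
from func_utils import memoized_property
class Example(object):
    calls = 0
    @memoized_property
    def expensive(self):
        Example.calls += 1
        return 42
e = Example()
assert e.expensive == 42
assert e.expensive == 42      # second access hits e._memoization_cache
assert Example.calls == 1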
3c9b592b8b41302953c13bc0ef46b979a751fafb
|
test/Incremental/gen-output-file-map.py
|
test/Incremental/gen-output-file-map.py
|
#!/usr/bin/env python
from __future__ import print_function
import argparse
import json
import os
import sys
def fatal(msg):
print(msg, file=sys.stderr)
sys.exit(1)
def find_swift_files(path):
for parent, dirs, files in os.walk(path, topdown=True):
for filename in files:
if not filename.endswith('.swift'):
continue
yield filename
def main(arguments):
parser = argparse.ArgumentParser(
description='Generate an output file map for the given directory')
parser.add_argument('-o', dest='output_dir',
help='Directory to which the file map will be emitted')
parser.add_argument('input_dir', help='a directory of swift files')
args = parser.parse_args(arguments)
if not args.output_dir:
fatal("output directory is required")
# Create the output directory if it doesn't already exist.
if not os.path.isdir(args.output_dir):
os.makedirs(args.output_dir)
output_path = os.path.join(args.output_dir, 'output.json')
if not os.path.isdir(args.input_dir):
fatal("input directory does not exist, or is not a directory")
    swift_files = list(find_swift_files(args.input_dir))
if not swift_files:
fatal("no swift files in the given input directory")
all_records = {}
for swift_file in swift_files:
file_name = os.path.splitext(swift_file)[0]
all_records['./' + swift_file] = {
'object': './' + file_name + '.o',
'swift-dependencies': './' + file_name + '.swiftdeps',
}
all_records[""] = {
'swift-dependencies': './main-buildrecord.swiftdeps'
}
with open(output_path, 'w') as f:
json.dump(all_records, f)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
Add a utility for generating simple output file maps
|
[Incremental] Add a utility for generating simple output file maps
|
Python
|
apache-2.0
|
atrick/swift,glessard/swift,tkremenek/swift,parkera/swift,stephentyrone/swift,JGiola/swift,atrick/swift,roambotics/swift,glessard/swift,jckarter/swift,apple/swift,hooman/swift,aschwaighofer/swift,harlanhaskins/swift,allevato/swift,harlanhaskins/swift,jmgc/swift,jckarter/swift,harlanhaskins/swift,airspeedswift/swift,nathawes/swift,airspeedswift/swift,stephentyrone/swift,roambotics/swift,airspeedswift/swift,airspeedswift/swift,atrick/swift,jckarter/swift,harlanhaskins/swift,hooman/swift,rudkx/swift,jmgc/swift,nathawes/swift,CodaFi/swift,ahoppen/swift,hooman/swift,parkera/swift,allevato/swift,allevato/swift,aschwaighofer/swift,glessard/swift,ahoppen/swift,allevato/swift,apple/swift,gregomni/swift,stephentyrone/swift,nathawes/swift,JGiola/swift,tkremenek/swift,benlangmuir/swift,atrick/swift,CodaFi/swift,allevato/swift,parkera/swift,CodaFi/swift,glessard/swift,gregomni/swift,jckarter/swift,xwu/swift,ahoppen/swift,aschwaighofer/swift,jckarter/swift,benlangmuir/swift,aschwaighofer/swift,stephentyrone/swift,jmgc/swift,xwu/swift,jmgc/swift,JGiola/swift,roambotics/swift,aschwaighofer/swift,hooman/swift,hooman/swift,ahoppen/swift,tkremenek/swift,glessard/swift,apple/swift,CodaFi/swift,parkera/swift,hooman/swift,gregomni/swift,tkremenek/swift,atrick/swift,aschwaighofer/swift,apple/swift,airspeedswift/swift,JGiola/swift,CodaFi/swift,gregomni/swift,jmgc/swift,gregomni/swift,hooman/swift,xwu/swift,JGiola/swift,apple/swift,xwu/swift,aschwaighofer/swift,stephentyrone/swift,allevato/swift,glessard/swift,CodaFi/swift,jmgc/swift,benlangmuir/swift,stephentyrone/swift,roambotics/swift,rudkx/swift,tkremenek/swift,parkera/swift,airspeedswift/swift,ahoppen/swift,jckarter/swift,jmgc/swift,harlanhaskins/swift,rudkx/swift,airspeedswift/swift,apple/swift,rudkx/swift,xwu/swift,roambotics/swift,stephentyrone/swift,rudkx/swift,xwu/swift,harlanhaskins/swift,parkera/swift,gregomni/swift,parkera/swift,benlangmuir/swift,tkremenek/swift,nathawes/swift,benlangmuir/swift,ahoppen/swift,nathawes/swift,parkera/swift,nathawes/swift,rudkx/swift,allevato/swift,JGiola/swift,atrick/swift,xwu/swift,roambotics/swift,harlanhaskins/swift,tkremenek/swift,benlangmuir/swift,CodaFi/swift,jckarter/swift,nathawes/swift
|
[Incremental] Add a utility for generating simple output file maps
|
#!/usr/bin/env python
from __future__ import print_function
import argparse
import json
import os
import sys
def fatal(msg):
print(msg, file=sys.stderr)
sys.exit(1)
def find_swift_files(path):
for parent, dirs, files in os.walk(path, topdown=True):
for filename in files:
if not filename.endswith('.swift'):
continue
yield filename
def main(arguments):
parser = argparse.ArgumentParser(
description='Generate an output file map for the given directory')
parser.add_argument('-o', dest='output_dir',
help='Directory to which the file map will be emitted')
parser.add_argument('input_dir', help='a directory of swift files')
args = parser.parse_args(arguments)
if not args.output_dir:
fatal("output directory is required")
# Create the output directory if it doesn't already exist.
if not os.path.isdir(args.output_dir):
os.makedirs(args.output_dir)
output_path = os.path.join(args.output_dir, 'output.json')
if not os.path.isdir(args.input_dir):
fatal("input directory does not exist, or is not a directory")
    swift_files = list(find_swift_files(args.input_dir))
if not swift_files:
fatal("no swift files in the given input directory")
all_records = {}
for swift_file in swift_files:
file_name = os.path.splitext(swift_file)[0]
all_records['./' + swift_file] = {
'object': './' + file_name + '.o',
'swift-dependencies': './' + file_name + '.swiftdeps',
}
all_records[""] = {
'swift-dependencies': './main-buildrecord.swiftdeps'
}
with open(output_path, 'w') as f:
json.dump(all_records, f)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
<commit_before><commit_msg>[Incremental] Add a utility for generating simple output file maps<commit_after>
|
#!/usr/bin/env python
from __future__ import print_function
import argparse
import json
import os
import sys
def fatal(msg):
print(msg, file=sys.stderr)
sys.exit(1)
def find_swift_files(path):
for parent, dirs, files in os.walk(path, topdown=True):
for filename in files:
if not filename.endswith('.swift'):
continue
yield filename
def main(arguments):
parser = argparse.ArgumentParser(
description='Generate an output file map for the given directory')
parser.add_argument('-o', dest='output_dir',
help='Directory to which the file map will be emitted')
parser.add_argument('input_dir', help='a directory of swift files')
args = parser.parse_args(arguments)
if not args.output_dir:
fatal("output directory is required")
# Create the output directory if it doesn't already exist.
if not os.path.isdir(args.output_dir):
os.makedirs(args.output_dir)
output_path = os.path.join(args.output_dir, 'output.json')
if not os.path.isdir(args.input_dir):
fatal("input directory does not exist, or is not a directory")
    swift_files = list(find_swift_files(args.input_dir))
if not swift_files:
fatal("no swift files in the given input directory")
all_records = {}
for swift_file in swift_files:
file_name = os.path.splitext(swift_file)[0]
all_records['./' + swift_file] = {
'object': './' + file_name + '.o',
'swift-dependencies': './' + file_name + '.swiftdeps',
}
all_records[""] = {
'swift-dependencies': './main-buildrecord.swiftdeps'
}
with open(output_path, 'w') as f:
json.dump(all_records, f)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
[Incremental] Add a utility for generating simple output file maps#!/usr/bin/env python
from __future__ import print_function
import argparse
import json
import os
import sys
def fatal(msg):
print(msg, file=sys.stderr)
sys.exit(1)
def find_swift_files(path):
for parent, dirs, files in os.walk(path, topdown=True):
for filename in files:
if not filename.endswith('.swift'):
continue
yield filename
def main(arguments):
parser = argparse.ArgumentParser(
description='Generate an output file map for the given directory')
parser.add_argument('-o', dest='output_dir',
help='Directory to which the file map will be emitted')
parser.add_argument('input_dir', help='a directory of swift files')
args = parser.parse_args(arguments)
if not args.output_dir:
fatal("output directory is required")
# Create the output directory if it doesn't already exist.
if not os.path.isdir(args.output_dir):
os.makedirs(args.output_dir)
output_path = os.path.join(args.output_dir, 'output.json')
if not os.path.isdir(args.input_dir):
fatal("input directory does not exist, or is not a directory")
    swift_files = list(find_swift_files(args.input_dir))
if not swift_files:
fatal("no swift files in the given input directory")
all_records = {}
for swift_file in swift_files:
file_name = os.path.splitext(swift_file)[0]
all_records['./' + swift_file] = {
'object': './' + file_name + '.o',
'swift-dependencies': './' + file_name + '.swiftdeps',
}
all_records[""] = {
'swift-dependencies': './main-buildrecord.swiftdeps'
}
with open(output_path, 'w') as f:
json.dump(all_records, f)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
<commit_before><commit_msg>[Incremental] Add a utility for generating simple output file maps<commit_after>#!/usr/bin/env python
from __future__ import print_function
import argparse
import json
import os
import sys
def fatal(msg):
print(msg, file=sys.stderr)
sys.exit(1)
def find_swift_files(path):
for parent, dirs, files in os.walk(path, topdown=True):
for filename in files:
if not filename.endswith('.swift'):
continue
yield filename
def main(arguments):
parser = argparse.ArgumentParser(
description='Generate an output file map for the given directory')
parser.add_argument('-o', dest='output_dir',
help='Directory to which the file map will be emitted')
parser.add_argument('input_dir', help='a directory of swift files')
args = parser.parse_args(arguments)
if not args.output_dir:
fatal("output directory is required")
# Create the output directory if it doesn't already exist.
if not os.path.isdir(args.output_dir):
os.makedirs(args.output_dir)
output_path = os.path.join(args.output_dir, 'output.json')
if not os.path.isdir(args.input_dir):
fatal("input directory does not exist, or is not a directory")
    swift_files = list(find_swift_files(args.input_dir))
if not swift_files:
fatal("no swift files in the given input directory")
all_records = {}
for swift_file in swift_files:
file_name = os.path.splitext(swift_file)[0]
all_records['./' + swift_file] = {
'object': './' + file_name + '.o',
'swift-dependencies': './' + file_name + '.swiftdeps',
}
all_records[""] = {
'swift-dependencies': './main-buildrecord.swiftdeps'
}
with open(output_path, 'w') as f:
json.dump(all_records, f)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
|
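For an input directory containing main.swift and helper.swift, the emitted output.json would look roughly like this (key order aside; two hypothetical inputs, paths per the record format above):
# Sketch: shape of the emitted output.json for two hypothetical inputs.
{
    "./main.swift": {"object": "./main.o",
                     "swift-dependencies": "./main.swiftdeps"},
    "./helper.swift": {"object": "./helper.o",
                       "swift-dependencies": "./helper.swiftdeps"},
    "": {"swift-dependencies": "./main-buildrecord.swiftdeps"}
}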
34d64cc5eaf5db73791d8b21d58c58bc8c1f3fff
|
greenfan/management/commands/wait-for-non-build-nodes.py
|
greenfan/management/commands/wait-for-non-build-nodes.py
|
#
# Copyright 2012 Cisco Systems, Inc.
#
# Author: Soren Hansen <sorhanse@cisco.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tempfile
import urlparse
from subprocess import Popen
from time import sleep, time
from django.core.management.base import BaseCommand
from django.template import Context, Template
from fabric.api import env as fabric_env
from fabric.api import run, local, sudo, put
from greenfan import utils
from greenfan.models import Configuration, TestSpecification, Server
def run_cmd(args):
proc = Popen(args)
return proc.communicate()
class Command(BaseCommand):
def handle(self, job_id, **options):
job = TestSpecification.objects.get(id=job_id)
config = Configuration.get()
fabric_env.host_string = '%s@%s' % (config.admin_user, job.build_node().ip)
fabric_env.password = config.admin_password
fabric_env.abort_on_prompts = True
fabric_env.sudo_prefix = 'sudo -H -S -p \'%(sudo_prompt)s\' '
timeout = time() + 60*60
expected_set = set([node.fqdn() for node in job.nodes()])
while timeout > time():
out = sudo('cd /var/lib/puppet/reports ; ls | cat')
actual_set = set([name.strip() for name in out.split('\n')])
if actual_set == expected_set:
return ''
print 'Not done yet. %d seconds left' % (timeout - time(),)
sleep(5)
raise Exception('Timed out')
|
Add script to wait for puppet run to finish on non-build-nodes
|
Add script to wait for puppet run to finish on non-build-nodes
|
Python
|
apache-2.0
|
sorenh/python-django-greenfan,sorenh/python-django-greenfan
|
Add script to wait for puppet run to finish on non-build-nodes
|
#
# Copyright 2012 Cisco Systems, Inc.
#
# Author: Soren Hansen <sorhanse@cisco.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tempfile
import urlparse
from subprocess import Popen
from time import sleep, time
from django.core.management.base import BaseCommand
from django.template import Context, Template
from fabric.api import env as fabric_env
from fabric.api import run, local, sudo, put
from greenfan import utils
from greenfan.models import Configuration, TestSpecification, Server
def run_cmd(args):
proc = Popen(args)
return proc.communicate()
class Command(BaseCommand):
def handle(self, job_id, **options):
job = TestSpecification.objects.get(id=job_id)
config = Configuration.get()
fabric_env.host_string = '%s@%s' % (config.admin_user, job.build_node().ip)
fabric_env.password = config.admin_password
fabric_env.abort_on_prompts = True
fabric_env.sudo_prefix = 'sudo -H -S -p \'%(sudo_prompt)s\' '
timeout = time() + 60*60
expected_set = set([node.fqdn() for node in job.nodes()])
while timeout > time():
out = sudo('cd /var/lib/puppet/reports ; ls | cat')
actual_set = set([name.strip() for name in out.split('\n')])
if actual_set == expected_set:
return ''
print 'Not done yet. %d seconds left' % (timeout - time(),)
sleep(5)
raise Exception('Timed out')
|
<commit_before><commit_msg>Add script to wait for puppet run to finish on non-build-nodes<commit_after>
|
#
# Copyright 2012 Cisco Systems, Inc.
#
# Author: Soren Hansen <sorhanse@cisco.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tempfile
import urlparse
from subprocess import Popen
from time import sleep, time
from django.core.management.base import BaseCommand
from django.template import Context, Template
from fabric.api import env as fabric_env
from fabric.api import run, local, sudo, put
from greenfan import utils
from greenfan.models import Configuration, TestSpecification, Server
def run_cmd(args):
proc = Popen(args)
return proc.communicate()
class Command(BaseCommand):
def handle(self, job_id, **options):
job = TestSpecification.objects.get(id=job_id)
config = Configuration.get()
fabric_env.host_string = '%s@%s' % (config.admin_user, job.build_node().ip)
fabric_env.password = config.admin_password
fabric_env.abort_on_prompts = True
fabric_env.sudo_prefix = 'sudo -H -S -p \'%(sudo_prompt)s\' '
timeout = time() + 60*60
expected_set = set([node.fqdn() for node in job.nodes()])
while timeout > time():
out = sudo('cd /var/lib/puppet/reports ; ls | cat')
actual_set = set([name.strip() for name in out.split('\n')])
if actual_set == expected_set:
return ''
print 'Not done yet. %d seconds left' % (timeout - time(),)
sleep(5)
raise Exception('Timed out')
|
Add script to wait for puppet run to finish on non-build-nodes#
# Copyright 2012 Cisco Systems, Inc.
#
# Author: Soren Hansen <sorhanse@cisco.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tempfile
import urlparse
from subprocess import Popen
from time import sleep, time
from django.core.management.base import BaseCommand
from django.template import Context, Template
from fabric.api import env as fabric_env
from fabric.api import run, local, sudo, put
from greenfan import utils
from greenfan.models import Configuration, TestSpecification, Server
def run_cmd(args):
proc = Popen(args)
return proc.communicate()
class Command(BaseCommand):
def handle(self, job_id, **options):
job = TestSpecification.objects.get(id=job_id)
config = Configuration.get()
fabric_env.host_string = '%s@%s' % (config.admin_user, job.build_node().ip)
fabric_env.password = config.admin_password
fabric_env.abort_on_prompts = True
fabric_env.sudo_prefix = 'sudo -H -S -p \'%(sudo_prompt)s\' '
timeout = time() + 60*60
expected_set = set([node.fqdn() for node in job.nodes()])
while timeout > time():
out = sudo('cd /var/lib/puppet/reports ; ls | cat')
actual_set = set([name.strip() for name in out.split('\n')])
if actual_set == expected_set:
return ''
print 'Not done yet. %d seconds left' % (timeout - time(),)
sleep(5)
raise Exception('Timed out')
|
<commit_before><commit_msg>Add script to wait for puppet run to finish on non-build-nodes<commit_after>#
# Copyright 2012 Cisco Systems, Inc.
#
# Author: Soren Hansen <sorhanse@cisco.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tempfile
import urlparse
from subprocess import Popen
from time import sleep, time
from django.core.management.base import BaseCommand
from django.template import Context, Template
from fabric.api import env as fabric_env
from fabric.api import run, local, sudo, put
from greenfan import utils
from greenfan.models import Configuration, TestSpecification, Server
def run_cmd(args):
proc = Popen(args)
return proc.communicate()
class Command(BaseCommand):
def handle(self, job_id, **options):
job = TestSpecification.objects.get(id=job_id)
config = Configuration.get()
fabric_env.host_string = '%s@%s' % (config.admin_user, job.build_node().ip)
fabric_env.password = config.admin_password
fabric_env.abort_on_prompts = True
fabric_env.sudo_prefix = 'sudo -H -S -p \'%(sudo_prompt)s\' '
timeout = time() + 60*60
expected_set = set([node.fqdn() for node in job.nodes()])
while timeout > time():
out = sudo('cd /var/lib/puppet/reports ; ls | cat')
actual_set = set([name.strip() for name in out.split('\n')])
if actual_set == expected_set:
return ''
print 'Not done yet. %d seconds left' % (timeout - time(),)
sleep(5)
raise Exception('Timed out')
|
|
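The script above reduces to a polling loop that compares Puppet's report directory against the expected node FQDNs. A minimal standalone sketch of that loop (the function name and the report-listing callable are hypothetical; the real script shells out over Fabric and reads the job model):
from time import sleep, time
def wait_for_reports(list_reports, expected_fqdns, max_wait=3600, poll=5):
    # Poll until the set of Puppet report names matches the expected node FQDNs.
    deadline = time() + max_wait
    while time() < deadline:
        actual = set(name.strip() for name in list_reports() if name.strip())
        if actual == expected_fqdns:
            return
        sleep(poll)
    raise Exception('Timed out')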
aaa9f661554121ec545763664d09431f3ae529b1
|
benchexec/tools/btor2c.py
|
benchexec/tools/btor2c.py
|
# This file is part of BenchExec, a framework for reliable benchmarking:
# https://github.com/sosy-lab/benchexec
#
# SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org>
#
# SPDX-License-Identifier: Apache-2.0
import logging
import benchexec.tools.template
class Tool(benchexec.tools.template.BaseTool2):
"""
Tool info for BTOR2C: A Converter from BTOR2 models to C programs
URL: https://gitlab.com/sosy-lab/software/btor2c
"""
def executable(self, tool_locator):
return tool_locator.find_executable("btor2code", subdir="build")
def name(self):
return "BTOR2C"
def cmdline(self, executable, options, task, rlimits):
return [executable] + options + [task.single_input_file]
def get_value_from_output(self, output, identifier):
# search for the text in output and get its value,
        # use the first line that starts with the searched text
# warn if there are more lines (multiple statistics from sequential analysis?)
match = None
for line in output:
if line.lstrip().startswith(identifier):
startPosition = line.find(":") + 1
endPosition = line.find("(", startPosition)
if endPosition == -1:
endPosition = len(line)
if match is None:
match = line[startPosition:endPosition].strip()
else:
logging.warning(
"skipping repeated match for identifier '%s': '%s'",
identifier,
line,
)
return match
|
Add a tool-info module for BTOR2C
|
Add a tool-info module for BTOR2C
- BTOR2C is a converter from BTOR2 models to C programs
|
Python
|
apache-2.0
|
ultimate-pa/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec
|
Add a tool-info module for BTOR2C
- BTOR2C is a converter from BTOR2 models to C programs
|
# This file is part of BenchExec, a framework for reliable benchmarking:
# https://github.com/sosy-lab/benchexec
#
# SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org>
#
# SPDX-License-Identifier: Apache-2.0
import logging
import benchexec.tools.template
class Tool(benchexec.tools.template.BaseTool2):
"""
Tool info for BTOR2C: A Converter from BTOR2 models to C programs
URL: https://gitlab.com/sosy-lab/software/btor2c
"""
def executable(self, tool_locator):
return tool_locator.find_executable("btor2code", subdir="build")
def name(self):
return "BTOR2C"
def cmdline(self, executable, options, task, rlimits):
return [executable] + options + [task.single_input_file]
def get_value_from_output(self, output, identifier):
# search for the text in output and get its value,
        # use the first line that starts with the searched text
# warn if there are more lines (multiple statistics from sequential analysis?)
match = None
for line in output:
if line.lstrip().startswith(identifier):
startPosition = line.find(":") + 1
endPosition = line.find("(", startPosition)
if endPosition == -1:
endPosition = len(line)
if match is None:
match = line[startPosition:endPosition].strip()
else:
logging.warning(
"skipping repeated match for identifier '%s': '%s'",
identifier,
line,
)
return match
|
<commit_before><commit_msg>Add a tool-info module for BTOR2C
- BTOR2C is a converter from BTOR2 models to C programs<commit_after>
|
# This file is part of BenchExec, a framework for reliable benchmarking:
# https://github.com/sosy-lab/benchexec
#
# SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org>
#
# SPDX-License-Identifier: Apache-2.0
import logging
import benchexec.tools.template
class Tool(benchexec.tools.template.BaseTool2):
"""
Tool info for BTOR2C: A Converter from BTOR2 models to C programs
URL: https://gitlab.com/sosy-lab/software/btor2c
"""
def executable(self, tool_locator):
return tool_locator.find_executable("btor2code", subdir="build")
def name(self):
return "BTOR2C"
def cmdline(self, executable, options, task, rlimits):
return [executable] + options + [task.single_input_file]
def get_value_from_output(self, output, identifier):
# search for the text in output and get its value,
        # use the first line that starts with the searched text
# warn if there are more lines (multiple statistics from sequential analysis?)
match = None
for line in output:
if line.lstrip().startswith(identifier):
startPosition = line.find(":") + 1
endPosition = line.find("(", startPosition)
if endPosition == -1:
endPosition = len(line)
if match is None:
match = line[startPosition:endPosition].strip()
else:
logging.warning(
"skipping repeated match for identifier '%s': '%s'",
identifier,
line,
)
return match
|
Add a tool-info module for BTOR2C
- BTOR2C is a converter from BTOR2 models to C programs# This file is part of BenchExec, a framework for reliable benchmarking:
# https://github.com/sosy-lab/benchexec
#
# SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org>
#
# SPDX-License-Identifier: Apache-2.0
import logging
import benchexec.tools.template
class Tool(benchexec.tools.template.BaseTool2):
"""
Tool info for BTOR2C: A Converter from BTOR2 models to C programs
URL: https://gitlab.com/sosy-lab/software/btor2c
"""
def executable(self, tool_locator):
return tool_locator.find_executable("btor2code", subdir="build")
def name(self):
return "BTOR2C"
def cmdline(self, executable, options, task, rlimits):
return [executable] + options + [task.single_input_file]
def get_value_from_output(self, output, identifier):
# search for the text in output and get its value,
        # use the first line that starts with the searched text
# warn if there are more lines (multiple statistics from sequential analysis?)
match = None
for line in output:
if line.lstrip().startswith(identifier):
startPosition = line.find(":") + 1
endPosition = line.find("(", startPosition)
if endPosition == -1:
endPosition = len(line)
if match is None:
match = line[startPosition:endPosition].strip()
else:
logging.warning(
"skipping repeated match for identifier '%s': '%s'",
identifier,
line,
)
return match
|
<commit_before><commit_msg>Add a tool-info module for BTOR2C
- BTOR2C is a converter from BTOR2 models to C programs<commit_after># This file is part of BenchExec, a framework for reliable benchmarking:
# https://github.com/sosy-lab/benchexec
#
# SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org>
#
# SPDX-License-Identifier: Apache-2.0
import logging
import benchexec.tools.template
class Tool(benchexec.tools.template.BaseTool2):
"""
Tool info for BTOR2C: A Converter from BTOR2 models to C programs
URL: https://gitlab.com/sosy-lab/software/btor2c
"""
def executable(self, tool_locator):
return tool_locator.find_executable("btor2code", subdir="build")
def name(self):
return "BTOR2C"
def cmdline(self, executable, options, task, rlimits):
return [executable] + options + [task.single_input_file]
def get_value_from_output(self, output, identifier):
# search for the text in output and get its value,
        # use the first line that starts with the searched text
# warn if there are more lines (multiple statistics from sequential analysis?)
match = None
for line in output:
if line.lstrip().startswith(identifier):
startPosition = line.find(":") + 1
endPosition = line.find("(", startPosition)
if endPosition == -1:
endPosition = len(line)
if match is None:
match = line[startPosition:endPosition].strip()
else:
logging.warning(
"skipping repeated match for identifier '%s': '%s'",
identifier,
line,
)
return match
|
|
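A quick usage sketch for get_value_from_output above; the statistics lines are invented for illustration, and the import path assumes the module ships inside an installed benchexec:
from benchexec.tools.btor2c import Tool
lines = [
    "  conversion time: 0.42s (cpu)",
    "  conversion time: 0.40s",  # repeated match: first value kept, warning logged
]
print(Tool().get_value_from_output(lines, "conversion time"))  # -> "0.42s"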
89278de3f74bbb18d62b90b909d26bd7ef7c156c
|
src/python/utils.py
|
src/python/utils.py
|
def quiet_logs(sc):
logger = sc._jvm.org.apache.log4j
logger.LogManager.getLogger("org").setLevel(logger.Level.ERROR)
logger.LogManager.getLogger("akka").setLevel(logger.Level.ERROR)
|
Move quiet_logs to separate module
|
Move quiet_logs to separate module
|
Python
|
apache-2.0
|
project-asap/telecom-analytics,project-asap/telecom-analytics
|
Move quiet_logs to separate module
|
def quiet_logs(sc):
logger = sc._jvm.org.apache.log4j
logger.LogManager.getLogger("org").setLevel(logger.Level.ERROR)
logger.LogManager.getLogger("akka").setLevel(logger.Level.ERROR)
|
<commit_before><commit_msg>Move quiet_logs to separate module<commit_after>
|
def quiet_logs(sc):
logger = sc._jvm.org.apache.log4j
logger.LogManager.getLogger("org").setLevel(logger.Level.ERROR)
logger.LogManager.getLogger("akka").setLevel(logger.Level.ERROR)
|
Move quiet_logs to separate moduledef quiet_logs(sc):
logger = sc._jvm.org.apache.log4j
logger.LogManager.getLogger("org").setLevel(logger.Level.ERROR)
logger.LogManager.getLogger("akka").setLevel(logger.Level.ERROR)
|
<commit_before><commit_msg>Move quiet_logs to separate module<commit_after>def quiet_logs(sc):
logger = sc._jvm.org.apache.log4j
logger.LogManager.getLogger("org").setLevel(logger.Level.ERROR)
logger.LogManager.getLogger("akka").setLevel(logger.Level.ERROR)
|
|
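Typical call site for the helper above (a sketch; it assumes a live PySpark driver and that the module is importable as utils):
from pyspark import SparkContext
from utils import quiet_logs
sc = SparkContext(appName="example")
quiet_logs(sc)  # raise the org.* and akka loggers to ERROR before running jobs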
b819d08c5df0063e691f7b675937ef2d9d7d23bc
|
examples/python/download_packages_with_fastestmirror.py
|
examples/python/download_packages_with_fastestmirror.py
|
#!/usr/bin/env python
"""
librepo - download packages
"""
import os
import os.path
import time
import librepo
CACHE = "fastestmirror.cache"
LIBREPOPKG = "librepo-1.2.1-2.fc20.x86_64.rpm"
LAMEPKG = "lame-3.99.5-2.fc19.x86_64.rpm"
if __name__ == "__main__":
# Setup logging
def debug_function(msg, _):
print msg
#librepo.set_debug_log_handler(debug_function)
    # Remove packages if they already exist
def remove_pkg(filename):
if os.path.exists(filename):
os.remove(filename)
remove_pkg(LIBREPOPKG)
remove_pkg(LAMEPKG)
# Prepare list of targets
packages = []
# Prepare first target
h1 = librepo.Handle()
h1.metalinkurl = "https://mirrors.fedoraproject.org/metalink?repo=fedora-20&arch=x86_64"
h1.repotype = librepo.YUMREPO
h1.fastestmirror = True
h1.fastestmirrorcache = CACHE
target = librepo.PackageTarget("Packages/l/"+LIBREPOPKG, handle=h1)
packages.append(target)
# Prepare second target
h2 = librepo.Handle()
h2.mirrorlisturl = "http://mirrors.rpmfusion.org/mirrorlist?repo=free-fedora-19&arch=x86_64"
h2.repotype = librepo.YUMREPO
h2.fastestmirror = True
h2.fastestmirrorcache = CACHE
target = librepo.PackageTarget(LAMEPKG, handle=h2)
packages.append(target)
t = time.time()
librepo.download_packages(packages)
print "Download duration: {0}s\n".format((time.time() - t))
for target in packages:
print "### %s: %s" % (target.local_path, target.err or "OK")
print "Local path: ", target.local_path
print "Error: ", target.err
print
|
Add usage example of LRO_FASTESTMIRRORCACHE
|
example: Add usage example of LRO_FASTESTMIRRORCACHE
|
Python
|
lgpl-2.1
|
Tojaj/librepo,rpm-software-management/librepo,bgamari/librepo,rholy/librepo,cgwalters/librepo,Conan-Kudo/librepo,cgwalters/librepo,rpm-software-management/librepo,Conan-Kudo/librepo,bgamari/librepo,cgwalters/librepo,rholy/librepo,Conan-Kudo/librepo,rholy/librepo,bgamari/librepo,Tojaj/librepo,rholy/librepo,Tojaj/librepo,rpm-software-management/librepo,cgwalters/librepo
|
example: Add usage example of LRO_FASTESTMIRRORCACHE
|
#!/usr/bin/env python
"""
librepo - download packages
"""
import os
import os.path
import time
import librepo
CACHE = "fastestmirror.cache"
LIBREPOPKG = "librepo-1.2.1-2.fc20.x86_64.rpm"
LAMEPKG = "lame-3.99.5-2.fc19.x86_64.rpm"
if __name__ == "__main__":
# Setup logging
def debug_function(msg, _):
print msg
#librepo.set_debug_log_handler(debug_function)
    # Remove packages if they already exist
def remove_pkg(filename):
if os.path.exists(filename):
os.remove(filename)
remove_pkg(LIBREPOPKG)
remove_pkg(LAMEPKG)
# Prepare list of targets
packages = []
# Prepare first target
h1 = librepo.Handle()
h1.metalinkurl = "https://mirrors.fedoraproject.org/metalink?repo=fedora-20&arch=x86_64"
h1.repotype = librepo.YUMREPO
h1.fastestmirror = True
h1.fastestmirrorcache = CACHE
target = librepo.PackageTarget("Packages/l/"+LIBREPOPKG, handle=h1)
packages.append(target)
# Prepare second target
h2 = librepo.Handle()
h2.mirrorlisturl = "http://mirrors.rpmfusion.org/mirrorlist?repo=free-fedora-19&arch=x86_64"
h2.repotype = librepo.YUMREPO
h2.fastestmirror = True
h2.fastestmirrorcache = CACHE
target = librepo.PackageTarget(LAMEPKG, handle=h2)
packages.append(target)
t = time.time()
librepo.download_packages(packages)
print "Download duration: {0}s\n".format((time.time() - t))
for target in packages:
print "### %s: %s" % (target.local_path, target.err or "OK")
print "Local path: ", target.local_path
print "Error: ", target.err
print
|
<commit_before><commit_msg>example: Add usage example of LRO_FASTESTMIRRORCACHE<commit_after>
|
#!/usr/bin/env python
"""
librepo - download packages
"""
import os
import os.path
import time
import librepo
CACHE = "fastestmirror.cache"
LIBREPOPKG = "librepo-1.2.1-2.fc20.x86_64.rpm"
LAMEPKG = "lame-3.99.5-2.fc19.x86_64.rpm"
if __name__ == "__main__":
# Setup logging
def debug_function(msg, _):
print msg
#librepo.set_debug_log_handler(debug_function)
    # Remove packages if they already exist
def remove_pkg(filename):
if os.path.exists(filename):
os.remove(filename)
remove_pkg(LIBREPOPKG)
remove_pkg(LAMEPKG)
# Prepare list of targets
packages = []
# Prepare first target
h1 = librepo.Handle()
h1.metalinkurl = "https://mirrors.fedoraproject.org/metalink?repo=fedora-20&arch=x86_64"
h1.repotype = librepo.YUMREPO
h1.fastestmirror = True
h1.fastestmirrorcache = CACHE
target = librepo.PackageTarget("Packages/l/"+LIBREPOPKG, handle=h1)
packages.append(target)
# Prepare second target
h2 = librepo.Handle()
h2.mirrorlisturl = "http://mirrors.rpmfusion.org/mirrorlist?repo=free-fedora-19&arch=x86_64"
h2.repotype = librepo.YUMREPO
h2.fastestmirror = True
h2.fastestmirrorcache = CACHE
target = librepo.PackageTarget(LAMEPKG, handle=h2)
packages.append(target)
t = time.time()
librepo.download_packages(packages)
print "Download duration: {0}s\n".format((time.time() - t))
for target in packages:
print "### %s: %s" % (target.local_path, target.err or "OK")
print "Local path: ", target.local_path
print "Error: ", target.err
print
|
example: Add usage example of LRO_FASTESTMIRRORCACHE#!/usr/bin/env python
"""
librepo - download packages
"""
import os
import os.path
import time
import librepo
CACHE = "fastestmirror.cache"
LIBREPOPKG = "librepo-1.2.1-2.fc20.x86_64.rpm"
LAMEPKG = "lame-3.99.5-2.fc19.x86_64.rpm"
if __name__ == "__main__":
# Setup logging
def debug_function(msg, _):
print msg
#librepo.set_debug_log_handler(debug_function)
    # Remove packages if they already exist
def remove_pkg(filename):
if os.path.exists(filename):
os.remove(filename)
remove_pkg(LIBREPOPKG)
remove_pkg(LAMEPKG)
# Prepare list of targets
packages = []
# Prepare first target
h1 = librepo.Handle()
h1.metalinkurl = "https://mirrors.fedoraproject.org/metalink?repo=fedora-20&arch=x86_64"
h1.repotype = librepo.YUMREPO
h1.fastestmirror = True
h1.fastestmirrorcache = CACHE
target = librepo.PackageTarget("Packages/l/"+LIBREPOPKG, handle=h1)
packages.append(target)
# Prepare second target
h2 = librepo.Handle()
h2.mirrorlisturl = "http://mirrors.rpmfusion.org/mirrorlist?repo=free-fedora-19&arch=x86_64"
h2.repotype = librepo.YUMREPO
h2.fastestmirror = True
h2.fastestmirrorcache = CACHE
target = librepo.PackageTarget(LAMEPKG, handle=h2)
packages.append(target)
t = time.time()
librepo.download_packages(packages)
print "Download duration: {0}s\n".format((time.time() - t))
for target in packages:
print "### %s: %s" % (target.local_path, target.err or "OK")
print "Local path: ", target.local_path
print "Error: ", target.err
print
|
<commit_before><commit_msg>example: Add usage example of LRO_FASTESTMIRRORCACHE<commit_after>#!/usr/bin/env python
"""
librepo - download packages
"""
import os
import os.path
import time
import librepo
CACHE = "fastestmirror.cache"
LIBREPOPKG = "librepo-1.2.1-2.fc20.x86_64.rpm"
LAMEPKG = "lame-3.99.5-2.fc19.x86_64.rpm"
if __name__ == "__main__":
# Setup logging
def debug_function(msg, _):
print msg
#librepo.set_debug_log_handler(debug_function)
    # Remove packages if they already exist
def remove_pkg(filename):
if os.path.exists(filename):
os.remove(filename)
remove_pkg(LIBREPOPKG)
remove_pkg(LAMEPKG)
# Prepare list of targets
packages = []
# Prepare first target
h1 = librepo.Handle()
h1.metalinkurl = "https://mirrors.fedoraproject.org/metalink?repo=fedora-20&arch=x86_64"
h1.repotype = librepo.YUMREPO
h1.fastestmirror = True
h1.fastestmirrorcache = CACHE
target = librepo.PackageTarget("Packages/l/"+LIBREPOPKG, handle=h1)
packages.append(target)
# Prepare second target
h2 = librepo.Handle()
h2.mirrorlisturl = "http://mirrors.rpmfusion.org/mirrorlist?repo=free-fedora-19&arch=x86_64"
h2.repotype = librepo.YUMREPO
h2.fastestmirror = True
h2.fastestmirrorcache = CACHE
target = librepo.PackageTarget(LAMEPKG, handle=h2)
packages.append(target)
t = time.time()
librepo.download_packages(packages)
print "Download duration: {0}s\n".format((time.time() - t))
for target in packages:
print "### %s: %s" % (target.local_path, target.err or "OK")
print "Local path: ", target.local_path
print "Error: ", target.err
print
|
|
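A small follow-up sketch continuing the example above: after download_packages returns, collect the targets that failed, using only the err and local_path attributes the script already reads:
failed = [t.local_path for t in packages if t.err]
if failed:
    print "%d download(s) failed: %s" % (len(failed), ", ".join(failed))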
b757a22bd16ff94c20eb5831acdfd74824fd8a0a
|
nb_classifier.py
|
nb_classifier.py
|
from feature_format import featureFormat, targetFeatureSplit
import pickle
from sklearn.naive_bayes import GaussianNB
# loading the enron data dictionary
with open("final_project_dataset.pkl", "r") as data_file:
data_dict = pickle.load(data_file)
# removing 'TOTAL' outlier
del data_dict['TOTAL']
# selecting only 2 features - total_stock_value and bonus for now
# total_stock_value - data available for all POIs and second best feature
# bonus - data available for 16 out of 18 POIs and third best feature
features_list = ['poi',
'total_stock_value',
'bonus']
# creating list of labels and list of numpy arrays containing the features
data = featureFormat(data_dict, features_list, sort_keys = True)
labels, features = targetFeatureSplit(data)
# Fitting and testing Gaussian Naive Bayes Classifier
clf = GaussianNB()
clf.fit(features, labels)
print clf.score(features, labels)
|
Create first pass Gaussian Naive Bayes Classifier
|
feat: Create first pass Gaussian Naive Bayes Classifier
|
Python
|
mit
|
rjegankumar/enron_email_fraud_identification
|
feat: Create first pass Gaussian Naive Bayes Classifier
|
from feature_format import featureFormat, targetFeatureSplit
import pickle
from sklearn.naive_bayes import GaussianNB
# loading the enron data dictionary
with open("final_project_dataset.pkl", "r") as data_file:
data_dict = pickle.load(data_file)
# removing 'TOTAL' outlier
del data_dict['TOTAL']
# selecting only 2 features - total_stock_value and bonus for now
# total_stock_value - data available for all POIs and second best feature
# bonus - data available for 16 out of 18 POIs and third best feature
features_list = ['poi',
'total_stock_value',
'bonus']
# creating list of labels and list of numpy arrays containing the features
data = featureFormat(data_dict, features_list, sort_keys = True)
labels, features = targetFeatureSplit(data)
# Fitting and testing Gaussian Naive Bayes Classifier
clf = GaussianNB()
clf.fit(features, labels)
print clf.score(features, labels)
|
<commit_before><commit_msg>feat: Create first pass Gaussian Naive Bayes Classifier<commit_after>
|
from feature_format import featureFormat, targetFeatureSplit
import pickle
from sklearn.naive_bayes import GaussianNB
# loading the enron data dictionary
with open("final_project_dataset.pkl", "r") as data_file:
data_dict = pickle.load(data_file)
# removing 'TOTAL' outlier
del data_dict['TOTAL']
# selecting only 2 features - total_stock_value and bonus for now
# total_stock_value - data available for all POIs and second best feature
# bonus - data available for 16 out of 18 POIs and third best feature
features_list = ['poi',
'total_stock_value',
'bonus']
# creating list of labels and list of numpy arrays containing the features
data = featureFormat(data_dict, features_list, sort_keys = True)
labels, features = targetFeatureSplit(data)
# Fitting and testing Gaussian Naive Bayes Classifier
clf = GaussianNB()
clf.fit(features, labels)
print clf.score(features, labels)
|
feat: Create first pass Gaussian Naive Bayes Classifierfrom feature_format import featureFormat, targetFeatureSplit
import pickle
from sklearn.naive_bayes import GaussianNB
# loading the enron data dictionary
with open("final_project_dataset.pkl", "r") as data_file:
data_dict = pickle.load(data_file)
# removing 'TOTAL' outlier
del data_dict['TOTAL']
# selecting only 2 features - total_stock_value and bonus for now
# total_stock_value - data available for all POIs and second best feature
# bonus - data available for 16 out of 18 POIs and third best feature
features_list = ['poi',
'total_stock_value',
'bonus']
# creating list of labels and list of numpy arrays containing the features
data = featureFormat(data_dict, features_list, sort_keys = True)
labels, features = targetFeatureSplit(data)
# Fitting and testing Gaussian Naive Bayes Classifier
clf = GaussianNB()
clf.fit(features, labels)
print clf.score(features, labels)
|
<commit_before><commit_msg>feat: Create first pass Gaussian Naive Bayes Classifier<commit_after>from feature_format import featureFormat, targetFeatureSplit
import pickle
from sklearn.naive_bayes import GaussianNB
# loading the enron data dictionary
with open("final_project_dataset.pkl", "r") as data_file:
data_dict = pickle.load(data_file)
# removing 'TOTAL' outlier
del data_dict['TOTAL']
# selecting only 2 features - total_stock_value and bonus for now
# total_stock_value - data available for all POIs and second best feature
# bonus - data available for 16 out of 18 POIs and third best feature
features_list = ['poi',
'total_stock_value',
'bonus']
# creating list of labels and list of numpy arrays containing the features
data = featureFormat(data_dict, features_list, sort_keys = True)
labels, features = targetFeatureSplit(data)
# Fitting and testing Gaussian Naive Bayes Classifier
clf = GaussianNB()
clf.fit(features, labels)
print clf.score(features, labels)
|
|
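Note that the script above scores the classifier on its own training data, which overstates accuracy. A hedged sketch of a held-out evaluation (assuming sklearn.model_selection is available; scikit-learn releases contemporary with this script shipped train_test_split in sklearn.cross_validation instead):
from sklearn.model_selection import train_test_split
features_train, features_test, labels_train, labels_test = train_test_split(
    features, labels, test_size=0.3, random_state=42)
clf = GaussianNB()
clf.fit(features_train, labels_train)
print clf.score(features_test, labels_test)  # generalization estimate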
3a5975852a1b751c018e56bd3f2edadb48bbbcbd
|
scripts/migrate_piwik_base_nodes.py
|
scripts/migrate_piwik_base_nodes.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Update Piwik for nodes that were forked, registered or templated prior to
October 2014.
"""
import datetime
import logging
import sys
import time
from modularodm import Q
from framework.analytics.piwik import _update_node_object
from scripts import utils as scripts_utils
from website.app import init_app
from website.models import Node
logger = logging.getLogger('root')
def get_nodes():
forked = Q('__backrefs.forked.node.forked_from', 'ne', None)
registered = Q('__backrefs.registrations.node.registered_from', 'ne', None)
templated = Q('__backrefs.template_node.node.template_node', 'ne', None)
duplicate = (forked | registered | templated)
return Node.find(
        duplicate & Q('date_created', 'lt', datetime.datetime(2014, 10, 31))
)
def main():
init_app('website.settings', set_backends=True, routes=False)
if 'dry' in sys.argv:
if 'list' in sys.argv:
logger.info("=== Nodes ===")
for node in get_nodes():
logger.info(node._id)
else:
logger.info("{} Nodes to be updated".format(get_nodes().count()))
else:
# Log to a file
scripts_utils.add_file_logger(logger, __file__)
nodes = get_nodes()
logger.info("=== Updating {} Nodes ===".format(nodes.count()))
for node in nodes:
# Wait a second between requests to reduce load on Piwik
time.sleep(1)
_update_node_object(node)
logger.info(node._id)
if __name__ == "__main__":
main()
|
Add migration for nodes which may have previously shared a piwik_site_id
|
Add migration for nodes which may have previously shared a piwik_site_id
|
Python
|
apache-2.0
|
leb2dg/osf.io,leb2dg/osf.io,fabianvf/osf.io,MerlinZhang/osf.io,reinaH/osf.io,arpitar/osf.io,himanshuo/osf.io,CenterForOpenScience/osf.io,hmoco/osf.io,cslzchen/osf.io,DanielSBrown/osf.io,Nesiehr/osf.io,kch8qx/osf.io,cosenal/osf.io,Nesiehr/osf.io,reinaH/osf.io,monikagrabowska/osf.io,jmcarp/osf.io,caneruguz/osf.io,HalcyonChimera/osf.io,TomBaxter/osf.io,billyhunt/osf.io,mluke93/osf.io,jolene-esposito/osf.io,hmoco/osf.io,DanielSBrown/osf.io,haoyuchen1992/osf.io,abought/osf.io,hmoco/osf.io,caseyrollins/osf.io,KAsante95/osf.io,doublebits/osf.io,amyshi188/osf.io,bdyetton/prettychart,barbour-em/osf.io,samanehsan/osf.io,caneruguz/osf.io,mluo613/osf.io,revanthkolli/osf.io,Ghalko/osf.io,alexschiller/osf.io,leb2dg/osf.io,SSJohns/osf.io,asanfilippo7/osf.io,acshi/osf.io,lyndsysimon/osf.io,acshi/osf.io,bdyetton/prettychart,zamattiac/osf.io,haoyuchen1992/osf.io,jmcarp/osf.io,chennan47/osf.io,baylee-d/osf.io,caseyrollins/osf.io,saradbowman/osf.io,brianjgeiger/osf.io,revanthkolli/osf.io,TomBaxter/osf.io,monikagrabowska/osf.io,ZobairAlijan/osf.io,icereval/osf.io,alexschiller/osf.io,haoyuchen1992/osf.io,petermalcolm/osf.io,SSJohns/osf.io,amyshi188/osf.io,GaryKriebel/osf.io,asanfilippo7/osf.io,RomanZWang/osf.io,ZobairAlijan/osf.io,Johnetordoff/osf.io,lamdnhan/osf.io,ZobairAlijan/osf.io,laurenrevere/osf.io,adlius/osf.io,cosenal/osf.io,binoculars/osf.io,abought/osf.io,erinspace/osf.io,samchrisinger/osf.io,RomanZWang/osf.io,Ghalko/osf.io,mluke93/osf.io,arpitar/osf.io,wearpants/osf.io,kushG/osf.io,kwierman/osf.io,HarryRybacki/osf.io,himanshuo/osf.io,cslzchen/osf.io,baylee-d/osf.io,pattisdr/osf.io,sbt9uc/osf.io,cldershem/osf.io,ticklemepierce/osf.io,GageGaskins/osf.io,ckc6cz/osf.io,zkraime/osf.io,kch8qx/osf.io,KAsante95/osf.io,cosenal/osf.io,KAsante95/osf.io,brandonPurvis/osf.io,samchrisinger/osf.io,emetsger/osf.io,jnayak1/osf.io,alexschiller/osf.io,zamattiac/osf.io,wearpants/osf.io,KAsante95/osf.io,fabianvf/osf.io,CenterForOpenScience/osf.io,GageGaskins/osf.io,mfraezz/osf.io,chrisseto/osf.io,SSJohns/osf.io,njantrania/osf.io,ticklemepierce/osf.io,felliott/osf.io,sloria/osf.io,mluke93/osf.io,MerlinZhang/osf.io,cwisecarver/osf.io,wearpants/osf.io,HarryRybacki/osf.io,dplorimer/osf,crcresearch/osf.io,acshi/osf.io,GaryKriebel/osf.io,rdhyee/osf.io,kwierman/osf.io,arpitar/osf.io,zkraime/osf.io,felliott/osf.io,aaxelb/osf.io,felliott/osf.io,billyhunt/osf.io,RomanZWang/osf.io,sloria/osf.io,jolene-esposito/osf.io,cldershem/osf.io,zamattiac/osf.io,dplorimer/osf,acshi/osf.io,monikagrabowska/osf.io,brandonPurvis/osf.io,zachjanicki/osf.io,chennan47/osf.io,danielneis/osf.io,jolene-esposito/osf.io,icereval/osf.io,reinaH/osf.io,mluo613/osf.io,kwierman/osf.io,sbt9uc/osf.io,amyshi188/osf.io,sloria/osf.io,TomHeatwole/osf.io,kushG/osf.io,cwisecarver/osf.io,Johnetordoff/osf.io,lamdnhan/osf.io,jinluyuan/osf.io,danielneis/osf.io,brianjgeiger/osf.io,brandonPurvis/osf.io,zachjanicki/osf.io,GageGaskins/osf.io,Johnetordoff/osf.io,haoyuchen1992/osf.io,barbour-em/osf.io,brandonPurvis/osf.io,petermalcolm/osf.io,himanshuo/osf.io,petermalcolm/osf.io,jeffreyliu3230/osf.io,cslzchen/osf.io,DanielSBrown/osf.io,emetsger/osf.io,mluke93/osf.io,caseyrygt/osf.io,RomanZWang/osf.io,ckc6cz/osf.io,petermalcolm/osf.io,baylee-d/osf.io,Nesiehr/osf.io,TomHeatwole/osf.io,lyndsysimon/osf.io,kushG/osf.io,lyndsysimon/osf.io,chrisseto/osf.io,chrisseto/osf.io,doublebits/osf.io,jnayak1/osf.io,danielneis/osf.io,kch8qx/osf.io,asanfilippo7/osf.io,crcresearch/osf.io,saradbowman/osf.io,samanehsan/osf.io,hmoco/osf.io,caneruguz/osf.io,ticklemepierce/osf.io,TomHeatwole/osf.io,SSJoh
ns/osf.io,KAsante95/osf.io,barbour-em/osf.io,kch8qx/osf.io,mattclark/osf.io,zachjanicki/osf.io,laurenrevere/osf.io,jinluyuan/osf.io,alexschiller/osf.io,abought/osf.io,adlius/osf.io,doublebits/osf.io,lamdnhan/osf.io,wearpants/osf.io,reinaH/osf.io,emetsger/osf.io,kushG/osf.io,rdhyee/osf.io,GageGaskins/osf.io,njantrania/osf.io,ticklemepierce/osf.io,jinluyuan/osf.io,jeffreyliu3230/osf.io,mluo613/osf.io,Ghalko/osf.io,emetsger/osf.io,Ghalko/osf.io,amyshi188/osf.io,icereval/osf.io,HalcyonChimera/osf.io,barbour-em/osf.io,samanehsan/osf.io,njantrania/osf.io,kwierman/osf.io,crcresearch/osf.io,asanfilippo7/osf.io,binoculars/osf.io,monikagrabowska/osf.io,aaxelb/osf.io,zkraime/osf.io,mattclark/osf.io,abought/osf.io,doublebits/osf.io,acshi/osf.io,cwisecarver/osf.io,felliott/osf.io,jnayak1/osf.io,jnayak1/osf.io,erinspace/osf.io,laurenrevere/osf.io,chrisseto/osf.io,GaryKriebel/osf.io,jolene-esposito/osf.io,bdyetton/prettychart,mattclark/osf.io,revanthkolli/osf.io,chennan47/osf.io,adlius/osf.io,mfraezz/osf.io,revanthkolli/osf.io,MerlinZhang/osf.io,jmcarp/osf.io,DanielSBrown/osf.io,lamdnhan/osf.io,ckc6cz/osf.io,jmcarp/osf.io,samchrisinger/osf.io,RomanZWang/osf.io,bdyetton/prettychart,billyhunt/osf.io,himanshuo/osf.io,erinspace/osf.io,dplorimer/osf,kch8qx/osf.io,samanehsan/osf.io,doublebits/osf.io,sbt9uc/osf.io,mfraezz/osf.io,HarryRybacki/osf.io,zachjanicki/osf.io,cslzchen/osf.io,brandonPurvis/osf.io,HalcyonChimera/osf.io,rdhyee/osf.io,ZobairAlijan/osf.io,leb2dg/osf.io,GaryKriebel/osf.io,samchrisinger/osf.io,adlius/osf.io,zkraime/osf.io,mfraezz/osf.io,brianjgeiger/osf.io,cosenal/osf.io,aaxelb/osf.io,njantrania/osf.io,cwisecarver/osf.io,rdhyee/osf.io,danielneis/osf.io,jinluyuan/osf.io,GageGaskins/osf.io,brianjgeiger/osf.io,mluo613/osf.io,CenterForOpenScience/osf.io,jeffreyliu3230/osf.io,billyhunt/osf.io,HarryRybacki/osf.io,arpitar/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,dplorimer/osf,pattisdr/osf.io,caseyrollins/osf.io,Nesiehr/osf.io,zamattiac/osf.io,MerlinZhang/osf.io,cldershem/osf.io,caseyrygt/osf.io,alexschiller/osf.io,fabianvf/osf.io,fabianvf/osf.io,caseyrygt/osf.io,caseyrygt/osf.io,mluo613/osf.io,cldershem/osf.io,sbt9uc/osf.io,TomBaxter/osf.io,ckc6cz/osf.io,caneruguz/osf.io,TomHeatwole/osf.io,jeffreyliu3230/osf.io,pattisdr/osf.io,billyhunt/osf.io,lyndsysimon/osf.io,monikagrabowska/osf.io,binoculars/osf.io,CenterForOpenScience/osf.io
|
Add migration for nodes which may have previously shared a piwik_site_id
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Update Piwik for nodes that were forked, registered or templated prior to
October 2014.
"""
import datetime
import logging
import sys
import time
from modularodm import Q
from framework.analytics.piwik import _update_node_object
from scripts import utils as scripts_utils
from website.app import init_app
from website.models import Node
logger = logging.getLogger('root')
def get_nodes():
forked = Q('__backrefs.forked.node.forked_from', 'ne', None)
registered = Q('__backrefs.registrations.node.registered_from', 'ne', None)
templated = Q('__backrefs.template_node.node.template_node', 'ne', None)
duplicate = (forked | registered | templated)
return Node.find(
        duplicate & Q('date_created', 'lt', datetime.datetime(2014, 10, 31))
)
def main():
init_app('website.settings', set_backends=True, routes=False)
if 'dry' in sys.argv:
if 'list' in sys.argv:
logger.info("=== Nodes ===")
for node in get_nodes():
logger.info(node._id)
else:
logger.info("{} Nodes to be updated".format(get_nodes().count()))
else:
# Log to a file
scripts_utils.add_file_logger(logger, __file__)
nodes = get_nodes()
logger.info("=== Updating {} Nodes ===".format(nodes.count()))
for node in nodes:
# Wait a second between requests to reduce load on Piwik
time.sleep(1)
_update_node_object(node)
logger.info(node._id)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add migration for nodes which may have previously shared a piwik_site_id<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Update Piwik for nodes that were forked, registered or templated prior to
October 2014.
"""
import datetime
import logging
import sys
import time
from modularodm import Q
from framework.analytics.piwik import _update_node_object
from scripts import utils as scripts_utils
from website.app import init_app
from website.models import Node
logger = logging.getLogger('root')
def get_nodes():
forked = Q('__backrefs.forked.node.forked_from', 'ne', None)
registered = Q('__backrefs.registrations.node.registered_from', 'ne', None)
templated = Q('__backrefs.template_node.node.template_node', 'ne', None)
duplicate = (forked | registered | templated)
return Node.find(
        duplicate & Q('date_created', 'lt', datetime.datetime(2014, 10, 31))
)
def main():
init_app('website.settings', set_backends=True, routes=False)
if 'dry' in sys.argv:
if 'list' in sys.argv:
logger.info("=== Nodes ===")
for node in get_nodes():
logger.info(node._id)
else:
logger.info("{} Nodes to be updated".format(get_nodes().count()))
else:
# Log to a file
scripts_utils.add_file_logger(logger, __file__)
nodes = get_nodes()
logger.info("=== Updating {} Nodes ===".format(nodes.count()))
for node in nodes:
# Wait a second between requests to reduce load on Piwik
time.sleep(1)
_update_node_object(node)
logger.info(node._id)
if __name__ == "__main__":
main()
|
Add migration for nodes which may have previously shared a piwik_site_id#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Update Piwik for nodes that were forked, registered or templated prior to
October 2014.
"""
import datetime
import logging
import sys
import time
from modularodm import Q
from framework.analytics.piwik import _update_node_object
from scripts import utils as scripts_utils
from website.app import init_app
from website.models import Node
logger = logging.getLogger('root')
def get_nodes():
forked = Q('__backrefs.forked.node.forked_from', 'ne', None)
registered = Q('__backrefs.registrations.node.registered_from', 'ne', None)
templated = Q('__backrefs.template_node.node.template_node', 'ne', None)
duplicate = (forked | registered | templated)
return Node.find(
        duplicate & Q('date_created', 'lt', datetime.datetime(2014, 10, 31))
)
def main():
init_app('website.settings', set_backends=True, routes=False)
if 'dry' in sys.argv:
if 'list' in sys.argv:
logger.info("=== Nodes ===")
for node in get_nodes():
logger.info(node._id)
else:
logger.info("{} Nodes to be updated".format(get_nodes().count()))
else:
# Log to a file
scripts_utils.add_file_logger(logger, __file__)
nodes = get_nodes()
logger.info("=== Updating {} Nodes ===".format(nodes.count()))
for node in nodes:
# Wait a second between requests to reduce load on Piwik
time.sleep(1)
_update_node_object(node)
logger.info(node._id)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add migration for nodes which may have previously shared a piwik_site_id<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Update Piwik for nodes that were forked, registered or templated prior to
October 2014.
"""
import datetime
import logging
import sys
import time
from modularodm import Q
from framework.analytics.piwik import _update_node_object
from scripts import utils as scripts_utils
from website.app import init_app
from website.models import Node
logger = logging.getLogger('root')
def get_nodes():
forked = Q('__backrefs.forked.node.forked_from', 'ne', None)
registered = Q('__backrefs.registrations.node.registered_from', 'ne', None)
templated = Q('__backrefs.template_node.node.template_node', 'ne', None)
duplicate = (forked | registered | templated)
return Node.find(
        duplicate & Q('date_created', 'lt', datetime.datetime(2014, 10, 31))
)
def main():
init_app('website.settings', set_backends=True, routes=False)
if 'dry' in sys.argv:
if 'list' in sys.argv:
logger.info("=== Nodes ===")
for node in get_nodes():
logger.info(node._id)
else:
logger.info("{} Nodes to be updated".format(get_nodes().count()))
else:
# Log to a file
scripts_utils.add_file_logger(logger, __file__)
nodes = get_nodes()
logger.info("=== Updating {} Nodes ===".format(nodes.count()))
for node in nodes:
# Wait a second between requests to reduce load on Piwik
time.sleep(1)
_update_node_object(node)
logger.info(node._id)
if __name__ == "__main__":
main()
|
|
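A note on the query combination in get_nodes above: Python's `and` returns its second operand whenever the first is truthy, so `duplicate and Q(...)` would silently drop the fork/registration/template filter; the overloaded `&` is what builds the intended conjunction. A generic illustration with a stand-in Q class:
class Q(object):
    def __and__(self, other):
        return ('AND', self, other)
a, b = Q(), Q()
print a and b  # just b -- the left-hand filter is discarded
print a & b    # ('AND', a, b) -- a combined query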
2fe04ba1d96b25f26a2651fe74a6271fe5991bb2
|
migrations/versions/cf62ec87d973_agency_description_to_agency_request_.py
|
migrations/versions/cf62ec87d973_agency_description_to_agency_request_.py
|
"""agency_description to agency_request_summary
Revision ID: cf62ec87d973
Revises: 971f341c0204
Create Date: 2017-05-31 16:29:17.341283
"""
# revision identifiers, used by Alembic.
revision = 'cf62ec87d973'
down_revision = '971f341c0204'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('requests', sa.Column('agency_request_summary', sa.String(length=5000), nullable=True))
op.add_column('requests', sa.Column('agency_request_summary_release_date', sa.DateTime(), nullable=True))
op.drop_column('requests', 'agency_description_release_date')
op.drop_column('requests', 'agency_description')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('requests', sa.Column('agency_description', sa.VARCHAR(length=5000), autoincrement=False, nullable=True))
op.add_column('requests', sa.Column('agency_description_release_date', postgresql.TIMESTAMP(), autoincrement=False, nullable=True))
op.drop_column('requests', 'agency_request_summary_release_date')
op.drop_column('requests', 'agency_request_summary')
### end Alembic commands ###
|
Add Database Migration for Column Rename
|
Add Database Migration for Column Rename
Migrate the column `requests.agency_description` to `requests.agency_request_summary` and `requests.agency_description_release_date` to `requests.agency_request_summary_release_date`
Signed-off-by: Joel Castillo <877468732c93e7fff2a7305e99430be238218100@records.nyc.gov>
|
Python
|
apache-2.0
|
CityOfNewYork/NYCOpenRecords,CityOfNewYork/NYCOpenRecords,CityOfNewYork/NYCOpenRecords,CityOfNewYork/NYCOpenRecords,CityOfNewYork/NYCOpenRecords
|
Add Database Migration for Column Rename
Migrate the column `requests.agency_description` to `requests.agency_request_summary` and `requests.agency_description_release_date` to `requests.agency_request_summary_release_date`
Signed-off-by: Joel Castillo <877468732c93e7fff2a7305e99430be238218100@records.nyc.gov>
|
"""agency_description to agency_request_summary
Revision ID: cf62ec87d973
Revises: 971f341c0204
Create Date: 2017-05-31 16:29:17.341283
"""
# revision identifiers, used by Alembic.
revision = 'cf62ec87d973'
down_revision = '971f341c0204'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('requests', sa.Column('agency_request_summary', sa.String(length=5000), nullable=True))
op.add_column('requests', sa.Column('agency_request_summary_release_date', sa.DateTime(), nullable=True))
op.drop_column('requests', 'agency_description_release_date')
op.drop_column('requests', 'agency_description')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('requests', sa.Column('agency_description', sa.VARCHAR(length=5000), autoincrement=False, nullable=True))
op.add_column('requests', sa.Column('agency_description_release_date', postgresql.TIMESTAMP(), autoincrement=False, nullable=True))
op.drop_column('requests', 'agency_request_summary_release_date')
op.drop_column('requests', 'agency_request_summary')
### end Alembic commands ###
|
<commit_before><commit_msg>Add Database Migration for Column Rename
Migrate the column `requests.agency_description` to `requests.agency_request_summary` and `requests.agency_description_release_date` to `requests.agency_request_summary_release_date`
Signed-off-by: Joel Castillo <877468732c93e7fff2a7305e99430be238218100@records.nyc.gov><commit_after>
|
"""agency_description to agency_request_summary
Revision ID: cf62ec87d973
Revises: 971f341c0204
Create Date: 2017-05-31 16:29:17.341283
"""
# revision identifiers, used by Alembic.
revision = 'cf62ec87d973'
down_revision = '971f341c0204'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('requests', sa.Column('agency_request_summary', sa.String(length=5000), nullable=True))
op.add_column('requests', sa.Column('agency_request_summary_release_date', sa.DateTime(), nullable=True))
op.drop_column('requests', 'agency_description_release_date')
op.drop_column('requests', 'agency_description')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('requests', sa.Column('agency_description', sa.VARCHAR(length=5000), autoincrement=False, nullable=True))
op.add_column('requests', sa.Column('agency_description_release_date', postgresql.TIMESTAMP(), autoincrement=False, nullable=True))
op.drop_column('requests', 'agency_request_summary_release_date')
op.drop_column('requests', 'agency_request_summary')
### end Alembic commands ###
|
Add Database Migration for Column Rename
Migrate the column `requests.agency_description` to `requests.agency_request_summary` and `requests.agency_description_release_date` to `requests.agency_request_summary_release_date`
Signed-off-by: Joel Castillo <877468732c93e7fff2a7305e99430be238218100@records.nyc.gov>"""agency_description to agency_request_summary
Revision ID: cf62ec87d973
Revises: 971f341c0204
Create Date: 2017-05-31 16:29:17.341283
"""
# revision identifiers, used by Alembic.
revision = 'cf62ec87d973'
down_revision = '971f341c0204'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('requests', sa.Column('agency_request_summary', sa.String(length=5000), nullable=True))
op.add_column('requests', sa.Column('agency_request_summary_release_date', sa.DateTime(), nullable=True))
op.drop_column('requests', 'agency_description_release_date')
op.drop_column('requests', 'agency_description')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('requests', sa.Column('agency_description', sa.VARCHAR(length=5000), autoincrement=False, nullable=True))
op.add_column('requests', sa.Column('agency_description_release_date', postgresql.TIMESTAMP(), autoincrement=False, nullable=True))
op.drop_column('requests', 'agency_request_summary_release_date')
op.drop_column('requests', 'agency_request_summary')
### end Alembic commands ###
|
<commit_before><commit_msg>Add Database Migration for Column Rename
Migrate the column `requests.agency_description` to `requests.agency_request_summary` and `requests.agency_description_release_date` to `requests.agency_request_summary_release_date`
Signed-off-by: Joel Castillo <877468732c93e7fff2a7305e99430be238218100@records.nyc.gov><commit_after>"""agency_description to agency_request_summary
Revision ID: cf62ec87d973
Revises: 971f341c0204
Create Date: 2017-05-31 16:29:17.341283
"""
# revision identifiers, used by Alembic.
revision = 'cf62ec87d973'
down_revision = '971f341c0204'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('requests', sa.Column('agency_request_summary', sa.String(length=5000), nullable=True))
op.add_column('requests', sa.Column('agency_request_summary_release_date', sa.DateTime(), nullable=True))
op.drop_column('requests', 'agency_description_release_date')
op.drop_column('requests', 'agency_description')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('requests', sa.Column('agency_description', sa.VARCHAR(length=5000), autoincrement=False, nullable=True))
op.add_column('requests', sa.Column('agency_description_release_date', postgresql.TIMESTAMP(), autoincrement=False, nullable=True))
op.drop_column('requests', 'agency_request_summary_release_date')
op.drop_column('requests', 'agency_request_summary')
### end Alembic commands ###
|
|
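Applying the revision above programmatically (a sketch assuming an alembic.ini at the project root; the CLI equivalent is `alembic upgrade cf62ec87d973`):
from alembic import command
from alembic.config import Config
cfg = Config("alembic.ini")
command.upgrade(cfg, "cf62ec87d973")      # rename the agency_description columns
# command.downgrade(cfg, "971f341c0204")  # revert to the prior revision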
ffd2ec7f5fa7a3e422b6da7514c4596d5c3fab3c
|
echoclt.py
|
echoclt.py
|
#!/usr/bin/env python3.3
"""Example echo client."""
# Stdlib imports.
import logging
import socket
import sys
import time
# Local imports.
import scheduling
import sockets
def echoclient(host, port):
"""COROUTINE"""
testdata = b'hi hi hi ha ha ha\n'
trans = yield from sockets.create_transport(host, port, af=socket.AF_INET)
ok = yield from trans.send(testdata)
if ok:
response = yield from trans.recv(100)
trans.close()
return response == testdata.upper()
def doit(n):
"""COROUTINE"""
t0 = time.time()
tasks = set()
for i in range(n):
t = scheduling.Task(echoclient('127.0.0.1', 1111), 'client-%d' % i)
t.start()
tasks.add(t)
ok = 0
bad = 0
for t in tasks:
yield from t.wait()
assert not t.alive
if t.result:
ok += 1
else:
bad += 1
t1 = time.time()
print('ok: ', ok)
print('bad:', bad)
print('dt: ', round(t1-t0, 6))
def main():
# Initialize logging.
if '-d' in sys.argv:
level = logging.DEBUG
elif '-v' in sys.argv:
level = logging.INFO
elif '-q' in sys.argv:
level = logging.ERROR
else:
level = logging.WARN
logging.basicConfig(level=level)
# Get integer from command line.
n = 1
for arg in sys.argv[1:]:
if not arg.startswith('-'):
n = int(arg)
break
# Run the main loop.
scheduling.run(doit(n))
if __name__ == '__main__':
main()
|
Add a simple echo client. (Though it has problems.)
|
Add a simple echo client. (Though it has problems.)
|
Python
|
apache-2.0
|
gsb-eng/asyncio,leetreveil/tulip,jashandeep-sohi/asyncio,Martiusweb/asyncio,leetreveil/tulip,jashandeep-sohi/asyncio,manipopopo/asyncio,Martiusweb/asyncio,leetreveil/tulip,vxgmichel/asyncio,gvanrossum/asyncio,overcastcloud/trollius,ajdavis/asyncio,gvanrossum/asyncio,haypo/trollius,jashandeep-sohi/asyncio,manipopopo/asyncio,ajdavis/asyncio,gsb-eng/asyncio,1st1/asyncio,ajdavis/asyncio,haypo/trollius,fallen/asyncio,gsb-eng/asyncio,overcastcloud/trollius,gvanrossum/asyncio,1st1/asyncio,fallen/asyncio,haypo/trollius,Martiusweb/asyncio,vxgmichel/asyncio,overcastcloud/trollius,fallen/asyncio,vxgmichel/asyncio,1st1/asyncio,manipopopo/asyncio
|
Add a simple echo client. (Though it has problems.)
|
#!/usr/bin/env python3.3
"""Example echo client."""
# Stdlib imports.
import logging
import socket
import sys
import time
# Local imports.
import scheduling
import sockets
def echoclient(host, port):
"""COROUTINE"""
testdata = b'hi hi hi ha ha ha\n'
trans = yield from sockets.create_transport(host, port, af=socket.AF_INET)
ok = yield from trans.send(testdata)
if ok:
response = yield from trans.recv(100)
trans.close()
return response == testdata.upper()
def doit(n):
"""COROUTINE"""
t0 = time.time()
tasks = set()
for i in range(n):
t = scheduling.Task(echoclient('127.0.0.1', 1111), 'client-%d' % i)
t.start()
tasks.add(t)
ok = 0
bad = 0
for t in tasks:
yield from t.wait()
assert not t.alive
if t.result:
ok += 1
else:
bad += 1
t1 = time.time()
print('ok: ', ok)
print('bad:', bad)
print('dt: ', round(t1-t0, 6))
def main():
# Initialize logging.
if '-d' in sys.argv:
level = logging.DEBUG
elif '-v' in sys.argv:
level = logging.INFO
elif '-q' in sys.argv:
level = logging.ERROR
else:
level = logging.WARN
logging.basicConfig(level=level)
# Get integer from command line.
n = 1
for arg in sys.argv[1:]:
if not arg.startswith('-'):
n = int(arg)
break
# Run the main loop.
scheduling.run(doit(n))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a simple echo client. (Though it has problems.)<commit_after>
|
#!/usr/bin/env python3.3
"""Example echo client."""
# Stdlib imports.
import logging
import socket
import sys
import time
# Local imports.
import scheduling
import sockets
def echoclient(host, port):
"""COROUTINE"""
testdata = b'hi hi hi ha ha ha\n'
trans = yield from sockets.create_transport(host, port, af=socket.AF_INET)
ok = yield from trans.send(testdata)
if ok:
response = yield from trans.recv(100)
trans.close()
return response == testdata.upper()
def doit(n):
"""COROUTINE"""
t0 = time.time()
tasks = set()
for i in range(n):
t = scheduling.Task(echoclient('127.0.0.1', 1111), 'client-%d' % i)
t.start()
tasks.add(t)
ok = 0
bad = 0
for t in tasks:
yield from t.wait()
assert not t.alive
if t.result:
ok += 1
else:
bad += 1
t1 = time.time()
print('ok: ', ok)
print('bad:', bad)
print('dt: ', round(t1-t0, 6))
def main():
# Initialize logging.
if '-d' in sys.argv:
level = logging.DEBUG
elif '-v' in sys.argv:
level = logging.INFO
elif '-q' in sys.argv:
level = logging.ERROR
else:
level = logging.WARN
logging.basicConfig(level=level)
# Get integer from command line.
n = 1
for arg in sys.argv[1:]:
if not arg.startswith('-'):
n = int(arg)
break
# Run the main loop.
scheduling.run(doit(n))
if __name__ == '__main__':
main()
|
Add a simple echo client. (Though it has problems.)#!/usr/bin/env python3.3
"""Example echo client."""
# Stdlib imports.
import logging
import socket
import sys
import time
# Local imports.
import scheduling
import sockets
def echoclient(host, port):
"""COROUTINE"""
testdata = b'hi hi hi ha ha ha\n'
trans = yield from sockets.create_transport(host, port, af=socket.AF_INET)
ok = yield from trans.send(testdata)
if ok:
response = yield from trans.recv(100)
trans.close()
return response == testdata.upper()
def doit(n):
"""COROUTINE"""
t0 = time.time()
tasks = set()
for i in range(n):
t = scheduling.Task(echoclient('127.0.0.1', 1111), 'client-%d' % i)
t.start()
tasks.add(t)
ok = 0
bad = 0
for t in tasks:
yield from t.wait()
assert not t.alive
if t.result:
ok += 1
else:
bad += 1
t1 = time.time()
print('ok: ', ok)
print('bad:', bad)
print('dt: ', round(t1-t0, 6))
def main():
# Initialize logging.
if '-d' in sys.argv:
level = logging.DEBUG
elif '-v' in sys.argv:
level = logging.INFO
elif '-q' in sys.argv:
level = logging.ERROR
else:
level = logging.WARN
logging.basicConfig(level=level)
# Get integer from command line.
n = 1
for arg in sys.argv[1:]:
if not arg.startswith('-'):
n = int(arg)
break
# Run the main loop.
scheduling.run(doit(n))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a simple echo client. (Though it has problems.)<commit_after>#!/usr/bin/env python3.3
"""Example echo client."""
# Stdlib imports.
import logging
import socket
import sys
import time
# Local imports.
import scheduling
import sockets
def echoclient(host, port):
"""COROUTINE"""
testdata = b'hi hi hi ha ha ha\n'
trans = yield from sockets.create_transport(host, port, af=socket.AF_INET)
ok = yield from trans.send(testdata)
if ok:
response = yield from trans.recv(100)
trans.close()
return response == testdata.upper()
def doit(n):
"""COROUTINE"""
t0 = time.time()
tasks = set()
for i in range(n):
t = scheduling.Task(echoclient('127.0.0.1', 1111), 'client-%d' % i)
t.start()
tasks.add(t)
ok = 0
bad = 0
for t in tasks:
yield from t.wait()
assert not t.alive
if t.result:
ok += 1
else:
bad += 1
t1 = time.time()
print('ok: ', ok)
print('bad:', bad)
print('dt: ', round(t1-t0, 6))
def main():
# Initialize logging.
if '-d' in sys.argv:
level = logging.DEBUG
elif '-v' in sys.argv:
level = logging.INFO
elif '-q' in sys.argv:
level = logging.ERROR
else:
level = logging.WARN
logging.basicConfig(level=level)
# Get integer from command line.
n = 1
for arg in sys.argv[1:]:
if not arg.startswith('-'):
n = int(arg)
break
# Run the main loop.
scheduling.run(doit(n))
if __name__ == '__main__':
main()
|
|
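One concrete instance of the problems the subject line admits to: if trans.send() returns a falsy value, `response` is never bound and the final comparison raises NameError. A defensive variant of the coroutine body, shown as a sketch (it keeps the module's own sockets and socket imports):
def echoclient(host, port):
    """COROUTINE"""
    testdata = b'hi hi hi ha ha ha\n'
    trans = yield from sockets.create_transport(host, port, af=socket.AF_INET)
    response = None
    if (yield from trans.send(testdata)):
        response = yield from trans.recv(100)
    trans.close()
    return response == testdata.upper()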
649cc8dd7cce1728db54b35bfd9cf23cbfb268e9
|
seq2seq/decoders/rnn_decoder.py
|
seq2seq/decoders/rnn_decoder.py
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Base class for sequence decoders.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import namedtuple
import tensorflow as tf
from tensorflow.python.util import nest
from seq2seq.graph_module import GraphModule
from seq2seq.contrib.seq2seq.decoder import Decoder, dynamic_decode
class DecoderOutput(namedtuple(
"DecoderOutput", ["logits", "predicted_ids", "cell_output"])):
"""Output of a decoder.
Note that we output both the logits and predictions because during
dynamic decoding the predictions may not correspond to max(logits).
For example, we may be sampling from the logits instead.
"""
pass
class RNNDecoder(GraphModule, Decoder):
"""Base class for RNN decoders.
Args:
    cell: An instance of `tf.contrib.rnn.RNNCell`
    helper: A decoding helper instance that feeds the next inputs to the
      cell and signals when decoding is finished
    initial_state: The initial state for the decoder cell
    max_decode_length: Maximum number of decoding steps
    name: A name for this module
"""
def __init__(self, cell, helper, initial_state, max_decode_length, name):
GraphModule.__init__(self, name)
self.cell = cell
self.max_decode_length = max_decode_length
self.helper = helper
self.initial_state = initial_state
@property
def batch_size(self):
return tf.shape(nest.flatten([self.initial_state])[0])[0]
def _build(self):
return dynamic_decode(
decoder=self,
output_time_major=True,
impute_finished=True,
maximum_iterations=self.max_decode_length)
|
Rename base decoder to RNN decoder
|
Rename base decoder to RNN decoder
|
Python
|
apache-2.0
|
liyi193328/seq2seq,kontact-chan/seq2seq,liyi193328/seq2seq,liyi193328/seq2seq,kontact-chan/seq2seq,google/seq2seq,chunfengh/seq2seq,shashankrajput/seq2seq,google/seq2seq,google/seq2seq,shashankrajput/seq2seq,chunfengh/seq2seq,kontact-chan/seq2seq,google/seq2seq,shashankrajput/seq2seq,chunfengh/seq2seq,liyi193328/seq2seq,liyi193328/seq2seq,shashankrajput/seq2seq,chunfengh/seq2seq,kontact-chan/seq2seq
|
Rename base decoder to RNN decoder
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Base class for sequence decoders.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import namedtuple
import tensorflow as tf
from tensorflow.python.util import nest
from seq2seq.graph_module import GraphModule
from seq2seq.contrib.seq2seq.decoder import Decoder, dynamic_decode
class DecoderOutput(namedtuple(
"DecoderOutput", ["logits", "predicted_ids", "cell_output"])):
"""Output of a decoder.
Note that we output both the logits and predictions because during
dynamic decoding the predictions may not correspond to max(logits).
For example, we may be sampling from the logits instead.
"""
pass
class RNNDecoder(GraphModule, Decoder):
"""Base class for RNN decoders.
Args:
    cell: An instance of `tf.contrib.rnn.RNNCell`
name: A name for this module
input_fn: A function that generates the next input, e.g. an
instance of `FixedDecoderInputs` or `DynamicDecoderInputs`.
"""
def __init__(self, cell, helper, initial_state, max_decode_length, name):
GraphModule.__init__(self, name)
self.cell = cell
self.max_decode_length = max_decode_length
self.helper = helper
self.initial_state = initial_state
@property
def batch_size(self):
return tf.shape(nest.flatten([self.initial_state])[0])[0]
def _build(self):
return dynamic_decode(
decoder=self,
output_time_major=True,
impute_finished=True,
maximum_iterations=self.max_decode_length)
|
<commit_before><commit_msg>Rename base decoder to RNN decoder<commit_after>
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Base class for sequence decoders.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import namedtuple
import tensorflow as tf
from tensorflow.python.util import nest
from seq2seq.graph_module import GraphModule
from seq2seq.contrib.seq2seq.decoder import Decoder, dynamic_decode
class DecoderOutput(namedtuple(
"DecoderOutput", ["logits", "predicted_ids", "cell_output"])):
"""Output of a decoder.
Note that we output both the logits and predictions because during
dynamic decoding the predictions may not correspond to max(logits).
For example, we may be sampling from the logits instead.
"""
pass
class RNNDecoder(GraphModule, Decoder):
"""Base class for RNN decoders.
Args:
    cell: An instance of `tf.contrib.rnn.RNNCell`
name: A name for this module
input_fn: A function that generates the next input, e.g. an
instance of `FixedDecoderInputs` or `DynamicDecoderInputs`.
"""
def __init__(self, cell, helper, initial_state, max_decode_length, name):
GraphModule.__init__(self, name)
self.cell = cell
self.max_decode_length = max_decode_length
self.helper = helper
self.initial_state = initial_state
@property
def batch_size(self):
return tf.shape(nest.flatten([self.initial_state])[0])[0]
def _build(self):
return dynamic_decode(
decoder=self,
output_time_major=True,
impute_finished=True,
maximum_iterations=self.max_decode_length)
|
Rename base decoder to RNN decoder# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Base class for sequence decoders.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import namedtuple
import tensorflow as tf
from tensorflow.python.util import nest
from seq2seq.graph_module import GraphModule
from seq2seq.contrib.seq2seq.decoder import Decoder, dynamic_decode
class DecoderOutput(namedtuple(
"DecoderOutput", ["logits", "predicted_ids", "cell_output"])):
"""Output of a decoder.
Note that we output both the logits and predictions because during
dynamic decoding the predictions may not correspond to max(logits).
For example, we may be sampling from the logits instead.
"""
pass
class RNNDecoder(GraphModule, Decoder):
"""Base class for RNN decoders.
Args:
    cell: An instance of `tf.contrib.rnn.RNNCell`
name: A name for this module
input_fn: A function that generates the next input, e.g. an
instance of `FixedDecoderInputs` or `DynamicDecoderInputs`.
"""
def __init__(self, cell, helper, initial_state, max_decode_length, name):
GraphModule.__init__(self, name)
self.cell = cell
self.max_decode_length = max_decode_length
self.helper = helper
self.initial_state = initial_state
@property
def batch_size(self):
return tf.shape(nest.flatten([self.initial_state])[0])[0]
def _build(self):
return dynamic_decode(
decoder=self,
output_time_major=True,
impute_finished=True,
maximum_iterations=self.max_decode_length)
|
<commit_before><commit_msg>Rename base decoder to RNN decoder<commit_after># Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Base class for sequence decoders.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import namedtuple
import tensorflow as tf
from tensorflow.python.util import nest
from seq2seq.graph_module import GraphModule
from seq2seq.contrib.seq2seq.decoder import Decoder, dynamic_decode
class DecoderOutput(namedtuple(
"DecoderOutput", ["logits", "predicted_ids", "cell_output"])):
"""Output of a decoder.
Note that we output both the logits and predictions because during
dynamic decoding the predictions may not correspond to max(logits).
For example, we may be sampling from the logits instead.
"""
pass
class RNNDecoder(GraphModule, Decoder):
"""Base class for RNN decoders.
Args:
    cell: An instance of `tf.contrib.rnn.RNNCell`
name: A name for this module
input_fn: A function that generates the next input, e.g. an
instance of `FixedDecoderInputs` or `DynamicDecoderInputs`.
"""
def __init__(self, cell, helper, initial_state, max_decode_length, name):
GraphModule.__init__(self, name)
self.cell = cell
self.max_decode_length = max_decode_length
self.helper = helper
self.initial_state = initial_state
@property
def batch_size(self):
return tf.shape(nest.flatten([self.initial_state])[0])[0]
def _build(self):
return dynamic_decode(
decoder=self,
output_time_major=True,
impute_finished=True,
maximum_iterations=self.max_decode_length)
|
|
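The DecoderOutput docstring above carries the design rationale: predicted_ids travel alongside logits because a decoding strategy may sample instead of taking the argmax. A minimal numpy sketch of the two strategies, with the vocabulary size and logits invented purely for illustration:

import numpy as np

rng = np.random.default_rng(seed=0)
logits = rng.normal(size=8)  # one decode step over a vocabulary of 8 tokens

# Greedy decoding: the prediction is always argmax(logits).
greedy_id = int(np.argmax(logits))

# Sampling decoding: the prediction is drawn from softmax(logits), so it
# need not coincide with the argmax -- which is why DecoderOutput keeps
# both `logits` and `predicted_ids` instead of recomputing one from the other.
probs = np.exp(logits - logits.max())  # numerically stable softmax
probs /= probs.sum()
sampled_id = int(rng.choice(8, p=probs))

print(greedy_id, sampled_id)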
3896cd4e3ea0aee0025dafef13d2f29fe168cf10
|
students/psbriant/final_project/test_clean_data.py
|
students/psbriant/final_project/test_clean_data.py
|
"""
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Tests for Final Project
"""
import clean_data as cd
import pandas
def get_data():
"""
Retrieve data from csv file to test.
"""
data = pandas.read_csv("data/Residential_Water_Usage_Zip_Code_on_Top.csv")
return data
def test_clean():
"""
"""
def test_rename_columns():
"""
Test whether rename_columns successfully renames each column.
"""
data = get_data()
data = data.drop(["Date Value"], axis=1)
column_names = list(data.columns.values)
column_list = cd.rename_columns(column_names)
assert column_list[0:5] == ["Date", "90001", "90002", "90003", "90004"]
def test_find_low_water_use():
"""
"""
def test_plot_zipcode():
"""
"""
|
"""
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Tests for Final Project
"""
import clean_data as cd
import matplotlib.pyplot as plt
import pandas
import pytest
def get_data():
"""
Retrieve data from csv file to test.
"""
data = pandas.read_csv("data/Residential_Water_Usage_Zip_Code_on_Top.csv")
return data
def test_clean():
"""
"""
def test_rename_columns():
"""
Test whether rename_columns successfully renames each column.
"""
data = get_data()
data = data.drop(["Date Value"], axis=1)
column_names = list(data.columns.values)
column_list = cd.rename_columns(column_names)
assert column_list[0:5] == ["Date", "90001", "90002", "90003", "90004"]
def test_find_low_water_use():
"""
"""
@pytest.mark.mpl_image_compare
def test_plot_zipcode():
"""
    Test zipcode-based water use graphs.
"""
data = get_data()
cleaned = cd.clean(data)
wateruse = cd.plot_zipcode(cleaned, "90012")
return wateruse
|
Create test for plot_zipcode to compare generated graphs.
|
Create test for plot_zipcode to compare generated graphs.
|
Python
|
unlicense
|
UWPCE-PythonCert/IntroPython2016,weidnem/IntroPython2016,UWPCE-PythonCert/IntroPython2016,weidnem/IntroPython2016,UWPCE-PythonCert/IntroPython2016,weidnem/IntroPython2016
|
"""
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Tests for Final Project
"""
import clean_data as cd
import pandas
def get_data():
"""
Retrieve data from csv file to test.
"""
data = pandas.read_csv("data/Residential_Water_Usage_Zip_Code_on_Top.csv")
return data
def test_clean():
"""
"""
def test_rename_columns():
"""
Test whether rename_columns successfully renames each column.
"""
data = get_data()
data = data.drop(["Date Value"], axis=1)
column_names = list(data.columns.values)
column_list = cd.rename_columns(column_names)
assert column_list[0:5] == ["Date", "90001", "90002", "90003", "90004"]
def test_find_low_water_use():
"""
"""
def test_plot_zipcode():
"""
"""
Create test for plot_zipcode to compare generated graphs.
|
"""
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Tests for Final Project
"""
import clean_data as cd
import matplotlib.pyplot as plt
import pandas
import pytest
def get_data():
"""
Retrieve data from csv file to test.
"""
data = pandas.read_csv("data/Residential_Water_Usage_Zip_Code_on_Top.csv")
return data
def test_clean():
"""
"""
def test_rename_columns():
"""
Test whether rename_columns successfully renames each column.
"""
data = get_data()
data = data.drop(["Date Value"], axis=1)
column_names = list(data.columns.values)
column_list = cd.rename_columns(column_names)
assert column_list[0:5] == ["Date", "90001", "90002", "90003", "90004"]
def test_find_low_water_use():
"""
"""
@pytest.mark.mpl_image_compare
def test_plot_zipcode():
"""
    Test zipcode-based water use graphs.
"""
data = get_data()
cleaned = cd.clean(data)
wateruse = cd.plot_zipcode(cleaned, "90012")
return wateruse
|
<commit_before>"""
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Tests for Final Project
"""
import clean_data as cd
import pandas
def get_data():
"""
Retrieve data from csv file to test.
"""
data = pandas.read_csv("data/Residential_Water_Usage_Zip_Code_on_Top.csv")
return data
def test_clean():
"""
"""
def test_rename_columns():
"""
Test whether rename_columns successfully renames each column.
"""
data = get_data()
data = data.drop(["Date Value"], axis=1)
column_names = list(data.columns.values)
column_list = cd.rename_columns(column_names)
assert column_list[0:5] == ["Date", "90001", "90002", "90003", "90004"]
def test_find_low_water_use():
"""
"""
def test_plot_zipcode():
"""
"""
<commit_msg>Create test for plot_zipcode to compare generated graphs.<commit_after>
|
"""
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Tests for Final Project
"""
import clean_data as cd
import matplotlib.pyplot as plt
import pandas
import pytest
def get_data():
"""
Retrieve data from csv file to test.
"""
data = pandas.read_csv("data/Residential_Water_Usage_Zip_Code_on_Top.csv")
return data
def test_clean():
"""
"""
def test_rename_columns():
"""
Test whether rename_columns successfully renames each column.
"""
data = get_data()
data = data.drop(["Date Value"], axis=1)
column_names = list(data.columns.values)
column_list = cd.rename_columns(column_names)
assert column_list[0:5] == ["Date", "90001", "90002", "90003", "90004"]
def test_find_low_water_use():
"""
"""
@pytest.mark.mpl_image_compare
def test_plot_zipcode():
"""
    Test zipcode-based water use graphs.
"""
data = get_data()
cleaned = cd.clean(data)
wateruse = cd.plot_zipcode(cleaned, "90012")
return wateruse
|
"""
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Tests for Final Project
"""
import clean_data as cd
import pandas
def get_data():
"""
Retrieve data from csv file to test.
"""
data = pandas.read_csv("data/Residential_Water_Usage_Zip_Code_on_Top.csv")
return data
def test_clean():
"""
"""
def test_rename_columns():
"""
Test whether rename_columns successfully renames each column.
"""
data = get_data()
data = data.drop(["Date Value"], axis=1)
column_names = list(data.columns.values)
column_list = cd.rename_columns(column_names)
assert column_list[0:5] == ["Date", "90001", "90002", "90003", "90004"]
def test_find_low_water_use():
"""
"""
def test_plot_zipcode():
"""
"""
Create test for plot_zipcode to compare generated graphs."""
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Tests for Final Project
"""
import clean_data as cd
import matplotlib.pyplot as plt
import pandas
import pytest
def get_data():
"""
Retrieve data from csv file to test.
"""
data = pandas.read_csv("data/Residential_Water_Usage_Zip_Code_on_Top.csv")
return data
def test_clean():
"""
"""
def test_rename_columns():
"""
Test whether rename_columns successfully renames each column.
"""
data = get_data()
data = data.drop(["Date Value"], axis=1)
column_names = list(data.columns.values)
column_list = cd.rename_columns(column_names)
assert column_list[0:5] == ["Date", "90001", "90002", "90003", "90004"]
def test_find_low_water_use():
"""
"""
@pytest.mark.mpl_image_compare
def test_plot_zipcode():
"""
    Test zipcode-based water use graphs.
"""
data = get_data()
cleaned = cd.clean(data)
wateruse = cd.plot_zipcode(cleaned, "90012")
return wateruse
|
<commit_before>"""
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Tests for Final Project
"""
import clean_data as cd
import pandas
def get_data():
"""
Retrieve data from csv file to test.
"""
data = pandas.read_csv("data/Residential_Water_Usage_Zip_Code_on_Top.csv")
return data
def test_clean():
"""
"""
def test_rename_columns():
"""
Test whether rename_columns successfully renames each column.
"""
data = get_data()
data = data.drop(["Date Value"], axis=1)
column_names = list(data.columns.values)
column_list = cd.rename_columns(column_names)
assert column_list[0:5] == ["Date", "90001", "90002", "90003", "90004"]
def test_find_low_water_use():
"""
"""
def test_plot_zipcode():
"""
"""
<commit_msg>Create test for plot_zipcode to compare generated graphs.<commit_after>"""
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Tests for Final Project
"""
import clean_data as cd
import matplotlib.pyplot as plt
import pandas
import pytest
def get_data():
"""
Retrieve data from csv file to test.
"""
data = pandas.read_csv("data/Residential_Water_Usage_Zip_Code_on_Top.csv")
return data
def test_clean():
"""
"""
def test_rename_columns():
"""
Test whether rename_columns successfully renames each column.
"""
data = get_data()
data = data.drop(["Date Value"], axis=1)
column_names = list(data.columns.values)
column_list = cd.rename_columns(column_names)
assert column_list[0:5] == ["Date", "90001", "90002", "90003", "90004"]
def test_find_low_water_use():
"""
"""
@pytest.mark.mpl_image_compare
def test_plot_zipcode():
"""
    Test zipcode-based water use graphs.
"""
data = get_data()
cleaned = cd.clean(data)
wateruse = cd.plot_zipcode(cleaned, "90012")
return wateruse
|
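For context on the decorator introduced above: pytest-mpl's @pytest.mark.mpl_image_compare expects the test to return a matplotlib figure, renders it, and diffs it against a baseline image (generated once with pytest --mpl-generate-path=baseline, then compared on later runs with pytest --mpl). One plausible shape for cd.plot_zipcode consistent with the test follows; the column layout and labels are assumptions, not the actual clean_data.py.

import matplotlib
matplotlib.use('Agg')  # render off-screen; image-comparison tests need no display
import matplotlib.pyplot as plt

def plot_zipcode(data, zipcode):
    """Plot water use over time for one zip code and return the figure."""
    fig, ax = plt.subplots()
    ax.plot(data['Date'], data[zipcode])
    ax.set_xlabel('Date')
    ax.set_ylabel('Water use')
    ax.set_title('Residential water use for {}'.format(zipcode))
    return fig  # pytest-mpl compares the returned figure to its stored baseline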
3ebb14dfe971c135153a58edea76901a18ff6ca0
|
captura/test_views.py
|
captura/test_views.py
|
from django.core.urlresolvers import reverse
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from splinter import Browser
from django.contrib.auth.models import User
from perfiles_usuario.utils import CAPTURISTA_GROUP
from django.contrib.auth.models import Group
from estudios_socioeconomicos.models import Estudio
class TestViewsAdministracion(StaticLiveServerTestCase):
"""Integration test suite for testing the views in the app: administracion.
Test the urls for 'administracion' which make up the administration dashboard.
    A user is created in order to test that they are displayed.
Attributes
----------
browser : Browser
Driver to navigate through websites and to run integration tests.
"""
def setUp(self):
"""Initialize the browser and create a user, before running the tests.
"""
self.browser = Browser('chrome')
test_username = 'thelma'
test_password = 'junipero'
thelma = User.objects.create_user(
username=test_username, email='juan@pablo.com', password=test_password,
first_name='Thelma', last_name='Thelmapellido')
capturista = Group.objects.get_or_create(name=CAPTURISTA_GROUP)[0]
capturista.user_set.add(thelma)
capturista.save()
self.browser.visit(self.live_server_url + reverse('tosp_auth:login'))
self.browser.fill('username', test_username)
self.browser.fill('password', test_password)
self.browser.find_by_id('login-submit').click()
def tearDown(self):
"""At the end of tests, close the browser.
"""
self.browser.quit()
def test_main_dashboard(self):
"""Test for url 'captura:capturista_dashboard'.
Visit the url of name 'captura:capturista_dashboard' and check it loads the
content of the captura dashboard panel.
"""
test_url_name = 'captura:capturista_dashboard'
self.browser.visit(self.live_server_url + reverse(test_url_name))
# Check for nav_bar partial
self.assertTrue(self.browser.is_text_present('Instituto Juan Pablo'))
self.assertEqual(Estudio.objects.count(), 0)
        # Check that the following texts are present in the dashboard
self.assertTrue(self.browser.is_text_present('Mis estudios socioeconómicos'))
self.assertTrue(self.browser.is_text_present('Agregar estudio'))
|
Create test to dashboard capturista
|
Create test to dashboard capturista
|
Python
|
mit
|
erikiado/jp2_online,erikiado/jp2_online,erikiado/jp2_online
|
Create test to dashboard capturista
|
from django.core.urlresolvers import reverse
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from splinter import Browser
from django.contrib.auth.models import User
from perfiles_usuario.utils import CAPTURISTA_GROUP
from django.contrib.auth.models import Group
from estudios_socioeconomicos.models import Estudio
class TestViewsAdministracion(StaticLiveServerTestCase):
"""Integration test suite for testing the views in the app: administracion.
Test the urls for 'administracion' which make up the administration dashboard.
    A user is created in order to test that they are displayed.
Attributes
----------
browser : Browser
Driver to navigate through websites and to run integration tests.
"""
def setUp(self):
"""Initialize the browser and create a user, before running the tests.
"""
self.browser = Browser('chrome')
test_username = 'thelma'
test_password = 'junipero'
thelma = User.objects.create_user(
username=test_username, email='juan@pablo.com', password=test_password,
first_name='Thelma', last_name='Thelmapellido')
capturista = Group.objects.get_or_create(name=CAPTURISTA_GROUP)[0]
capturista.user_set.add(thelma)
capturista.save()
self.browser.visit(self.live_server_url + reverse('tosp_auth:login'))
self.browser.fill('username', test_username)
self.browser.fill('password', test_password)
self.browser.find_by_id('login-submit').click()
def tearDown(self):
"""At the end of tests, close the browser.
"""
self.browser.quit()
def test_main_dashboard(self):
"""Test for url 'captura:capturista_dashboard'.
Visit the url of name 'captura:capturista_dashboard' and check it loads the
content of the captura dashboard panel.
"""
test_url_name = 'captura:capturista_dashboard'
self.browser.visit(self.live_server_url + reverse(test_url_name))
# Check for nav_bar partial
self.assertTrue(self.browser.is_text_present('Instituto Juan Pablo'))
self.assertEqual(Estudio.objects.count(), 0)
# Check that the folling texts are present in the dashboard
self.assertTrue(self.browser.is_text_present('Mis estudios socioeconómicos'))
self.assertTrue(self.browser.is_text_present('Agregar estudio'))
|
<commit_before><commit_msg>Create test to dashboard capturista<commit_after>
|
from django.core.urlresolvers import reverse
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from splinter import Browser
from django.contrib.auth.models import User
from perfiles_usuario.utils import CAPTURISTA_GROUP
from django.contrib.auth.models import Group
from estudios_socioeconomicos.models import Estudio
class TestViewsAdministracion(StaticLiveServerTestCase):
"""Integration test suite for testing the views in the app: administracion.
Test the urls for 'administracion' which make up the administration dashboard.
    A user is created in order to test that they are displayed.
Attributes
----------
browser : Browser
Driver to navigate through websites and to run integration tests.
"""
def setUp(self):
"""Initialize the browser and create a user, before running the tests.
"""
self.browser = Browser('chrome')
test_username = 'thelma'
test_password = 'junipero'
thelma = User.objects.create_user(
username=test_username, email='juan@pablo.com', password=test_password,
first_name='Thelma', last_name='Thelmapellido')
capturista = Group.objects.get_or_create(name=CAPTURISTA_GROUP)[0]
capturista.user_set.add(thelma)
capturista.save()
self.browser.visit(self.live_server_url + reverse('tosp_auth:login'))
self.browser.fill('username', test_username)
self.browser.fill('password', test_password)
self.browser.find_by_id('login-submit').click()
def tearDown(self):
"""At the end of tests, close the browser.
"""
self.browser.quit()
def test_main_dashboard(self):
"""Test for url 'captura:capturista_dashboard'.
Visit the url of name 'captura:capturista_dashboard' and check it loads the
content of the captura dashboard panel.
"""
test_url_name = 'captura:capturista_dashboard'
self.browser.visit(self.live_server_url + reverse(test_url_name))
# Check for nav_bar partial
self.assertTrue(self.browser.is_text_present('Instituto Juan Pablo'))
self.assertEqual(Estudio.objects.count(), 0)
        # Check that the following texts are present in the dashboard
self.assertTrue(self.browser.is_text_present('Mis estudios socioeconómicos'))
self.assertTrue(self.browser.is_text_present('Agregar estudio'))
|
Create test to dashboard capturistafrom django.core.urlresolvers import reverse
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from splinter import Browser
from django.contrib.auth.models import User
from perfiles_usuario.utils import CAPTURISTA_GROUP
from django.contrib.auth.models import Group
from estudios_socioeconomicos.models import Estudio
class TestViewsAdministracion(StaticLiveServerTestCase):
"""Integration test suite for testing the views in the app: administracion.
Test the urls for 'administracion' which make up the administration dashboard.
    A user is created in order to test that they are displayed.
Attributes
----------
browser : Browser
Driver to navigate through websites and to run integration tests.
"""
def setUp(self):
"""Initialize the browser and create a user, before running the tests.
"""
self.browser = Browser('chrome')
test_username = 'thelma'
test_password = 'junipero'
thelma = User.objects.create_user(
username=test_username, email='juan@pablo.com', password=test_password,
first_name='Thelma', last_name='Thelmapellido')
capturista = Group.objects.get_or_create(name=CAPTURISTA_GROUP)[0]
capturista.user_set.add(thelma)
capturista.save()
self.browser.visit(self.live_server_url + reverse('tosp_auth:login'))
self.browser.fill('username', test_username)
self.browser.fill('password', test_password)
self.browser.find_by_id('login-submit').click()
def tearDown(self):
"""At the end of tests, close the browser.
"""
self.browser.quit()
def test_main_dashboard(self):
"""Test for url 'captura:capturista_dashboard'.
Visit the url of name 'captura:capturista_dashboard' and check it loads the
content of the captura dashboard panel.
"""
test_url_name = 'captura:capturista_dashboard'
self.browser.visit(self.live_server_url + reverse(test_url_name))
# Check for nav_bar partial
self.assertTrue(self.browser.is_text_present('Instituto Juan Pablo'))
self.assertEqual(Estudio.objects.count(), 0)
        # Check that the following texts are present in the dashboard
self.assertTrue(self.browser.is_text_present('Mis estudios socioeconómicos'))
self.assertTrue(self.browser.is_text_present('Agregar estudio'))
|
<commit_before><commit_msg>Create test to dashboard capturista<commit_after>from django.core.urlresolvers import reverse
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from splinter import Browser
from django.contrib.auth.models import User
from perfiles_usuario.utils import CAPTURISTA_GROUP
from django.contrib.auth.models import Group
from estudios_socioeconomicos.models import Estudio
class TestViewsAdministracion(StaticLiveServerTestCase):
"""Integration test suite for testing the views in the app: administracion.
Test the urls for 'administracion' which make up the administration dashboard.
    A user is created in order to test that they are displayed.
Attributes
----------
browser : Browser
Driver to navigate through websites and to run integration tests.
"""
def setUp(self):
"""Initialize the browser and create a user, before running the tests.
"""
self.browser = Browser('chrome')
test_username = 'thelma'
test_password = 'junipero'
thelma = User.objects.create_user(
username=test_username, email='juan@pablo.com', password=test_password,
first_name='Thelma', last_name='Thelmapellido')
capturista = Group.objects.get_or_create(name=CAPTURISTA_GROUP)[0]
capturista.user_set.add(thelma)
capturista.save()
self.browser.visit(self.live_server_url + reverse('tosp_auth:login'))
self.browser.fill('username', test_username)
self.browser.fill('password', test_password)
self.browser.find_by_id('login-submit').click()
def tearDown(self):
"""At the end of tests, close the browser.
"""
self.browser.quit()
def test_main_dashboard(self):
"""Test for url 'captura:capturista_dashboard'.
Visit the url of name 'captura:capturista_dashboard' and check it loads the
content of the captura dashboard panel.
"""
test_url_name = 'captura:capturista_dashboard'
self.browser.visit(self.live_server_url + reverse(test_url_name))
# Check for nav_bar partial
self.assertTrue(self.browser.is_text_present('Instituto Juan Pablo'))
self.assertEqual(Estudio.objects.count(), 0)
        # Check that the following texts are present in the dashboard
self.assertTrue(self.browser.is_text_present('Mis estudios socioeconómicos'))
self.assertTrue(self.browser.is_text_present('Agregar estudio'))
|
|
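A practical note on the Splinter fixture above: Browser('chrome') opens a visible browser window, which makes the suite awkward on headless CI machines. Splinter's Chrome driver accepts a headless flag; a sketch follows (flag name per the Splinter docs -- verify against the pinned version; the URL is a placeholder, not the project's).

from splinter import Browser

browser = Browser('chrome', headless=True)  # forwarded to ChromeOptions
try:
    browser.visit('http://localhost:8000/')
    print(browser.title)
finally:
    browser.quit()  # always release the driver, as tearDown does above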
6764ae005630053da3b02659448cbde05c682ab9
|
tests/checker_tests.py
|
tests/checker_tests.py
|
from checkinchecker.checker import filter_matches
from unittest import TestCase
class CheckerTests(TestCase):
def test_match_filter_visulite(self):
possibilities = [
{"id": 0, "tags": {"name": "Taste of India"}},
{"id": 0, "tags": {"name": "Staunton"}},
{"id": 1, "tags": {"name": "Visulite Cinema"}},
]
venue_name = "Visulite Cinema - Downtown Staunton"
matches = filter_matches(venue_name, possibilities)
self.assertEqual(1, len(matches))
self.assertEqual(1, matches[0][1]['id'])
def test_match_filter_iad_airport(self):
possibilities = [
{"id": 0, "tags": {"name": "Dunkin' Donuts"}},
{"id": 1, "tags": {"name": "Ronald Reagan Washington National Airport"}},
{"id": 0, "tags": {"name": "Police"}},
{"id": 0, "tags": {"name": "Faber News"}},
]
venue_name = "Ronald Reagan Washington National Airport (DCA)"
matches = filter_matches(venue_name, possibilities)
self.assertEqual(1, len(matches))
self.assertEqual(1, matches[0][1]['id'])
def test_match_filter_apartment(self):
possibilities = [
{"id": 0, "tags": {"name": "Berean Baptist Church"}},
{"id": 0, "tags": {"name": "Church of Christ of Albina"}},
{"id": 0, "tags": {"name": "Community Church of God"}},
{"id": 1, "tags": {"name": "The Mason Williams"}},
]
venue_name = "The Mason Williams"
matches = filter_matches(venue_name, possibilities)
self.assertEqual(1, len(matches))
self.assertEqual(1, matches[0][1]['id'])
def test_match_filter_neighborhood(self):
possibilities = [
{"id": 0, "tags": {"name": "Berean Baptist Church"}},
{"id": 0, "tags": {"name": "Church of Christ of Albina"}},
{"id": 0, "tags": {"name": "Community Church of God"}},
{"id": 1, "tags": {"name": "The Mason Williams"}},
]
venue_name = "Tanjong Pagar"
matches = filter_matches(venue_name, possibilities)
self.assertEqual(1, len(matches))
self.assertEqual(1, matches[0][1]['id'])
|
Add tests for the matcher
|
Add tests for the matcher
|
Python
|
mit
|
osmlab/checkin-checker,osmlab/checkin-checker
|
Add tests for the matcher
|
from checkinchecker.checker import filter_matches
from unittest import TestCase
class CheckerTests(TestCase):
def test_match_filter_visulite(self):
possibilities = [
{"id": 0, "tags": {"name": "Taste of India"}},
{"id": 0, "tags": {"name": "Staunton"}},
{"id": 1, "tags": {"name": "Visulite Cinema"}},
]
venue_name = "Visulite Cinema - Downtown Staunton"
matches = filter_matches(venue_name, possibilities)
self.assertEqual(1, len(matches))
self.assertEqual(1, matches[0][1]['id'])
def test_match_filter_iad_airport(self):
possibilities = [
{"id": 0, "tags": {"name": "Dunkin' Donuts"}},
{"id": 1, "tags": {"name": "Ronald Reagan Washington National Airport"}},
{"id": 0, "tags": {"name": "Police"}},
{"id": 0, "tags": {"name": "Faber News"}},
]
venue_name = "Ronald Reagan Washington National Airport (DCA)"
matches = filter_matches(venue_name, possibilities)
self.assertEqual(1, len(matches))
self.assertEqual(1, matches[0][1]['id'])
def test_match_filter_apartment(self):
possibilities = [
{"id": 0, "tags": {"name": "Berean Baptist Church"}},
{"id": 0, "tags": {"name": "Church of Christ of Albina"}},
{"id": 0, "tags": {"name": "Community Church of God"}},
{"id": 1, "tags": {"name": "The Mason Williams"}},
]
venue_name = "The Mason Williams"
matches = filter_matches(venue_name, possibilities)
self.assertEqual(1, len(matches))
self.assertEqual(1, matches[0][1]['id'])
def test_match_filter_neighborhood(self):
possibilities = [
{"id": 0, "tags": {"name": "Berean Baptist Church"}},
{"id": 0, "tags": {"name": "Church of Christ of Albina"}},
{"id": 0, "tags": {"name": "Community Church of God"}},
{"id": 1, "tags": {"name": "The Mason Williams"}},
]
venue_name = "Tanjong Pagar"
matches = filter_matches(venue_name, possibilities)
self.assertEqual(1, len(matches))
self.assertEqual(1, matches[0][1]['id'])
|
<commit_before><commit_msg>Add tests for the matcher<commit_after>
|
from checkinchecker.checker import filter_matches
from unittest import TestCase
class CheckerTests(TestCase):
def test_match_filter_visulite(self):
possibilities = [
{"id": 0, "tags": {"name": "Taste of India"}},
{"id": 0, "tags": {"name": "Staunton"}},
{"id": 1, "tags": {"name": "Visulite Cinema"}},
]
venue_name = "Visulite Cinema - Downtown Staunton"
matches = filter_matches(venue_name, possibilities)
self.assertEqual(1, len(matches))
self.assertEqual(1, matches[0][1]['id'])
def test_match_filter_iad_airport(self):
possibilities = [
{"id": 0, "tags": {"name": "Dunkin' Donuts"}},
{"id": 1, "tags": {"name": "Ronald Reagan Washington National Airport"}},
{"id": 0, "tags": {"name": "Police"}},
{"id": 0, "tags": {"name": "Faber News"}},
]
venue_name = "Ronald Reagan Washington National Airport (DCA)"
matches = filter_matches(venue_name, possibilities)
self.assertEqual(1, len(matches))
self.assertEqual(1, matches[0][1]['id'])
def test_match_filter_apartment(self):
possibilities = [
{"id": 0, "tags": {"name": "Berean Baptist Church"}},
{"id": 0, "tags": {"name": "Church of Christ of Albina"}},
{"id": 0, "tags": {"name": "Community Church of God"}},
{"id": 1, "tags": {"name": "The Mason Williams"}},
]
venue_name = "The Mason Williams"
matches = filter_matches(venue_name, possibilities)
self.assertEqual(1, len(matches))
self.assertEqual(1, matches[0][1]['id'])
def test_match_filter_neighborhood(self):
possibilities = [
{"id": 0, "tags": {"name": "Berean Baptist Church"}},
{"id": 0, "tags": {"name": "Church of Christ of Albina"}},
{"id": 0, "tags": {"name": "Community Church of God"}},
{"id": 1, "tags": {"name": "The Mason Williams"}},
]
venue_name = "Tanjong Pagar"
matches = filter_matches(venue_name, possibilities)
self.assertEqual(1, len(matches))
self.assertEqual(1, matches[0][1]['id'])
|
Add tests for the matcherfrom checkinchecker.checker import filter_matches
from unittest import TestCase
class CheckerTests(TestCase):
def test_match_filter_visulite(self):
possibilities = [
{"id": 0, "tags": {"name": "Taste of India"}},
{"id": 0, "tags": {"name": "Staunton"}},
{"id": 1, "tags": {"name": "Visulite Cinema"}},
]
venue_name = "Visulite Cinema - Downtown Staunton"
matches = filter_matches(venue_name, possibilities)
self.assertEqual(1, len(matches))
self.assertEqual(1, matches[0][1]['id'])
def test_match_filter_iad_airport(self):
possibilities = [
{"id": 0, "tags": {"name": "Dunkin' Donuts"}},
{"id": 1, "tags": {"name": "Ronald Reagan Washington National Airport"}},
{"id": 0, "tags": {"name": "Police"}},
{"id": 0, "tags": {"name": "Faber News"}},
]
venue_name = "Ronald Reagan Washington National Airport (DCA)"
matches = filter_matches(venue_name, possibilities)
self.assertEqual(1, len(matches))
self.assertEqual(1, matches[0][1]['id'])
def test_match_filter_apartment(self):
possibilities = [
{"id": 0, "tags": {"name": "Berean Baptist Church"}},
{"id": 0, "tags": {"name": "Church of Christ of Albina"}},
{"id": 0, "tags": {"name": "Community Church of God"}},
{"id": 1, "tags": {"name": "The Mason Williams"}},
]
venue_name = "The Mason Williams"
matches = filter_matches(venue_name, possibilities)
self.assertEqual(1, len(matches))
self.assertEqual(1, matches[0][1]['id'])
def test_match_filter_neighborhood(self):
possibilities = [
{"id": 0, "tags": {"name": "Berean Baptist Church"}},
{"id": 0, "tags": {"name": "Church of Christ of Albina"}},
{"id": 0, "tags": {"name": "Community Church of God"}},
{"id": 1, "tags": {"name": "The Mason Williams"}},
]
venue_name = "Tanjong Pagar"
matches = filter_matches(venue_name, possibilities)
self.assertEqual(1, len(matches))
self.assertEqual(1, matches[0][1]['id'])
|
<commit_before><commit_msg>Add tests for the matcher<commit_after>from checkinchecker.checker import filter_matches
from unittest import TestCase
class CheckerTests(TestCase):
def test_match_filter_visulite(self):
possibilities = [
{"id": 0, "tags": {"name": "Taste of India"}},
{"id": 0, "tags": {"name": "Staunton"}},
{"id": 1, "tags": {"name": "Visulite Cinema"}},
]
venue_name = "Visulite Cinema - Downtown Staunton"
matches = filter_matches(venue_name, possibilities)
self.assertEqual(1, len(matches))
self.assertEqual(1, matches[0][1]['id'])
def test_match_filter_iad_airport(self):
possibilities = [
{"id": 0, "tags": {"name": "Dunkin' Donuts"}},
{"id": 1, "tags": {"name": "Ronald Reagan Washington National Airport"}},
{"id": 0, "tags": {"name": "Police"}},
{"id": 0, "tags": {"name": "Faber News"}},
]
venue_name = "Ronald Reagan Washington National Airport (DCA)"
matches = filter_matches(venue_name, possibilities)
self.assertEqual(1, len(matches))
self.assertEqual(1, matches[0][1]['id'])
def test_match_filter_apartment(self):
possibilities = [
{"id": 0, "tags": {"name": "Berean Baptist Church"}},
{"id": 0, "tags": {"name": "Church of Christ of Albina"}},
{"id": 0, "tags": {"name": "Community Church of God"}},
{"id": 1, "tags": {"name": "The Mason Williams"}},
]
venue_name = "The Mason Williams"
matches = filter_matches(venue_name, possibilities)
self.assertEqual(1, len(matches))
self.assertEqual(1, matches[0][1]['id'])
def test_match_filter_neighborhood(self):
possibilities = [
{"id": 0, "tags": {"name": "Berean Baptist Church"}},
{"id": 0, "tags": {"name": "Church of Christ of Albina"}},
{"id": 0, "tags": {"name": "Community Church of God"}},
{"id": 1, "tags": {"name": "The Mason Williams"}},
]
venue_name = "Tanjong Pagar"
matches = filter_matches(venue_name, possibilities)
self.assertEqual(1, len(matches))
self.assertEqual(1, matches[0][1]['id'])
|
|
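The tests above pin down filter_matches' contract without showing its body: given a check-in venue name and candidate OSM elements, return scored (score, element) pairs for plausible name matches, tolerating suffixes such as "(DCA)" or "- Downtown Staunton". A hypothetical implementation satisfying the first three tests is sketched below -- the difflib scoring, cutoff, and tuple shape are guesses, not the real checkinchecker code (and the neighborhood test asserts a match no candidate name supports, so it presumably exercises logic beyond pure name similarity).

import difflib

def filter_matches(venue_name, possibilities, cutoff=0.6):
    """Return (score, element) pairs whose OSM name resembles venue_name."""
    venue = venue_name.lower()
    matches = []
    for element in possibilities:
        name = element['tags'].get('name', '').lower()
        if not name:
            continue
        score = difflib.SequenceMatcher(None, venue, name).ratio()
        # Boost multi-word names contained verbatim in the venue string, so
        # "Visulite Cinema" matches "Visulite Cinema - Downtown Staunton"
        # while the lone place name "Staunton" does not.
        if len(name.split()) > 1 and name in venue:
            score = max(score, 0.9)
        if score >= cutoff:
            matches.append((score, element))
    return sorted(matches, key=lambda pair: pair[0], reverse=True)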
907d96948c80a2bc5396eb73e679b6cd30a0d895
|
testing/test_directory_deletion.py
|
testing/test_directory_deletion.py
|
import sys
import os
sys.path.insert(0, os.path.realpath('.'))
import pytest
import string
import random
import create_movie as c
@pytest.fixture
def tmp_dirname():
return ''.join(random.choice(
string.ascii_uppercase + string.ascii_lowercase)
for _ in range(10))
@pytest.fixture
def dir_path(tmp_dirname, tmpdir):
p = str(tmpdir.join(tmp_dirname))
assert not os.path.isdir(p)
return p
def test_directory_deletion_with_temp_dir():
with c.temporary_directory() as tdir:
assert os.path.isdir(tdir)
assert not os.path.isdir(tdir)
def test_directory_not_deleted_when_requested():
with c.temporary_directory(delete=False) as tdir:
assert os.path.isdir(tdir)
assert os.path.isdir(tdir)
def test_directory_created_with_name_without_deletion(dir_path):
with c.temporary_directory(images_dir=dir_path, delete=False) as tdir:
assert os.path.isdir(tdir) and 'test' in tdir
assert os.path.isdir(tdir)
def test_directory_created_with_name_with_deletion(dir_path, tmp_dirname):
with c.temporary_directory(images_dir=dir_path, delete=True) as tdir:
assert os.path.isdir(tdir) and os.path.split(tdir)[-1] == tmp_dirname
assert os.path.isdir(tdir)
def test_ensure_dir(dir_path):
c.ensure_dir(dir_path)
assert os.path.isdir(dir_path)
|
Add tests for temporary directory and ensure_dir
|
Add tests for temporary directory and ensure_dir
|
Python
|
mit
|
NGTS/frame-movies,NGTS/frame-movies
|
Add tests for temporary directory and ensure_dir
|
import sys
import os
sys.path.insert(0, os.path.realpath('.'))
import pytest
import string
import random
import create_movie as c
@pytest.fixture
def tmp_dirname():
return ''.join(random.choice(
string.ascii_uppercase + string.ascii_lowercase)
for _ in range(10))
@pytest.fixture
def dir_path(tmp_dirname, tmpdir):
p = str(tmpdir.join(tmp_dirname))
assert not os.path.isdir(p)
return p
def test_directory_deletion_with_temp_dir():
with c.temporary_directory() as tdir:
assert os.path.isdir(tdir)
assert not os.path.isdir(tdir)
def test_directory_not_deleted_when_requested():
with c.temporary_directory(delete=False) as tdir:
assert os.path.isdir(tdir)
assert os.path.isdir(tdir)
def test_directory_created_with_name_without_deletion(dir_path):
with c.temporary_directory(images_dir=dir_path, delete=False) as tdir:
assert os.path.isdir(tdir) and 'test' in tdir
assert os.path.isdir(tdir)
def test_directory_created_with_name_with_deletion(dir_path, tmp_dirname):
with c.temporary_directory(images_dir=dir_path, delete=True) as tdir:
assert os.path.isdir(tdir) and os.path.split(tdir)[-1] == tmp_dirname
assert os.path.isdir(tdir)
def test_ensure_dir(dir_path):
c.ensure_dir(dir_path)
assert os.path.isdir(dir_path)
|
<commit_before><commit_msg>Add tests for temporary directory and ensure_dir<commit_after>
|
import sys
import os
sys.path.insert(0, os.path.realpath('.'))
import pytest
import string
import random
import create_movie as c
@pytest.fixture
def tmp_dirname():
return ''.join(random.choice(
string.ascii_uppercase + string.ascii_lowercase)
for _ in range(10))
@pytest.fixture
def dir_path(tmp_dirname, tmpdir):
p = str(tmpdir.join(tmp_dirname))
assert not os.path.isdir(p)
return p
def test_directory_deletion_with_temp_dir():
with c.temporary_directory() as tdir:
assert os.path.isdir(tdir)
assert not os.path.isdir(tdir)
def test_directory_not_deleted_when_requested():
with c.temporary_directory(delete=False) as tdir:
assert os.path.isdir(tdir)
assert os.path.isdir(tdir)
def test_directory_created_with_name_without_deletion(dir_path):
with c.temporary_directory(images_dir=dir_path, delete=False) as tdir:
assert os.path.isdir(tdir) and 'test' in tdir
assert os.path.isdir(tdir)
def test_directory_created_with_name_with_deletion(dir_path, tmp_dirname):
with c.temporary_directory(images_dir=dir_path, delete=True) as tdir:
assert os.path.isdir(tdir) and os.path.split(tdir)[-1] == tmp_dirname
assert os.path.isdir(tdir)
def test_ensure_dir(dir_path):
c.ensure_dir(dir_path)
assert os.path.isdir(dir_path)
|
Add tests for temporary directory and ensure_dirimport sys
import os
sys.path.insert(0, os.path.realpath('.'))
import pytest
import string
import random
import create_movie as c
@pytest.fixture
def tmp_dirname():
return ''.join(random.choice(
string.ascii_uppercase + string.ascii_lowercase)
for _ in range(10))
@pytest.fixture
def dir_path(tmp_dirname, tmpdir):
p = str(tmpdir.join(tmp_dirname))
assert not os.path.isdir(p)
return p
def test_directory_deletion_with_temp_dir():
with c.temporary_directory() as tdir:
assert os.path.isdir(tdir)
assert not os.path.isdir(tdir)
def test_directory_not_deleted_when_requested():
with c.temporary_directory(delete=False) as tdir:
assert os.path.isdir(tdir)
assert os.path.isdir(tdir)
def test_directory_created_with_name_without_deletion(dir_path):
with c.temporary_directory(images_dir=dir_path, delete=False) as tdir:
assert os.path.isdir(tdir) and 'test' in tdir
assert os.path.isdir(tdir)
def test_directory_created_with_name_with_deletion(dir_path, tmp_dirname):
with c.temporary_directory(images_dir=dir_path, delete=True) as tdir:
assert os.path.isdir(tdir) and os.path.split(tdir)[-1] == tmp_dirname
assert os.path.isdir(tdir)
def test_ensure_dir(dir_path):
c.ensure_dir(dir_path)
assert os.path.isdir(dir_path)
|
<commit_before><commit_msg>Add tests for temporary directory and ensure_dir<commit_after>import sys
import os
sys.path.insert(0, os.path.realpath('.'))
import pytest
import string
import random
import create_movie as c
@pytest.fixture
def tmp_dirname():
return ''.join(random.choice(
string.ascii_uppercase + string.ascii_lowercase)
for _ in range(10))
@pytest.fixture
def dir_path(tmp_dirname, tmpdir):
p = str(tmpdir.join(tmp_dirname))
assert not os.path.isdir(p)
return p
def test_directory_deletion_with_temp_dir():
with c.temporary_directory() as tdir:
assert os.path.isdir(tdir)
assert not os.path.isdir(tdir)
def test_directory_not_deleted_when_requested():
with c.temporary_directory(delete=False) as tdir:
assert os.path.isdir(tdir)
assert os.path.isdir(tdir)
def test_directory_created_with_name_without_deletion(dir_path):
with c.temporary_directory(images_dir=dir_path, delete=False) as tdir:
assert os.path.isdir(tdir) and 'test' in tdir
assert os.path.isdir(tdir)
def test_directory_created_with_name_with_deletion(dir_path, tmp_dirname):
with c.temporary_directory(images_dir=dir_path, delete=True) as tdir:
assert os.path.isdir(tdir) and os.path.split(tdir)[-1] == tmp_dirname
assert os.path.isdir(tdir)
def test_ensure_dir(dir_path):
c.ensure_dir(dir_path)
assert os.path.isdir(dir_path)
|
|
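The four tests above fully specify the context manager's interface: yield a usable directory path, honor a caller-supplied images_dir, and clean up on exit -- except that the last test expects a caller-named directory to survive even with delete=True, so deletion evidently applies only to auto-created temp dirs. A sketch matching that contract (the real create_movie.py may differ):

import contextlib
import os
import shutil
import tempfile

def ensure_dir(path):
    """Create `path` (including parents) if it does not already exist."""
    if not os.path.isdir(path):
        os.makedirs(path)

@contextlib.contextmanager
def temporary_directory(images_dir=None, delete=True):
    """Yield a working directory, optionally at a caller-chosen path."""
    auto_created = images_dir is None
    path = tempfile.mkdtemp() if auto_created else images_dir
    ensure_dir(path)
    try:
        yield path
    finally:
        # Only auto-created temp dirs are removed, mirroring the tests above.
        if delete and auto_created:
            shutil.rmtree(path, ignore_errors=True)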
798f660859fd802bcac54b5dba3fe77cc053baf9
|
website/utils/forms.py
|
website/utils/forms.py
|
from django.forms import models
from django.forms.fields import ChoiceField
class AdvancedModelChoiceIterator(models.ModelChoiceIterator):
def choice(self, obj):
return (
self.field.prepare_value(obj),
self.field.label_from_instance(obj),
obj
)
class AdvancedModelChoiceField(models.ModelChoiceField):
def _get_choices(self):
if hasattr(self, '_choices'):
return self._choices
return AdvancedModelChoiceIterator(self)
choices = property(_get_choices, ChoiceField._set_choices)
class AdvancedModelMultipleChoiceField(models.ModelMultipleChoiceField):
def _get_choices(self):
if hasattr(self, '_choices'):
return self._choices
return AdvancedModelChoiceIterator(self)
choices = property(_get_choices, ChoiceField._set_choices)
|
Add ModelChoiceField util to use actual objects
|
Add ModelChoiceField util to use actual objects
|
Python
|
agpl-3.0
|
Dekker1/moore,UTNkar/moore,UTNkar/moore,UTNkar/moore,Dekker1/moore,Dekker1/moore,UTNkar/moore,Dekker1/moore
|
Add ModelChoiceField util to use actual objects
|
from django.forms import models
from django.forms.fields import ChoiceField
class AdvancedModelChoiceIterator(models.ModelChoiceIterator):
def choice(self, obj):
return (
self.field.prepare_value(obj),
self.field.label_from_instance(obj),
obj
)
class AdvancedModelChoiceField(models.ModelChoiceField):
def _get_choices(self):
if hasattr(self, '_choices'):
return self._choices
return AdvancedModelChoiceIterator(self)
choices = property(_get_choices, ChoiceField._set_choices)
class AdvancedModelMultipleChoiceField(models.ModelMultipleChoiceField):
def _get_choices(self):
if hasattr(self, '_choices'):
return self._choices
return AdvancedModelChoiceIterator(self)
choices = property(_get_choices, ChoiceField._set_choices)
|
<commit_before><commit_msg>Add ModelChoiceField util to use actual objects<commit_after>
|
from django.forms import models
from django.forms.fields import ChoiceField
class AdvancedModelChoiceIterator(models.ModelChoiceIterator):
def choice(self, obj):
return (
self.field.prepare_value(obj),
self.field.label_from_instance(obj),
obj
)
class AdvancedModelChoiceField(models.ModelChoiceField):
def _get_choices(self):
if hasattr(self, '_choices'):
return self._choices
return AdvancedModelChoiceIterator(self)
choices = property(_get_choices, ChoiceField._set_choices)
class AdvancedModelMultipleChoiceField(models.ModelMultipleChoiceField):
def _get_choices(self):
if hasattr(self, '_choices'):
return self._choices
return AdvancedModelChoiceIterator(self)
choices = property(_get_choices, ChoiceField._set_choices)
|
Add ModelChoiceField util to use actual objectsfrom django.forms import models
from django.forms.fields import ChoiceField
class AdvancedModelChoiceIterator(models.ModelChoiceIterator):
def choice(self, obj):
return (
self.field.prepare_value(obj),
self.field.label_from_instance(obj),
obj
)
class AdvancedModelChoiceField(models.ModelChoiceField):
def _get_choices(self):
if hasattr(self, '_choices'):
return self._choices
return AdvancedModelChoiceIterator(self)
choices = property(_get_choices, ChoiceField._set_choices)
class AdvancedModelMultipleChoiceField(models.ModelMultipleChoiceField):
def _get_choices(self):
if hasattr(self, '_choices'):
return self._choices
return AdvancedModelChoiceIterator(self)
choices = property(_get_choices, ChoiceField._set_choices)
|
<commit_before><commit_msg>Add ModelChoiceField util to use actual objects<commit_after>from django.forms import models
from django.forms.fields import ChoiceField
class AdvancedModelChoiceIterator(models.ModelChoiceIterator):
def choice(self, obj):
return (
self.field.prepare_value(obj),
self.field.label_from_instance(obj),
obj
)
class AdvancedModelChoiceField(models.ModelChoiceField):
def _get_choices(self):
if hasattr(self, '_choices'):
return self._choices
return AdvancedModelChoiceIterator(self)
choices = property(_get_choices, ChoiceField._set_choices)
class AdvancedModelMultipleChoiceField(models.ModelMultipleChoiceField):
def _get_choices(self):
if hasattr(self, '_choices'):
return self._choices
return AdvancedModelChoiceIterator(self)
choices = property(_get_choices, ChoiceField._set_choices)
|
|
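The point of threading the model instance through as a third tuple element is that widgets and templates can read attributes off each choice's object, which the stock ModelChoiceField hides behind (value, label) pairs. A sketch of one way to consume it -- the Section model and its fee field are placeholders, not part of the Moore codebase:

from django import forms
from website.utils.forms import AdvancedModelChoiceField
from myapp.models import Section  # placeholder model

class SignupForm(forms.Form):
    section = AdvancedModelChoiceField(
        queryset=Section.objects.all(),
        widget=forms.RadioSelect,
    )

# In a custom widget template each choice now unpacks to (value, label, obj),
# so per-object data can be rendered alongside the label:
#   {% for value, label, obj in field.field.choices %}
#     <label data-fee="{{ obj.fee }}">{{ label }}</label>
#   {% endfor %}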
0ba0a96aaa05f0b71d4f7028145211308d886e2a
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
import simiki
entry_points = {
"console_scripts": [
"simiki = simiki.cli:main",
]
}
requires = open("requirements.txt").readlines()
setup(
name = "simiki",
version = simiki.__version__,
url = "https://github.com/tankywoo/simiki",
author = "Tanky Woo",
author_email = "me@tankywoo.com",
description = "Simiki is a simple wiki framework, written in Python.",
keywords = "simiki, wiki, generator",
license = "MIT License",
packages = find_packages(),
include_package_data=True,
install_requires = requires,
entry_points = entry_points,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
],
)
|
#!/usr/bin/env python
from __future__ import with_statement
from setuptools import setup, find_packages
import simiki
entry_points = {
"console_scripts": [
"simiki = simiki.cli:main",
]
}
with open("requirements.txt") as f:
requires = f.readlines()
setup(
name = "simiki",
version = simiki.__version__,
url = "https://github.com/tankywoo/simiki",
author = "Tanky Woo",
author_email = "me@tankywoo.com",
description = "Simiki is a simple wiki framework, written in Python.",
keywords = "simiki, wiki, generator",
license = "MIT License",
packages = find_packages(),
include_package_data=True,
install_requires = requires,
entry_points = entry_points,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
],
)
|
Use with statement to ensure the file is closed by Python
|
Use with statement to ensure the file is closed by Python
|
Python
|
mit
|
zhaochunqi/simiki,9p0le/simiki,tankywoo/simiki,zhaochunqi/simiki,tankywoo/simiki,9p0le/simiki,9p0le/simiki,zhaochunqi/simiki,tankywoo/simiki
|
#!/usr/bin/env python
from setuptools import setup, find_packages
import simiki
entry_points = {
"console_scripts": [
"simiki = simiki.cli:main",
]
}
requires = open("requirements.txt").readlines()
setup(
name = "simiki",
version = simiki.__version__,
url = "https://github.com/tankywoo/simiki",
author = "Tanky Woo",
author_email = "me@tankywoo.com",
description = "Simiki is a simple wiki framework, written in Python.",
keywords = "simiki, wiki, generator",
license = "MIT License",
packages = find_packages(),
include_package_data=True,
install_requires = requires,
entry_points = entry_points,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
],
)
Use with statement to ensure the file is closed by Python
|
#!/usr/bin/env python
from __future__ import with_statement
from setuptools import setup, find_packages
import simiki
entry_points = {
"console_scripts": [
"simiki = simiki.cli:main",
]
}
with open("requirements.txt") as f:
requires = f.readlines()
setup(
name = "simiki",
version = simiki.__version__,
url = "https://github.com/tankywoo/simiki",
author = "Tanky Woo",
author_email = "me@tankywoo.com",
description = "Simiki is a simple wiki framework, written in Python.",
keywords = "simiki, wiki, generator",
license = "MIT License",
packages = find_packages(),
include_package_data=True,
install_requires = requires,
entry_points = entry_points,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
],
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
import simiki
entry_points = {
"console_scripts": [
"simiki = simiki.cli:main",
]
}
requires = open("requirements.txt").readlines()
setup(
name = "simiki",
version = simiki.__version__,
url = "https://github.com/tankywoo/simiki",
author = "Tanky Woo",
author_email = "me@tankywoo.com",
description = "Simiki is a simple wiki framework, written in Python.",
keywords = "simiki, wiki, generator",
license = "MIT License",
packages = find_packages(),
include_package_data=True,
install_requires = requires,
entry_points = entry_points,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
],
)
<commit_msg>Use with statement to ensure the file is closed by Python<commit_after>
|
#!/usr/bin/env python
from __future__ import with_statement
from setuptools import setup, find_packages
import simiki
entry_points = {
"console_scripts": [
"simiki = simiki.cli:main",
]
}
with open("requirements.txt") as f:
requires = f.readlines()
setup(
name = "simiki",
version = simiki.__version__,
url = "https://github.com/tankywoo/simiki",
author = "Tanky Woo",
author_email = "me@tankywoo.com",
description = "Simiki is a simple wiki framework, written in Python.",
keywords = "simiki, wiki, generator",
license = "MIT License",
packages = find_packages(),
include_package_data=True,
install_requires = requires,
entry_points = entry_points,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
],
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
import simiki
entry_points = {
"console_scripts": [
"simiki = simiki.cli:main",
]
}
requires = open("requirements.txt").readlines()
setup(
name = "simiki",
version = simiki.__version__,
url = "https://github.com/tankywoo/simiki",
author = "Tanky Woo",
author_email = "me@tankywoo.com",
description = "Simiki is a simple wiki framework, written in Python.",
keywords = "simiki, wiki, generator",
license = "MIT License",
packages = find_packages(),
include_package_data=True,
install_requires = requires,
entry_points = entry_points,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
],
)
Use with statement to ensure the file is closed by Python#!/usr/bin/env python
from __future__ import with_statement
from setuptools import setup, find_packages
import simiki
entry_points = {
"console_scripts": [
"simiki = simiki.cli:main",
]
}
with open("requirements.txt") as f:
requires = f.readlines()
setup(
name = "simiki",
version = simiki.__version__,
url = "https://github.com/tankywoo/simiki",
author = "Tanky Woo",
author_email = "me@tankywoo.com",
description = "Simiki is a simple wiki framework, written in Python.",
keywords = "simiki, wiki, generator",
license = "MIT License",
packages = find_packages(),
include_package_data=True,
install_requires = requires,
entry_points = entry_points,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
],
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
import simiki
entry_points = {
"console_scripts": [
"simiki = simiki.cli:main",
]
}
requires = open("requirements.txt").readlines()
setup(
name = "simiki",
version = simiki.__version__,
url = "https://github.com/tankywoo/simiki",
author = "Tanky Woo",
author_email = "me@tankywoo.com",
description = "Simiki is a simple wiki framework, written in Python.",
keywords = "simiki, wiki, generator",
license = "MIT License",
packages = find_packages(),
include_package_data=True,
install_requires = requires,
entry_points = entry_points,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
],
)
<commit_msg>Use with statement to ensure the file is closed by Python<commit_after>#!/usr/bin/env python
from __future__ import with_statement
from setuptools import setup, find_packages
import simiki
entry_points = {
"console_scripts": [
"simiki = simiki.cli:main",
]
}
with open("requirements.txt") as f:
requires = f.readlines()
setup(
name = "simiki",
version = simiki.__version__,
url = "https://github.com/tankywoo/simiki",
author = "Tanky Woo",
author_email = "me@tankywoo.com",
description = "Simiki is a simple wiki framework, written in Python.",
keywords = "simiki, wiki, generator",
license = "MIT License",
packages = find_packages(),
include_package_data=True,
install_requires = requires,
entry_points = entry_points,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
],
)
|
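The motivation is easiest to see by expanding what the with statement does: it is roughly sugar for a try/finally that closes the file even if readlines() raises. A minimal sketch of that equivalence (illustration only, not part of either version of setup.py):
f = open("requirements.txt")
try:
    requires = f.readlines()
finally:
    f.close()  # runs even on error, which is exactly what the with block guarantees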
9ae72acc792fe4c1c246256c8da8070969cc246f
|
tests/test_types.py
|
tests/test_types.py
|
"""tests/test_types.py.
Tests the type validators included with hug
Copyright (C) 2015 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import pytest
import hug
from datetime import datetime
def test_number():
    assert hug.types.number('1') == 1
    assert hug.types.number(1) == 1
    with pytest.raises(ValueError):
        hug.types.number('bacon')
def test_list():
    assert hug.types.list('value') == ['value']
    assert hug.types.list(['value1', 'value2']) == ['value1', 'value2']
def test_comma_separated_list():
    assert hug.types.comma_separated_list('value') == ['value']
    assert hug.types.comma_separated_list('value1,value2') == ['value1', 'value2']
def test_decimal():
    assert hug.types.decimal('1.1') == 1.1
    assert hug.types.decimal('1') == float(1)
    assert hug.types.decimal(1.1) == 1.1
    with pytest.raises(ValueError):
        hug.types.decimal('bacon')
def test_text():
    assert hug.types.text('1') == '1'
    assert hug.types.text(1) == '1'
    assert hug.types.text('text') == 'text'
def test_inline_dictionary():
    assert hug.types.inline_dictionary('1:2') == {'1': '2'}
    assert hug.types.inline_dictionary('1:2|3:4') == {'1': '2', '3': '4'}
    with pytest.raises(ValueError):
        hug.types.inline_dictionary('1')
|
Add tests for all hug types
|
Add tests for all hug types
|
Python
|
mit
|
philiptzou/hug,janusnic/hug,yasoob/hug,yasoob/hug,giserh/hug,MuhammadAlkarouri/hug,timothycrosley/hug,shaunstanislaus/hug,STANAPO/hug,gbn972/hug,shaunstanislaus/hug,timothycrosley/hug,origingod/hug,alisaifee/hug,STANAPO/hug,philiptzou/hug,origingod/hug,janusnic/hug,jean/hug,gbn972/hug,timothycrosley/hug,giserh/hug,jean/hug,MuhammadAlkarouri/hug,MuhammadAlkarouri/hug,alisaifee/hug
|
Add tests for all hug types
|
"""tests/test_types.py.
Tests the type validators included with hug
Copyright (C) 2015 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import pytest
import hug
from datetime import datetime
def test_number():
    assert hug.types.number('1') == 1
    assert hug.types.number(1) == 1
    with pytest.raises(ValueError):
        hug.types.number('bacon')
def test_list():
    assert hug.types.list('value') == ['value']
    assert hug.types.list(['value1', 'value2']) == ['value1', 'value2']
def test_comma_separated_list():
    assert hug.types.comma_separated_list('value') == ['value']
    assert hug.types.comma_separated_list('value1,value2') == ['value1', 'value2']
def test_decimal():
    assert hug.types.decimal('1.1') == 1.1
    assert hug.types.decimal('1') == float(1)
    assert hug.types.decimal(1.1) == 1.1
    with pytest.raises(ValueError):
        hug.types.decimal('bacon')
def test_text():
    assert hug.types.text('1') == '1'
    assert hug.types.text(1) == '1'
    assert hug.types.text('text') == 'text'
def test_inline_dictionary():
    assert hug.types.inline_dictionary('1:2') == {'1': '2'}
    assert hug.types.inline_dictionary('1:2|3:4') == {'1': '2', '3': '4'}
    with pytest.raises(ValueError):
        hug.types.inline_dictionary('1')
|
<commit_before><commit_msg>Add tests for all hug types<commit_after>
|
"""tests/test_types.py.
Tests the type validators included with hug
Copyright (C) 2015 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import pytest
import hug
from datetime import datetime
def test_number():
    assert hug.types.number('1') == 1
    assert hug.types.number(1) == 1
    with pytest.raises(ValueError):
        hug.types.number('bacon')
def test_list():
    assert hug.types.list('value') == ['value']
    assert hug.types.list(['value1', 'value2']) == ['value1', 'value2']
def test_comma_separated_list():
    assert hug.types.comma_separated_list('value') == ['value']
    assert hug.types.comma_separated_list('value1,value2') == ['value1', 'value2']
def test_decimal():
    assert hug.types.decimal('1.1') == 1.1
    assert hug.types.decimal('1') == float(1)
    assert hug.types.decimal(1.1) == 1.1
    with pytest.raises(ValueError):
        hug.types.decimal('bacon')
def test_text():
    assert hug.types.text('1') == '1'
    assert hug.types.text(1) == '1'
    assert hug.types.text('text') == 'text'
def test_inline_dictionary():
    assert hug.types.inline_dictionary('1:2') == {'1': '2'}
    assert hug.types.inline_dictionary('1:2|3:4') == {'1': '2', '3': '4'}
    with pytest.raises(ValueError):
        hug.types.inline_dictionary('1')
|
Add tests for all hug types"""tests/test_types.py.
Tests the type validators included with hug
Copyright (C) 2015 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import pytest
import hug
from datetime import datetime
def test_number():
    assert hug.types.number('1') == 1
    assert hug.types.number(1) == 1
    with pytest.raises(ValueError):
        hug.types.number('bacon')
def test_list():
    assert hug.types.list('value') == ['value']
    assert hug.types.list(['value1', 'value2']) == ['value1', 'value2']
def test_comma_separated_list():
    assert hug.types.comma_separated_list('value') == ['value']
    assert hug.types.comma_separated_list('value1,value2') == ['value1', 'value2']
def test_decimal():
    assert hug.types.decimal('1.1') == 1.1
    assert hug.types.decimal('1') == float(1)
    assert hug.types.decimal(1.1) == 1.1
    with pytest.raises(ValueError):
        hug.types.decimal('bacon')
def test_text():
    assert hug.types.text('1') == '1'
    assert hug.types.text(1) == '1'
    assert hug.types.text('text') == 'text'
def test_inline_dictionary():
    assert hug.types.inline_dictionary('1:2') == {'1': '2'}
    assert hug.types.inline_dictionary('1:2|3:4') == {'1': '2', '3': '4'}
    with pytest.raises(ValueError):
        hug.types.inline_dictionary('1')
|
<commit_before><commit_msg>Add tests for all hug types<commit_after>"""tests/test_types.py.
Tests the type validators included with hug
Copyright (C) 2015 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import pytest
import hug
from datetime import datetime
def test_number():
    assert hug.types.number('1') == 1
    assert hug.types.number(1) == 1
    with pytest.raises(ValueError):
        hug.types.number('bacon')
def test_list():
    assert hug.types.list('value') == ['value']
    assert hug.types.list(['value1', 'value2']) == ['value1', 'value2']
def test_comma_separated_list():
    assert hug.types.comma_separated_list('value') == ['value']
    assert hug.types.comma_separated_list('value1,value2') == ['value1', 'value2']
def test_decimal():
    assert hug.types.decimal('1.1') == 1.1
    assert hug.types.decimal('1') == float(1)
    assert hug.types.decimal(1.1) == 1.1
    with pytest.raises(ValueError):
        hug.types.decimal('bacon')
def test_text():
    assert hug.types.text('1') == '1'
    assert hug.types.text(1) == '1'
    assert hug.types.text('text') == 'text'
def test_inline_dictionary():
    assert hug.types.inline_dictionary('1:2') == {'1': '2'}
    assert hug.types.inline_dictionary('1:2|3:4') == {'1': '2', '3': '4'}
    with pytest.raises(ValueError):
        hug.types.inline_dictionary('1')
|
|
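These validators are intended to be attached as argument annotations on hug endpoints, where hug coerces and validates inputs before the handler runs. A sketch of that usage (based on hug's documented annotation API; the /add route is invented for illustration):
import hug

@hug.get('/add')
def add(a: hug.types.number, b: hug.types.number):
    return a + b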
dfdfe3d713fa55825f3ec4b3a9f19b923963e558
|
py/beautiful-arrangement.py
|
py/beautiful-arrangement.py
|
class Solution(object):
def countArrangement(self, N):
"""
:type N: int
:rtype: int
"""
table = dict()
def dfs(cur, s):
fs = frozenset(s)
if (cur, fs) not in table:
if not s:
table[cur, fs] = 1
else:
ans = 0
for n in s:
if n % cur == 0 or cur % n == 0:
s.remove(n)
ans += dfs(cur + 1, s)
s.add(n)
table[cur, fs] = ans
return table[cur, fs]
return dfs(1, set(xrange(1, N + 1)))
|
Add py solution for 526. Beautiful Arrangement
|
Add py solution for 526. Beautiful Arrangement
526. Beautiful Arrangement: https://leetcode.com/problems/beautiful-arrangement/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 526. Beautiful Arrangement
526. Beautiful Arrangement: https://leetcode.com/problems/beautiful-arrangement/
|
class Solution(object):
def countArrangement(self, N):
"""
:type N: int
:rtype: int
"""
table = dict()
def dfs(cur, s):
fs = frozenset(s)
if (cur, fs) not in table:
if not s:
table[cur, fs] = 1
else:
ans = 0
for n in s:
if n % cur == 0 or cur % n == 0:
s.remove(n)
ans += dfs(cur + 1, s)
s.add(n)
table[cur, fs] = ans
return table[cur, fs]
return dfs(1, set(xrange(1, N + 1)))
|
<commit_before><commit_msg>Add py solution for 526. Beautiful Arrangement
526. Beautiful Arrangement: https://leetcode.com/problems/beautiful-arrangement/<commit_after>
|
class Solution(object):
def countArrangement(self, N):
"""
:type N: int
:rtype: int
"""
table = dict()
def dfs(cur, s):
fs = frozenset(s)
if (cur, fs) not in table:
if not s:
table[cur, fs] = 1
else:
ans = 0
for n in s:
if n % cur == 0 or cur % n == 0:
s.remove(n)
ans += dfs(cur + 1, s)
s.add(n)
table[cur, fs] = ans
return table[cur, fs]
return dfs(1, set(xrange(1, N + 1)))
|
Add py solution for 526. Beautiful Arrangement
526. Beautiful Arrangement: https://leetcode.com/problems/beautiful-arrangement/class Solution(object):
def countArrangement(self, N):
"""
:type N: int
:rtype: int
"""
table = dict()
def dfs(cur, s):
fs = frozenset(s)
if (cur, fs) not in table:
if not s:
table[cur, fs] = 1
else:
ans = 0
for n in s:
if n % cur == 0 or cur % n == 0:
s.remove(n)
ans += dfs(cur + 1, s)
s.add(n)
table[cur, fs] = ans
return table[cur, fs]
return dfs(1, set(xrange(1, N + 1)))
|
<commit_before><commit_msg>Add py solution for 526. Beautiful Arrangement
526. Beautiful Arrangement: https://leetcode.com/problems/beautiful-arrangement/<commit_after>class Solution(object):
def countArrangement(self, N):
"""
:type N: int
:rtype: int
"""
table = dict()
def dfs(cur, s):
fs = frozenset(s)
if (cur, fs) not in table:
if not s:
table[cur, fs] = 1
else:
ans = 0
for n in s:
if n % cur == 0 or cur % n == 0:
s.remove(n)
ans += dfs(cur + 1, s)
s.add(n)
table[cur, fs] = ans
return table[cur, fs]
return dfs(1, set(xrange(1, N + 1)))
|
|
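A quick sanity check for countArrangement is the small cases, which can be enumerated by hand (a sketch; assumes the Solution class above is in scope):
sol = Solution()
print sol.countArrangement(2)  # 2, since both [1, 2] and [2, 1] qualify
print sol.countArrangement(3)  # 3
print sol.countArrangement(4)  # 8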
a7a290d7230715eb58c29f12ad54477da5fe2209
|
chapter03/secondsAndHours.py
|
chapter03/secondsAndHours.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def toSeconds(h, m, s):
return h * 3600 + m * 60 + s
def toHoursMinutesAndSeconds(s):
hours = s / 3600
minutes = s % 3600 / 60
seconds = s % 3600 % 60
return (hours, minutes, seconds)
print "10h 30m and 50s are {0} seconds".format(toSeconds(10, 30, 50))
print toHoursMinutesAndSeconds(37850)
|
Add seconds and hours exercise
|
Add seconds and hours exercise
|
Python
|
apache-2.0
|
MindCookin/python-exercises
|
Add seconds and hours exercise
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def toSeconds(h, m, s):
return h * 3600 + m * 60 + s
def toHoursMinutesAndSeconds(s):
hours = s / 3600
minutes = s % 3600 / 60
seconds = s % 3600 % 60
return (hours, minutes, seconds)
print "10h 30m and 50s are {0} seconds".format(toSeconds(10, 30, 50))
print toHoursMinutesAndSeconds(37850)
|
<commit_before><commit_msg>Add seconds and hours exercise<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def toSeconds(h, m, s):
return h * 3600 + m * 60 + s
def toHoursMinutesAndSeconds(s):
hours = s / 3600
minutes = s % 3600 / 60
seconds = s % 3600 % 60
return (hours, minutes, seconds)
print "10h 30m and 50s are {0} seconds".format(toSeconds(10, 30, 50))
print toHoursMinutesAndSeconds(37850)
|
Add seconds and hours exercise#!/usr/bin/env python
# -*- coding: utf-8 -*-
def toSeconds(h, m, s):
return h * 3600 + m * 60 + s
def toHoursMinutesAndSeconds(s):
hours = s / 3600
minutes = s % 3600 / 60
seconds = s % 3600 % 60
return (hours, minutes, seconds)
print "10h 30m and 50s are {0} seconds".format(toSeconds(10, 30, 50))
print toHoursMinutesAndSeconds(37850)
|
<commit_before><commit_msg>Add seconds and hours exercise<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
def toSeconds(h, m, s):
return h * 3600 + m * 60 + s
def toHoursMinutesAndSeconds(s):
hours = s / 3600
minutes = s % 3600 / 60
seconds = s % 3600 % 60
return (hours, minutes, seconds)
print "10h 30m and 50s are {0} seconds".format(toSeconds(10, 30, 50))
print toHoursMinutesAndSeconds(37850)
|
|
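Because the two conversions are inverses for in-range values, a round-trip doubles as a worked example: 10h 30m 50s is 10*3600 + 30*60 + 50 = 37850 seconds, and converting back recovers the same triple (a sketch, assuming both functions are in scope):
assert toSeconds(10, 30, 50) == 37850
assert toHoursMinutesAndSeconds(37850) == (10, 30, 50)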
299f6f669afb1cac23090caa7df1b8c2501b84fa
|
test/test_loader.py
|
test/test_loader.py
|
import dirq
import mock
import os
import shutil
import tempfile
import unittest
import apel.db.loader
schema = {"body": "string", "signer": "string",
"empaid": "string?", "error": "string?"}
class LoaderTest(unittest.TestCase):
def setUp(self):
self.queue_path = tempfile.mkdtemp()
mock.patch('apel.db.ApelDb').start()
in_q = dirq.Queue(os.path.join(self.queue_path, 'incoming'),
schema=schema)
in_q.add({"body": "test body", "signer": "test signer",
"empaid": "", "error": ""})
self.loader = apel.db.loader.Loader(self.queue_path, False, 'mysql',
'host', 1234, 'db', 'user', 'pwd',
'somefile')
def test_basic_load_all(self):
"""Check that load_all_msgs runs without problems."""
self.loader.load_all_msgs()
def tearDown(self):
shutil.rmtree(self.queue_path)
mock.patch.stopall()
if __name__ == '__main__':
unittest.main()
|
Add basic test for load_all_msgs
|
Add basic test for load_all_msgs
|
Python
|
apache-2.0
|
stfc/apel,apel/apel,tofu-rocketry/apel,stfc/apel,tofu-rocketry/apel,apel/apel
|
Add basic test for load_all_msgs
|
import dirq
import mock
import os
import shutil
import tempfile
import unittest
import apel.db.loader
schema = {"body": "string", "signer": "string",
"empaid": "string?", "error": "string?"}
class LoaderTest(unittest.TestCase):
def setUp(self):
self.queue_path = tempfile.mkdtemp()
mock.patch('apel.db.ApelDb').start()
in_q = dirq.Queue(os.path.join(self.queue_path, 'incoming'),
schema=schema)
in_q.add({"body": "test body", "signer": "test signer",
"empaid": "", "error": ""})
self.loader = apel.db.loader.Loader(self.queue_path, False, 'mysql',
'host', 1234, 'db', 'user', 'pwd',
'somefile')
def test_basic_load_all(self):
"""Check that load_all_msgs runs without problems."""
self.loader.load_all_msgs()
def tearDown(self):
shutil.rmtree(self.queue_path)
mock.patch.stopall()
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add basic test for load_all_msgs<commit_after>
|
import dirq
import mock
import os
import shutil
import tempfile
import unittest
import apel.db.loader
schema = {"body": "string", "signer": "string",
"empaid": "string?", "error": "string?"}
class LoaderTest(unittest.TestCase):
def setUp(self):
self.queue_path = tempfile.mkdtemp()
mock.patch('apel.db.ApelDb').start()
in_q = dirq.Queue(os.path.join(self.queue_path, 'incoming'),
schema=schema)
in_q.add({"body": "test body", "signer": "test signer",
"empaid": "", "error": ""})
self.loader = apel.db.loader.Loader(self.queue_path, False, 'mysql',
'host', 1234, 'db', 'user', 'pwd',
'somefile')
def test_basic_load_all(self):
"""Check that load_all_msgs runs without problems."""
self.loader.load_all_msgs()
def tearDown(self):
shutil.rmtree(self.queue_path)
mock.patch.stopall()
if __name__ == '__main__':
unittest.main()
|
Add basic test for load_all_msgsimport dirq
import mock
import os
import shutil
import tempfile
import unittest
import apel.db.loader
schema = {"body": "string", "signer": "string",
"empaid": "string?", "error": "string?"}
class LoaderTest(unittest.TestCase):
def setUp(self):
self.queue_path = tempfile.mkdtemp()
mock.patch('apel.db.ApelDb').start()
in_q = dirq.Queue(os.path.join(self.queue_path, 'incoming'),
schema=schema)
in_q.add({"body": "test body", "signer": "test signer",
"empaid": "", "error": ""})
self.loader = apel.db.loader.Loader(self.queue_path, False, 'mysql',
'host', 1234, 'db', 'user', 'pwd',
'somefile')
def test_basic_load_all(self):
"""Check that load_all_msgs runs without problems."""
self.loader.load_all_msgs()
def tearDown(self):
shutil.rmtree(self.queue_path)
mock.patch.stopall()
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add basic test for load_all_msgs<commit_after>import dirq
import mock
import os
import shutil
import tempfile
import unittest
import apel.db.loader
schema = {"body": "string", "signer": "string",
"empaid": "string?", "error": "string?"}
class LoaderTest(unittest.TestCase):
def setUp(self):
self.queue_path = tempfile.mkdtemp()
mock.patch('apel.db.ApelDb').start()
in_q = dirq.Queue(os.path.join(self.queue_path, 'incoming'),
schema=schema)
in_q.add({"body": "test body", "signer": "test signer",
"empaid": "", "error": ""})
self.loader = apel.db.loader.Loader(self.queue_path, False, 'mysql',
'host', 1234, 'db', 'user', 'pwd',
'somefile')
def test_basic_load_all(self):
"""Check that load_all_msgs runs without problems."""
self.loader.load_all_msgs()
def tearDown(self):
shutil.rmtree(self.queue_path)
mock.patch.stopall()
if __name__ == '__main__':
unittest.main()
|
|
05a1eaeabec009874fdf05b598bcde2a4aaabc4f
|
testbed_image_migrate.py
|
testbed_image_migrate.py
|
#!/usr/bin/env python3
"""A scrip for migrating Testbed images from database to external storage."""
import argparse
from pathlib import Path
import psycopg2
def read_images(cursor):
"""Returns the ID, recorded datetime and image content of all Testbed images."""
cursor.execute('SELECT id, recorded, testbed_image FROM observations WHERE '
'testbed_image IS NOT NULL ORDER BY id ASC')
return cursor.fetchall()
def handle_image(image, image_dir, cursor):
"""Creates a directory as needed, writes the image to a file and updates the database."""
name = image[1].strftime('testbed-%Y-%m-%dT%H:%M%z.png')
dir_name = image[1].strftime('%Y-%m-%d')
path = Path('{}/{}'.format(image_dir, dir_name))
if not path.exists():
path.mkdir(mode=0o755)
with open('{}/{}'.format(str(path), name), 'wb') as img:
img.write(image[2])
cursor.execute('UPDATE observations SET tb_image_name = %s WHERE id = %s', (name, image[0]))
def main():
# pylint: disable=missing-docstring
parser = argparse.ArgumentParser(description='Migrate env-logger Testbed images.')
parser.add_argument('db_name', type=str, help='database name')
parser.add_argument('db_user', type=str, help='database user')
parser.add_argument('db_password', type=str, help='database password')
parser.add_argument('image_dest_dir', type=str, help='destination directory for images')
args = parser.parse_args()
with psycopg2.connect(dbname=args.db_name, user=args.db_user,
password=args.db_password) as conn:
with conn.cursor() as cursor:
images = read_images(cursor)
for idx, image in enumerate(images):
if idx % 200 == 0:
print('Processing image number {} of {}'.format(idx, len(images)))
handle_image(image, args.image_dest_dir, cursor)
conn.commit()
if __name__ == '__main__':
main()
|
Add Testbed image migration script
|
Add Testbed image migration script
|
Python
|
mit
|
terop/env-logger,terop/env-logger,terop/env-logger,terop/env-logger,terop/env-logger,terop/env-logger,terop/env-logger
|
Add Testbed image migration script
|
#!/usr/bin/env python3
"""A scrip for migrating Testbed images from database to external storage."""
import argparse
from pathlib import Path
import psycopg2
def read_images(cursor):
"""Returns the ID, recorded datetime and image content of all Testbed images."""
cursor.execute('SELECT id, recorded, testbed_image FROM observations WHERE '
'testbed_image IS NOT NULL ORDER BY id ASC')
return cursor.fetchall()
def handle_image(image, image_dir, cursor):
"""Creates a directory as needed, writes the image to a file and updates the database."""
name = image[1].strftime('testbed-%Y-%m-%dT%H:%M%z.png')
dir_name = image[1].strftime('%Y-%m-%d')
path = Path('{}/{}'.format(image_dir, dir_name))
if not path.exists():
path.mkdir(mode=0o755)
with open('{}/{}'.format(str(path), name), 'wb') as img:
img.write(image[2])
cursor.execute('UPDATE observations SET tb_image_name = %s WHERE id = %s', (name, image[0]))
def main():
# pylint: disable=missing-docstring
parser = argparse.ArgumentParser(description='Migrate env-logger Testbed images.')
parser.add_argument('db_name', type=str, help='database name')
parser.add_argument('db_user', type=str, help='database user')
parser.add_argument('db_password', type=str, help='database password')
parser.add_argument('image_dest_dir', type=str, help='destination directory for images')
args = parser.parse_args()
with psycopg2.connect(dbname=args.db_name, user=args.db_user,
password=args.db_password) as conn:
with conn.cursor() as cursor:
images = read_images(cursor)
for idx, image in enumerate(images):
if idx % 200 == 0:
print('Processing image number {} of {}'.format(idx, len(images)))
handle_image(image, args.image_dest_dir, cursor)
conn.commit()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add Testbed image migration script<commit_after>
|
#!/usr/bin/env python3
"""A scrip for migrating Testbed images from database to external storage."""
import argparse
from pathlib import Path
import psycopg2
def read_images(cursor):
"""Returns the ID, recorded datetime and image content of all Testbed images."""
cursor.execute('SELECT id, recorded, testbed_image FROM observations WHERE '
'testbed_image IS NOT NULL ORDER BY id ASC')
return cursor.fetchall()
def handle_image(image, image_dir, cursor):
"""Creates a directory as needed, writes the image to a file and updates the database."""
name = image[1].strftime('testbed-%Y-%m-%dT%H:%M%z.png')
dir_name = image[1].strftime('%Y-%m-%d')
path = Path('{}/{}'.format(image_dir, dir_name))
if not path.exists():
path.mkdir(mode=0o755)
with open('{}/{}'.format(str(path), name), 'wb') as img:
img.write(image[2])
cursor.execute('UPDATE observations SET tb_image_name = %s WHERE id = %s', (name, image[0]))
def main():
# pylint: disable=missing-docstring
parser = argparse.ArgumentParser(description='Migrate env-logger Testbed images.')
parser.add_argument('db_name', type=str, help='database name')
parser.add_argument('db_user', type=str, help='database user')
parser.add_argument('db_password', type=str, help='database password')
parser.add_argument('image_dest_dir', type=str, help='destination directory for images')
args = parser.parse_args()
with psycopg2.connect(dbname=args.db_name, user=args.db_user,
password=args.db_password) as conn:
with conn.cursor() as cursor:
images = read_images(cursor)
for idx, image in enumerate(images):
if idx % 200 == 0:
print('Processing image number {} of {}'.format(idx, len(images)))
handle_image(image, args.image_dest_dir, cursor)
conn.commit()
if __name__ == '__main__':
main()
|
Add Testbed image migration script#!/usr/bin/env python3
"""A scrip for migrating Testbed images from database to external storage."""
import argparse
from pathlib import Path
import psycopg2
def read_images(cursor):
"""Returns the ID, recorded datetime and image content of all Testbed images."""
cursor.execute('SELECT id, recorded, testbed_image FROM observations WHERE '
'testbed_image IS NOT NULL ORDER BY id ASC')
return cursor.fetchall()
def handle_image(image, image_dir, cursor):
"""Creates a directory as needed, writes the image to a file and updates the database."""
name = image[1].strftime('testbed-%Y-%m-%dT%H:%M%z.png')
dir_name = image[1].strftime('%Y-%m-%d')
path = Path('{}/{}'.format(image_dir, dir_name))
if not path.exists():
path.mkdir(mode=0o755)
with open('{}/{}'.format(str(path), name), 'wb') as img:
img.write(image[2])
cursor.execute('UPDATE observations SET tb_image_name = %s WHERE id = %s', (name, image[0]))
def main():
# pylint: disable=missing-docstring
parser = argparse.ArgumentParser(description='Migrate env-logger Testbed images.')
parser.add_argument('db_name', type=str, help='database name')
parser.add_argument('db_user', type=str, help='database user')
parser.add_argument('db_password', type=str, help='database password')
parser.add_argument('image_dest_dir', type=str, help='destination directory for images')
args = parser.parse_args()
with psycopg2.connect(dbname=args.db_name, user=args.db_user,
password=args.db_password) as conn:
with conn.cursor() as cursor:
images = read_images(cursor)
for idx, image in enumerate(images):
if idx % 200 == 0:
print('Processing image number {} of {}'.format(idx, len(images)))
handle_image(image, args.image_dest_dir, cursor)
conn.commit()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add Testbed image migration script<commit_after>#!/usr/bin/env python3
"""A scrip for migrating Testbed images from database to external storage."""
import argparse
from pathlib import Path
import psycopg2
def read_images(cursor):
"""Returns the ID, recorded datetime and image content of all Testbed images."""
cursor.execute('SELECT id, recorded, testbed_image FROM observations WHERE '
'testbed_image IS NOT NULL ORDER BY id ASC')
return cursor.fetchall()
def handle_image(image, image_dir, cursor):
"""Creates a directory as needed, writes the image to a file and updates the database."""
name = image[1].strftime('testbed-%Y-%m-%dT%H:%M%z.png')
dir_name = image[1].strftime('%Y-%m-%d')
path = Path('{}/{}'.format(image_dir, dir_name))
if not path.exists():
path.mkdir(mode=0o755)
with open('{}/{}'.format(str(path), name), 'wb') as img:
img.write(image[2])
cursor.execute('UPDATE observations SET tb_image_name = %s WHERE id = %s', (name, image[0]))
def main():
# pylint: disable=missing-docstring
parser = argparse.ArgumentParser(description='Migrate env-logger Testbed images.')
parser.add_argument('db_name', type=str, help='database name')
parser.add_argument('db_user', type=str, help='database user')
parser.add_argument('db_password', type=str, help='database password')
parser.add_argument('image_dest_dir', type=str, help='destination directory for images')
args = parser.parse_args()
with psycopg2.connect(dbname=args.db_name, user=args.db_user,
password=args.db_password) as conn:
with conn.cursor() as cursor:
images = read_images(cursor)
for idx, image in enumerate(images):
if idx % 200 == 0:
print('Processing image number {} of {}'.format(idx, len(images)))
handle_image(image, args.image_dest_dir, cursor)
conn.commit()
if __name__ == '__main__':
main()
|
|
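The naming scheme in handle_image deserves a concrete example: for a naive datetime the %z directive expands to an empty string, so generated names carry no UTC offset (illustration only, not part of the migration script):
from datetime import datetime

recorded = datetime(2017, 4, 3, 17, 14)
print(recorded.strftime('testbed-%Y-%m-%dT%H:%M%z.png'))  # testbed-2017-04-03T17:14.png
print(recorded.strftime('%Y-%m-%d'))  # 2017-04-03, the subdirectory name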
c74e5bc942ae2486199070ec213fa6d692a2a605
|
docs/source/examples/test_modify_array_argument_reals.py
|
docs/source/examples/test_modify_array_argument_reals.py
|
#!/usr/bin/env python
import numpy as np
from pych.extern import Chapel
@Chapel()
def printArray(arr=np.ndarray):
"""
arr += 1;
writeln(arr);
"""
return None
if __name__ == "__main__":
arr = np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0])
print arr
printArray(arr);
print arr
import testcase
# contains the general testing method, which allows us to gather output
import os.path
def test_modify_array_argument_reals():
out = testcase.runpy(os.path.realpath(__file__))
# The first time this test is run, it may contain output notifying that
# a temporary file has been created. The important part is that this
# expected output follows it (enabling the test to work for all runs, as
# the temporary file message won't occur in the second run) But that means
# we can't use ==
assert out.endswith("[ 2. 3. 4. 5. 6. 7. 8. 9. 10. 11.]\n");
|
#!/usr/bin/env python
import numpy as np
from pych.extern import Chapel
@Chapel()
def printArray(arr=np.ndarray):
"""
arr += 1;
writeln(arr);
"""
return None
if __name__ == "__main__":
arr = np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0])
print arr
printArray(arr);
print arr
import testcase
# contains the general testing method, which allows us to gather output
import os.path
def test_modify_array_argument_reals():
out = testcase.runpy(os.path.realpath(__file__))
# The first time this test is run, it may contain output notifying that
# a temporary file has been created. The important part is that this
# expected output follows it (enabling the test to work for all runs, as
# the temporary file message won't occur in the second run) But that means
# we can't use ==
assert out.endswith("[ 2. 3. 4. 5. 6. 7. 8. 9. 10. 11.]\n");
|
Fix trivial unit test error that started when numpy version changed
|
Fix trivial unit test error that started when numpy version changed
The version of numpy brought in by pip changed, which changed some
test outputs in meaningless ways (i.e., spaces), and
those meaningless changes broke a unit test with
assertEquals( "string literal", test(something) )
This change fixes the unit test error, without at all addressing
the underlying issue (hyper-sensitive string comparison).
|
Python
|
apache-2.0
|
russel/pychapel,russel/pychapel,chapel-lang/pychapel,chapel-lang/pychapel,russel/pychapel,chapel-lang/pychapel
|
#!/usr/bin/env python
import numpy as np
from pych.extern import Chapel
@Chapel()
def printArray(arr=np.ndarray):
"""
arr += 1;
writeln(arr);
"""
return None
if __name__ == "__main__":
arr = np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0])
print arr
printArray(arr);
print arr
import testcase
# contains the general testing method, which allows us to gather output
import os.path
def test_modify_array_argument_reals():
out = testcase.runpy(os.path.realpath(__file__))
# The first time this test is run, it may contain output notifying that
# a temporary file has been created. The important part is that this
# expected output follows it (enabling the test to work for all runs, as
# the temporary file message won't occur in the second run) But that means
# we can't use ==
assert out.endswith("[ 2. 3. 4. 5. 6. 7. 8. 9. 10. 11.]\n");
Fix trivial unit test error that started when numpy version changed
The version of numpy brought in by pip changed, which changed some
test outputs in meaningless ways (i.e., spaces), and
those meaningless changes broke a unit test with
assertEquals( "string literal", test(something) )
This change fixes the unit test error, without at all addressing
the underlying issue (hyper-sensitive string comparison).
|
#!/usr/bin/env python
import numpy as np
from pych.extern import Chapel
@Chapel()
def printArray(arr=np.ndarray):
"""
arr += 1;
writeln(arr);
"""
return None
if __name__ == "__main__":
arr = np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0])
print arr
printArray(arr);
print arr
import testcase
# contains the general testing method, which allows us to gather output
import os.path
def test_modify_array_argument_reals():
out = testcase.runpy(os.path.realpath(__file__))
# The first time this test is run, it may contain output notifying that
# a temporary file has been created. The important part is that this
# expected output follows it (enabling the test to work for all runs, as
# the temporary file message won't occur in the second run) But that means
# we can't use ==
assert out.endswith("[ 2. 3. 4. 5. 6. 7. 8. 9. 10. 11.]\n");
|
<commit_before>#!/usr/bin/env python
import numpy as np
from pych.extern import Chapel
@Chapel()
def printArray(arr=np.ndarray):
"""
arr += 1;
writeln(arr);
"""
return None
if __name__ == "__main__":
arr = np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0])
print arr
printArray(arr);
print arr
import testcase
# contains the general testing method, which allows us to gather output
import os.path
def test_modify_array_argument_reals():
out = testcase.runpy(os.path.realpath(__file__))
# The first time this test is run, it may contain output notifying that
# a temporary file has been created. The important part is that this
# expected output follows it (enabling the test to work for all runs, as
# the temporary file message won't occur in the second run) But that means
# we can't use ==
assert out.endswith("[ 2. 3. 4. 5. 6. 7. 8. 9. 10. 11.]\n");
<commit_msg>Fix trivial unit test error that started when numpy version changed
The version of numpy brought in by pip changed, which changed some
test outputs in meaningless ways (i.e., spaces), and
those meaningless changes broke a unit test with
assertEquals( "string literal", test(something) )
This change fixes the unit test error, without at all addressing
the underlying issue (hyper-sensitive string comparison).<commit_after>
|
#!/usr/bin/env python
import numpy as np
from pych.extern import Chapel
@Chapel()
def printArray(arr=np.ndarray):
"""
arr += 1;
writeln(arr);
"""
return None
if __name__ == "__main__":
arr = np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0])
print arr
printArray(arr);
print arr
import testcase
# contains the general testing method, which allows us to gather output
import os.path
def test_modify_array_argument_reals():
out = testcase.runpy(os.path.realpath(__file__))
# The first time this test is run, it may contain output notifying that
# a temporary file has been created. The important part is that this
# expected output follows it (enabling the test to work for all runs, as
# the temporary file message won't occur in the second run) But that means
# we can't use ==
assert out.endswith("[ 2. 3. 4. 5. 6. 7. 8. 9. 10. 11.]\n");
|
#!/usr/bin/env python
import numpy as np
from pych.extern import Chapel
@Chapel()
def printArray(arr=np.ndarray):
"""
arr += 1;
writeln(arr);
"""
return None
if __name__ == "__main__":
arr = np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0])
print arr
printArray(arr);
print arr
import testcase
# contains the general testing method, which allows us to gather output
import os.path
def test_modify_array_argument_reals():
out = testcase.runpy(os.path.realpath(__file__))
# The first time this test is run, it may contain output notifying that
# a temporary file has been created. The important part is that this
# expected output follows it (enabling the test to work for all runs, as
# the temporary file message won't occur in the second run) But that means
# we can't use ==
assert out.endswith("[ 2. 3. 4. 5. 6. 7. 8. 9. 10. 11.]\n");
Fix trivial unit test error that started when numpy version changed
The version of numpy brought in by pip changed, which changed some
test outputs in meaningless ways (i.e., spaces), and
those meaningless changes broke a unit test with
assertEquals( "string literal", test(something) )
This change fixes the unit test error, without at all addressing
the underlying issue (hyper-sensitive string comparison).#!/usr/bin/env python
import numpy as np
from pych.extern import Chapel
@Chapel()
def printArray(arr=np.ndarray):
"""
arr += 1;
writeln(arr);
"""
return None
if __name__ == "__main__":
arr = np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0])
print arr
printArray(arr);
print arr
import testcase
# contains the general testing method, which allows us to gather output
import os.path
def test_modify_array_argument_reals():
out = testcase.runpy(os.path.realpath(__file__))
# The first time this test is run, it may contain output notifying that
# a temporary file has been created. The important part is that this
# expected output follows it (enabling the test to work for all runs, as
# the temporary file message won't occur in the second run) But that means
# we can't use ==
assert out.endswith("[ 2. 3. 4. 5. 6. 7. 8. 9. 10. 11.]\n");
|
<commit_before>#!/usr/bin/env python
import numpy as np
from pych.extern import Chapel
@Chapel()
def printArray(arr=np.ndarray):
"""
arr += 1;
writeln(arr);
"""
return None
if __name__ == "__main__":
arr = np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0])
print arr
printArray(arr);
print arr
import testcase
# contains the general testing method, which allows us to gather output
import os.path
def test_modify_array_argument_reals():
out = testcase.runpy(os.path.realpath(__file__))
# The first time this test is run, it may contain output notifying that
# a temporary file has been created. The important part is that this
# expected output follows it (enabling the test to work for all runs, as
# the temporary file message won't occur in the second run) But that means
# we can't use ==
assert out.endswith("[ 2. 3. 4. 5. 6. 7. 8. 9. 10. 11.]\n");
<commit_msg>Fix trivial unit test error that started when numpy version changed
The version of numpy brought in by pip changed, which changed some
test outputs in meaningless ways (i.e., spaces), and
those meaningless changes broke a unit test with
assertEquals( "string literal", test(something) )
This change fixes the unit test error, without at all addressing
the underlying issue (hyper-sensitive string comparison).<commit_after>#!/usr/bin/env python
import numpy as np
from pych.extern import Chapel
@Chapel()
def printArray(arr=np.ndarray):
"""
arr += 1;
writeln(arr);
"""
return None
if __name__ == "__main__":
arr = np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0])
print arr
printArray(arr);
print arr
import testcase
# contains the general testing method, which allows us to gather output
import os.path
def test_modify_array_argument_reals():
out = testcase.runpy(os.path.realpath(__file__))
# The first time this test is run, it may contain output notifying that
# a temporary file has been created. The important part is that this
# expected output follows it (enabling the test to work for all runs, as
# the temporary file message won't occur in the second run) But that means
# we can't use ==
assert out.endswith("[ 2. 3. 4. 5. 6. 7. 8. 9. 10. 11.]\n");
|
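The message deliberately leaves the underlying issue, the hyper-sensitive string comparison, unaddressed; one way to harden such a test is to normalize whitespace before comparing. This is a sketch of that idea, not the project's actual fix:
def normalize_ws(s):
    # Collapse runs of whitespace so numpy's spacing changes stop mattering.
    return ' '.join(s.split())

assert normalize_ws('[ 2.  3.  4.]') == normalize_ws('[ 2. 3. 4.]')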
6ed487c8356a48f728b0ae66ac7bed4c74499c6f
|
ereuse_devicehub/scripts/updates/fix_add_migrate_from.py
|
ereuse_devicehub/scripts/updates/fix_add_migrate_from.py
|
from datetime import datetime
from bson import ObjectId
from flask import current_app
from pydash import filter_
from pydash import map_
from ereuse_devicehub.resources.device.component.settings import Component
from ereuse_devicehub.resources.device.domain import DeviceDomain
from ereuse_devicehub.resources.event.device import DeviceEventDomain
from ereuse_devicehub.scripts.updates.re_materialize_events_in_devices import ReMaterializeEventsInDevices
class FixAddMigrateFrom(ReMaterializeEventsInDevices):
"""Re-does internally a Migrate that was erroniously partly-erased"""
def execute(self, database):
if database == 'alencop':
DeviceEventDomain.delete({"_id": ObjectId('58e283091f632f56a18ca413')})
ids = list(range(1348, 1448))
ids = [str(_id) for _id in ids]
devices = DeviceDomain.get({'_id': {'$in': ids}})
non_components = filter_(devices, lambda x: x['@type'] not in Component.types)
components = filter_(devices, lambda x: x['@type'] in Component.types)
migrate = {
'_id': ObjectId('58e283091f632f56a18ca413'),
'events': [],
'devices': map_(non_components, '_id'),
'components': map_(components, '_id'),
'from': "https://devicehub.ereuse.org/circuit-reutilitza-cat/events/devices/migrate/569c285e75d9351f7f82f668",
"comment": "Transferència de la donació de Barcelona Activa al Circuit Pangea que Alencop va recollir.",
"label": "Barcelona Activa (Març) a Alencop.",
'_updated': datetime.strptime("2017-04-03T17:14:37", current_app.config['DATE_FORMAT']),
'_created': datetime.strptime("2017-04-03T17:14:37", current_app.config['DATE_FORMAT']),
'@type': 'devices:Migrate'
}
DeviceEventDomain.insert(migrate)
super(FixAddMigrateFrom, self).execute(database)
|
Add a fix FixAddMigrateFrom to solve a specific Migrate Problem
|
Add a fix FixAddMigrateFrom to solve a specific Migrate Problem
|
Python
|
agpl-3.0
|
eReuse/DeviceHub,eReuse/DeviceHub
|
Add a fix FixAddMigrateFrom to solve a specific Migrate Problem
|
from datetime import datetime
from bson import ObjectId
from flask import current_app
from pydash import filter_
from pydash import map_
from ereuse_devicehub.resources.device.component.settings import Component
from ereuse_devicehub.resources.device.domain import DeviceDomain
from ereuse_devicehub.resources.event.device import DeviceEventDomain
from ereuse_devicehub.scripts.updates.re_materialize_events_in_devices import ReMaterializeEventsInDevices
class FixAddMigrateFrom(ReMaterializeEventsInDevices):
"""Re-does internally a Migrate that was erroniously partly-erased"""
def execute(self, database):
if database == 'alencop':
DeviceEventDomain.delete({"_id": ObjectId('58e283091f632f56a18ca413')})
ids = list(range(1348, 1448))
ids = [str(_id) for _id in ids]
devices = DeviceDomain.get({'_id': {'$in': ids}})
non_components = filter_(devices, lambda x: x['@type'] not in Component.types)
components = filter_(devices, lambda x: x['@type'] in Component.types)
migrate = {
'_id': ObjectId('58e283091f632f56a18ca413'),
'events': [],
'devices': map_(non_components, '_id'),
'components': map_(components, '_id'),
'from': "https://devicehub.ereuse.org/circuit-reutilitza-cat/events/devices/migrate/569c285e75d9351f7f82f668",
"comment": "Transferència de la donació de Barcelona Activa al Circuit Pangea que Alencop va recollir.",
"label": "Barcelona Activa (Març) a Alencop.",
'_updated': datetime.strptime("2017-04-03T17:14:37", current_app.config['DATE_FORMAT']),
'_created': datetime.strptime("2017-04-03T17:14:37", current_app.config['DATE_FORMAT']),
'@type': 'devices:Migrate'
}
DeviceEventDomain.insert(migrate)
super(FixAddMigrateFrom, self).execute(database)
|
<commit_before><commit_msg>Add a fix FixAddMigrateFrom to solve a specific Migrate Problem<commit_after>
|
from datetime import datetime
from bson import ObjectId
from flask import current_app
from pydash import filter_
from pydash import map_
from ereuse_devicehub.resources.device.component.settings import Component
from ereuse_devicehub.resources.device.domain import DeviceDomain
from ereuse_devicehub.resources.event.device import DeviceEventDomain
from ereuse_devicehub.scripts.updates.re_materialize_events_in_devices import ReMaterializeEventsInDevices
class FixAddMigrateFrom(ReMaterializeEventsInDevices):
"""Re-does internally a Migrate that was erroniously partly-erased"""
def execute(self, database):
if database == 'alencop':
DeviceEventDomain.delete({"_id": ObjectId('58e283091f632f56a18ca413')})
ids = list(range(1348, 1448))
ids = [str(_id) for _id in ids]
devices = DeviceDomain.get({'_id': {'$in': ids}})
non_components = filter_(devices, lambda x: x['@type'] not in Component.types)
components = filter_(devices, lambda x: x['@type'] in Component.types)
migrate = {
'_id': ObjectId('58e283091f632f56a18ca413'),
'events': [],
'devices': map_(non_components, '_id'),
'components': map_(components, '_id'),
'from': "https://devicehub.ereuse.org/circuit-reutilitza-cat/events/devices/migrate/569c285e75d9351f7f82f668",
"comment": "Transferència de la donació de Barcelona Activa al Circuit Pangea que Alencop va recollir.",
"label": "Barcelona Activa (Març) a Alencop.",
'_updated': datetime.strptime("2017-04-03T17:14:37", current_app.config['DATE_FORMAT']),
'_created': datetime.strptime("2017-04-03T17:14:37", current_app.config['DATE_FORMAT']),
'@type': 'devices:Migrate'
}
DeviceEventDomain.insert(migrate)
super(FixAddMigrateFrom, self).execute(database)
|
Add a fix FixAddMigrateFrom to solve a specific Migrate Problemfrom datetime import datetime
from bson import ObjectId
from flask import current_app
from pydash import filter_
from pydash import map_
from ereuse_devicehub.resources.device.component.settings import Component
from ereuse_devicehub.resources.device.domain import DeviceDomain
from ereuse_devicehub.resources.event.device import DeviceEventDomain
from ereuse_devicehub.scripts.updates.re_materialize_events_in_devices import ReMaterializeEventsInDevices
class FixAddMigrateFrom(ReMaterializeEventsInDevices):
"""Re-does internally a Migrate that was erroniously partly-erased"""
def execute(self, database):
if database == 'alencop':
DeviceEventDomain.delete({"_id": ObjectId('58e283091f632f56a18ca413')})
ids = list(range(1348, 1448))
ids = [str(_id) for _id in ids]
devices = DeviceDomain.get({'_id': {'$in': ids}})
non_components = filter_(devices, lambda x: x['@type'] not in Component.types)
components = filter_(devices, lambda x: x['@type'] in Component.types)
migrate = {
'_id': ObjectId('58e283091f632f56a18ca413'),
'events': [],
'devices': map_(non_components, '_id'),
'components': map_(components, '_id'),
'from': "https://devicehub.ereuse.org/circuit-reutilitza-cat/events/devices/migrate/569c285e75d9351f7f82f668",
"comment": "Transferència de la donació de Barcelona Activa al Circuit Pangea que Alencop va recollir.",
"label": "Barcelona Activa (Març) a Alencop.",
'_updated': datetime.strptime("2017-04-03T17:14:37", current_app.config['DATE_FORMAT']),
'_created': datetime.strptime("2017-04-03T17:14:37", current_app.config['DATE_FORMAT']),
'@type': 'devices:Migrate'
}
DeviceEventDomain.insert(migrate)
super(FixAddMigrateFrom, self).execute(database)
|
<commit_before><commit_msg>Add a fix FixAddMigrateFrom to solve a specific Migrate Problem<commit_after>from datetime import datetime
from bson import ObjectId
from flask import current_app
from pydash import filter_
from pydash import map_
from ereuse_devicehub.resources.device.component.settings import Component
from ereuse_devicehub.resources.device.domain import DeviceDomain
from ereuse_devicehub.resources.event.device import DeviceEventDomain
from ereuse_devicehub.scripts.updates.re_materialize_events_in_devices import ReMaterializeEventsInDevices
class FixAddMigrateFrom(ReMaterializeEventsInDevices):
"""Re-does internally a Migrate that was erroniously partly-erased"""
def execute(self, database):
if database == 'alencop':
DeviceEventDomain.delete({"_id": ObjectId('58e283091f632f56a18ca413')})
ids = list(range(1348, 1448))
ids = [str(_id) for _id in ids]
devices = DeviceDomain.get({'_id': {'$in': ids}})
non_components = filter_(devices, lambda x: x['@type'] not in Component.types)
components = filter_(devices, lambda x: x['@type'] in Component.types)
migrate = {
'_id': ObjectId('58e283091f632f56a18ca413'),
'events': [],
'devices': map_(non_components, '_id'),
'components': map_(components, '_id'),
'from': "https://devicehub.ereuse.org/circuit-reutilitza-cat/events/devices/migrate/569c285e75d9351f7f82f668",
"comment": "Transferència de la donació de Barcelona Activa al Circuit Pangea que Alencop va recollir.",
"label": "Barcelona Activa (Març) a Alencop.",
'_updated': datetime.strptime("2017-04-03T17:14:37", current_app.config['DATE_FORMAT']),
'_created': datetime.strptime("2017-04-03T17:14:37", current_app.config['DATE_FORMAT']),
'@type': 'devices:Migrate'
}
DeviceEventDomain.insert(migrate)
super(FixAddMigrateFrom, self).execute(database)
|
|
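For readers unfamiliar with pydash: filter_ and map_ as used above behave like plain comprehensions, with map_(collection, '_id') plucking a key from each element. A sketch of the semantics on hypothetical device records (the '@type' values below are invented for illustration):
devices = [{'_id': '1350', '@type': 'Computer'}, {'_id': '1351', '@type': 'HardDrive'}]
component_types = {'HardDrive'}
non_components = [d for d in devices if d['@type'] not in component_types]
ids = [d['_id'] for d in non_components]  # what map_(non_components, '_id') returns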
07243c98efa12beb03387adead84e1b9297de2ac
|
evesrp/migrate/versions/337978f8c75_restrict_request_base_payout_to_be_0.py
|
evesrp/migrate/versions/337978f8c75_restrict_request_base_payout_to_be_0.py
|
"""Restrict request.base_payout to be >= 0
Revision ID: 337978f8c75
Revises: c1fc69b629
Create Date: 2014-06-18 14:04:52.963890
"""
# revision identifiers, used by Alembic.
revision = '337978f8c75'
down_revision = 'c1fc69b629'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import update, select, table, column
request = table('request',
column('id', sa.Integer),
column('base_payout', sa.Float),
)
def upgrade():
conn = op.get_bind()
negative_base_payout_id_sel = select([request.c.id])\
.where(request.c.base_payout < 0.0)
negative_ids = conn.execute(negative_base_payout_id_sel)
for result_row in negative_ids:
negative_id = result_row[0]
update_stmt = update(request)\
.where(request.c.id == negative_id)\
.values({
'base_payout': 0.0,
})
conn.execute(update_stmt)
negative_ids.close()
def downgrade():
# This is a lossy upgrade, no downgrading possible
pass
|
Add migration script for enforcing a minimum base_payout of 0
|
Add migration script for enforcing a minimum base_payout of 0
Should've been committed with c817d4df
|
Python
|
bsd-2-clause
|
eskwire/evesrp,paxswill/evesrp,paxswill/evesrp,eskwire/evesrp,eskwire/evesrp,eskwire/evesrp,paxswill/evesrp
|
Add migration script for enforcing a minimum base_payout of 0
Should've been committed with c817d4df
|
"""Restrict request.base_payout to be >= 0
Revision ID: 337978f8c75
Revises: c1fc69b629
Create Date: 2014-06-18 14:04:52.963890
"""
# revision identifiers, used by Alembic.
revision = '337978f8c75'
down_revision = 'c1fc69b629'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import update, select, table, column
request = table('request',
column('id', sa.Integer),
column('base_payout', sa.Float),
)
def upgrade():
conn = op.get_bind()
negative_base_payout_id_sel = select([request.c.id])\
.where(request.c.base_payout < 0.0)
negative_ids = conn.execute(negative_base_payout_id_sel)
for result_row in negative_ids:
negative_id = result_row[0]
update_stmt = update(request)\
.where(request.c.id == negative_id)\
.values({
'base_payout': 0.0,
})
conn.execute(update_stmt)
negative_ids.close()
def downgrade():
# This is a lossy upgrade, no downgrading possible
pass
|
<commit_before><commit_msg>Add migration script for enforcing a minimum base_payout of 0
Should've been committed with c817d4df<commit_after>
|
"""Restrict request.base_payout to be >= 0
Revision ID: 337978f8c75
Revises: c1fc69b629
Create Date: 2014-06-18 14:04:52.963890
"""
# revision identifiers, used by Alembic.
revision = '337978f8c75'
down_revision = 'c1fc69b629'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import update, select, table, column
request = table('request',
column('id', sa.Integer),
column('base_payout', sa.Float),
)
def upgrade():
conn = op.get_bind()
negative_base_payout_id_sel = select([request.c.id])\
.where(request.c.base_payout < 0.0)
negative_ids = conn.execute(negative_base_payout_id_sel)
for result_row in negative_ids:
negative_id = result_row[0]
update_stmt = update(request)\
.where(request.c.id == negative_id)\
.values({
'base_payout': 0.0,
})
conn.execute(update_stmt)
negative_ids.close()
def downgrade():
# This is a lossy upgrade, no downgrading possible
pass
|
Add migration script for enforcing a minimum base_payout of 0
Should've been committed with c817d4df"""Restrict request.base_payout to be >= 0
Revision ID: 337978f8c75
Revises: c1fc69b629
Create Date: 2014-06-18 14:04:52.963890
"""
# revision identifiers, used by Alembic.
revision = '337978f8c75'
down_revision = 'c1fc69b629'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import update, select, table, column
request = table('request',
column('id', sa.Integer),
column('base_payout', sa.Float),
)
def upgrade():
conn = op.get_bind()
negative_base_payout_id_sel = select([request.c.id])\
.where(request.c.base_payout < 0.0)
negative_ids = conn.execute(negative_base_payout_id_sel)
for result_row in negative_ids:
negative_id = result_row[0]
update_stmt = update(request)\
.where(request.c.id == negative_id)\
.values({
'base_payout': 0.0,
})
conn.execute(update_stmt)
negative_ids.close()
def downgrade():
# This is a lossy upgrade, no downgrading possible
pass
|
<commit_before><commit_msg>Add migration script for enforcing a minimum base_payout of 0
Should've been committed with c817d4df<commit_after>"""Restrict request.base_payout to be >= 0
Revision ID: 337978f8c75
Revises: c1fc69b629
Create Date: 2014-06-18 14:04:52.963890
"""
# revision identifiers, used by Alembic.
revision = '337978f8c75'
down_revision = 'c1fc69b629'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import update, select, table, column
request = table('request',
column('id', sa.Integer),
column('base_payout', sa.Float),
)
def upgrade():
conn = op.get_bind()
negative_base_payout_id_sel = select([request.c.id])\
.where(request.c.base_payout < 0.0)
negative_ids = conn.execute(negative_base_payout_id_sel)
for result_row in negative_ids:
negative_id = result_row[0]
update_stmt = update(request)\
.where(request.c.id == negative_id)\
.values({
'base_payout': 0.0,
})
conn.execute(update_stmt)
negative_ids.close()
def downgrade():
# This is a lossy upgrade, no downgrading possible
pass
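
Note on the pattern above: the per-row SELECT-then-UPDATE loop is easy to follow, but the same clamp can be expressed in one statement. A minimal sketch of the bulk-UPDATE alternative (not the committed code), reusing the `request` table helper and imports already defined in the migration:

def upgrade():
    # Clamp every negative base_payout to zero in a single bulk statement.
    op.execute(
        update(request)
        .where(request.c.base_payout < 0.0)
        .values(base_payout=0.0)
    )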
|
|
c339cf70342df088b920eb42aca4e3094fd96938
|
test/test_action.py
|
test/test_action.py
|
#!/usr/bin/env python2.6
#
# This file is used to test reading and processing of config files
#
import os
#It's ugly I know....
from shinken_test import *
from shinken.action import Action
class TestConfig(ShinkenTest):
#setUp is in shinken_test
#Change ME :)
def test_action(self):
a = Action()
a.timeout = 10
if os.name == 'nt':
a.command = "./dummy_command.cmd"
else:
a.command = "./dummy_command.sh"
self.assert_(a.got_shell_caracters() == False)
a.execute()
self.assert_(a.status == 'launched')
#Give also the max output we want for the command
for i in xrange(1, 100):
if a.status == 'launched':
a.check_finished(8012)
self.assert_(a.exit_status == 0)
self.assert_(a.status == 'done')
self.assert_(a.output == "Hi, I'm for testing only. Please do not use me directly, really ")
self.assert_(a.perf_data == " Hip=99% Bob=34mm")
if __name__ == '__main__':
unittest.main()
|
Add a test for actions.
|
Add a test for actions.
|
Python
|
agpl-3.0
|
staute/shinken_package,xorpaul/shinken,kaji-project/shinken,h4wkmoon/shinken,baloo/shinken,lets-software/shinken,KerkhoffTechnologies/shinken,peeyush-tm/shinken,tal-nino/shinken,naparuba/shinken,staute/shinken_deb,savoirfairelinux/shinken,baloo/shinken,dfranco/shinken,ddurieux/alignak,kaji-project/shinken,h4wkmoon/shinken,geektophe/shinken,staute/shinken_deb,Simage/shinken,naparuba/shinken,fpeyre/shinken,xorpaul/shinken,ddurieux/alignak,KerkhoffTechnologies/shinken,Simage/shinken,claneys/shinken,rednach/krill,mohierf/shinken,KerkhoffTechnologies/shinken,staute/shinken_package,geektophe/shinken,Simage/shinken,geektophe/shinken,dfranco/shinken,baloo/shinken,fpeyre/shinken,fpeyre/shinken,peeyush-tm/shinken,claneys/shinken,h4wkmoon/shinken,peeyush-tm/shinken,Aimage/shinken,rednach/krill,savoirfairelinux/shinken,claneys/shinken,naparuba/shinken,kaji-project/shinken,savoirfairelinux/shinken,KerkhoffTechnologies/shinken,lets-software/shinken,KerkhoffTechnologies/shinken,staute/shinken_package,rledisez/shinken,rledisez/shinken,staute/shinken_deb,kaji-project/shinken,staute/shinken_package,xorpaul/shinken,baloo/shinken,kaji-project/shinken,KerkhoffTechnologies/shinken,rledisez/shinken,naparuba/shinken,mohierf/shinken,peeyush-tm/shinken,lets-software/shinken,ddurieux/alignak,savoirfairelinux/shinken,savoirfairelinux/shinken,Simage/shinken,baloo/shinken,h4wkmoon/shinken,dfranco/shinken,mohierf/shinken,Simage/shinken,h4wkmoon/shinken,titilambert/alignak,xorpaul/shinken,ddurieux/alignak,mohierf/shinken,claneys/shinken,claneys/shinken,Aimage/shinken,naparuba/shinken,titilambert/alignak,staute/shinken_package,fpeyre/shinken,dfranco/shinken,Simage/shinken,rledisez/shinken,rledisez/shinken,claneys/shinken,staute/shinken_deb,baloo/shinken,geektophe/shinken,staute/shinken_deb,staute/shinken_package,gst/alignak,rednach/krill,peeyush-tm/shinken,h4wkmoon/shinken,titilambert/alignak,gst/alignak,rednach/krill,mohierf/shinken,rledisez/shinken,gst/alignak,Aimage/shinken,dfranco/shinken,savoirfairelinux/shinken,lets-software/shinken,kaji-project/shinken,fpeyre/shinken,xorpaul/shinken,staute/shinken_deb,geektophe/shinken,Aimage/shinken,xorpaul/shinken,geektophe/shinken,gst/alignak,ddurieux/alignak,peeyush-tm/shinken,tal-nino/shinken,naparuba/shinken,ddurieux/alignak,mohierf/shinken,xorpaul/shinken,dfranco/shinken,Alignak-monitoring/alignak,titilambert/alignak,Alignak-monitoring/alignak,rednach/krill,h4wkmoon/shinken,tal-nino/shinken,tal-nino/shinken,rednach/krill,tal-nino/shinken,Aimage/shinken,lets-software/shinken,Aimage/shinken,xorpaul/shinken,h4wkmoon/shinken,fpeyre/shinken,lets-software/shinken,kaji-project/shinken,tal-nino/shinken
|
Add a test for actions.
|
#!/usr/bin/env python2.6
#
# This file is used to test reading and processing of config files
#
import os
#It's ugly I know....
from shinken_test import *
from shinken.action import Action
class TestConfig(ShinkenTest):
#setUp is in shinken_test
#Change ME :)
def test_action(self):
a = Action()
a.timeout = 10
if os.name == 'nt':
a.command = "./dummy_command.cmd"
else:
a.command = "./dummy_command.sh"
self.assert_(a.got_shell_caracters() == False)
a.execute()
self.assert_(a.status == 'launched')
#Give also the max output we want for the command
for i in xrange(1, 100):
if a.status == 'launched':
a.check_finished(8012)
self.assert_(a.exit_status == 0)
self.assert_(a.status == 'done')
self.assert_(a.output == "Hi, I'm for testing only. Please do not use me directly, really ")
self.assert_(a.perf_data == " Hip=99% Bob=34mm")
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add a test for actions.<commit_after>
|
#!/usr/bin/env python2.6
#
# This file is used to test reading and processing of config files
#
import os
#It's ugly I know....
from shinken_test import *
from shinken.action import Action
class TestConfig(ShinkenTest):
#setUp is in shinken_test
#Change ME :)
def test_action(self):
a = Action()
a.timeout = 10
if os.name == 'nt':
a.command = "./dummy_command.cmd"
else:
a.command = "./dummy_command.sh"
self.assert_(a.got_shell_caracters() == False)
a.execute()
self.assert_(a.status == 'launched')
#Give also the max output we want for the command
for i in xrange(1, 100):
if a.status == 'launched':
a.check_finished(8012)
self.assert_(a.exit_status == 0)
self.assert_(a.status == 'done')
self.assert_(a.output == "Hi, I'm for testing only. Please do not use me directly, really ")
self.assert_(a.perf_data == " Hip=99% Bob=34mm")
if __name__ == '__main__':
unittest.main()
|
Add a test for actions.#!/usr/bin/env python2.6
#
# This file is used to test reading and processing of config files
#
import os
#It's ugly I know....
from shinken_test import *
from shinken.action import Action
class TestConfig(ShinkenTest):
#setUp is in shinken_test
#Change ME :)
def test_action(self):
a = Action()
a.timeout = 10
if os.name == 'nt':
a.command = "./dummy_command.cmd"
else:
a.command = "./dummy_command.sh"
self.assert_(a.got_shell_caracters() == False)
a.execute()
self.assert_(a.status == 'launched')
#Give also the max output we want for the command
for i in xrange(1, 100):
if a.status == 'launched':
a.check_finished(8012)
self.assert_(a.exit_status == 0)
self.assert_(a.status == 'done')
self.assert_(a.output == "Hi, I'm for testing only. Please do not use me directly, really ")
self.assert_(a.perf_data == " Hip=99% Bob=34mm")
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add a test for actions.<commit_after>#!/usr/bin/env python2.6
#
# This file is used to test reading and processing of config files
#
import os
#It's ugly I know....
from shinken_test import *
from shinken.action import Action
class TestConfig(ShinkenTest):
#setUp is in shinken_test
#Change ME :)
def test_action(self):
a = Action()
a.timeout = 10
if os.name == 'nt':
a.command = "./dummy_command.cmd"
else:
a.command = "./dummy_command.sh"
self.assert_(a.got_shell_caracters() == False)
a.execute()
self.assert_(a.status == 'launched')
#Give also the max output we want for the command
for i in xrange(1, 100):
if a.status == 'launched':
a.check_finished(8012)
self.assert_(a.exit_status == 0)
self.assert_(a.status == 'done')
self.assert_(a.output == "Hi, I'm for testing only. Please do not use me directly, really ")
self.assert_(a.perf_data == " Hip=99% Bob=34mm")
if __name__ == '__main__':
unittest.main()
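
A note on the lifecycle exercised above: shinken's Action moves from 'launched' to 'done' once check_finished() observes the child process exiting, and, judging by the assertions, the raw output is split on '|' into output and perf_data (the Nagios plugin convention). A hedged helper sketching the same polling pattern with a short sleep instead of a fixed-count loop (assumes shinken is importable and `cmd` is any quick shell command):

import time
from shinken.action import Action

def run_action(cmd, timeout=10, max_output=8012):
    # Launch the command, then poll until shinken marks it finished.
    a = Action()
    a.timeout = timeout
    a.command = cmd
    a.execute()
    while a.status == 'launched':
        a.check_finished(max_output)
        time.sleep(0.01)  # avoid spinning while the child runs
    return a.exit_status, a.output, a.perf_data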
|
|
fe40ce2d041f04507c2a6a2e60ec3e7cb83ebca5
|
test/test_inject.py
|
test/test_inject.py
|
import sys
import unittest
from io import StringIO
from unittest.mock import patch
try:
from unittest.mock import MagicMock, mock_open
except ImportError:
from mock import MagicMock, mock_open
from sleuth.inject import _Break, _Call, _Inject, _Log, _Print
class TestInjectionActions(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def _get_test_frame(self):
"""
Return this function's execution frame object for testing purposes.
This function sets the following local variables:
message = 'Hello Sleuth!'
magic_number = 42
"""
message = 'Hello Sleuth!'
magic_number = 42
return sys._getframe()
def test_Print(self):
with patch('sys.stdout', new=StringIO()) as fake_stdout:
# Create the action
fmtStr = 'PRINT INJECTION TEST'
action = _Print(fmtStr)
# Perform the action
frame = self._get_test_frame()
expected_out = fmtStr
action(frame)
self.assertEqual(fake_stdout.getvalue().strip(), expected_out)
def test_Print_formatting(self):
with patch('sys.stdout', new=StringIO()) as fake_stdout:
# Create the action
fmtStr = '{message} {magic_number}'
action = _Print(fmtStr)
# Perform the action
frame = self._get_test_frame()
expected_out = '{message} {magic_number}'.format(**frame.f_locals)
action(frame)
self.assertEqual(fake_stdout.getvalue().strip(), expected_out)
def test_Print_to_file(self):
fake_open = mock_open(mock=MagicMock())
with patch('sleuth.inject.open', fake_open, create=True):
# Create the action
fmtStr = '{message} {magic_number}'
action = _Print(fmtStr, file='junk.txt')
# Perform the action
frame = self._get_test_frame()
expected_out = '{message} {magic_number}'.format(**frame.f_locals)
action(frame)
fake_file = fake_open.return_value.__enter__.return_value
fake_file.write.assert_any_call(expected_out)
def _fake_open(*args, **kwargs):
raise Exception()
return StringIO()
if __name__ == '__main__':
unittest.main()
|
Add tests for print injection.
|
Add tests for print injection.
|
Python
|
mit
|
emrob/sleuth
|
Add tests for print injection.
|
import sys
import unittest
from io import StringIO
from unittest.mock import patch
try:
from unittest.mock import MagicMock, mock_open
except ImportError:
from mock import MagicMock, mock_open
from sleuth.inject import _Break, _Call, _Inject, _Log, _Print
class TestInjectionActions(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def _get_test_frame(self):
"""
Return this function's execution frame object for testing purposes.
This function sets the following local variables:
message = 'Hello Sleuth!'
magic_number = 42
"""
message = 'Hello Sleuth!'
magic_number = 42
return sys._getframe()
def test_Print(self):
with patch('sys.stdout', new=StringIO()) as fake_stdout:
# Create the action
fmtStr = 'PRINT INJECTION TEST'
action = _Print(fmtStr)
# Perform the action
frame = self._get_test_frame()
expected_out = fmtStr
action(frame)
self.assertEqual(fake_stdout.getvalue().strip(), expected_out)
def test_Print_formatting(self):
with patch('sys.stdout', new=StringIO()) as fake_stdout:
# Create the action
fmtStr = '{message} {magic_number}'
action = _Print(fmtStr)
# Perform the action
frame = self._get_test_frame()
expected_out = '{message} {magic_number}'.format(**frame.f_locals)
action(frame)
self.assertEqual(fake_stdout.getvalue().strip(), expected_out)
def test_Print_to_file(self):
fake_open = mock_open(mock=MagicMock())
with patch('sleuth.inject.open', fake_open, create=True):
# Create the action
fmtStr = '{message} {magic_number}'
action = _Print(fmtStr, file='junk.txt')
# Perform the action
frame = self._get_test_frame()
expected_out = '{message} {magic_number}'.format(**frame.f_locals)
action(frame)
fake_file = fake_open.return_value.__enter__.return_value
fake_file.write.assert_any_call(expected_out)
def _fake_open(*args, **kwargs):
raise Exception()
return StringIO()
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add tests for print injection.<commit_after>
|
import sys
import unittest
from io import StringIO
from unittest.mock import patch
try:
from unittest.mock import MagicMock, mock_open
except ImportError:
from mock import MagicMock, mock_open
from sleuth.inject import _Break, _Call, _Inject, _Log, _Print
class TestInjectionActions(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def _get_test_frame(self):
"""
Return this function's execution frame object for testing purposes.
This function sets the following local variables:
message = 'Hello Sleuth!'
magic_number = 42
"""
message = 'Hello Sleuth!'
magic_number = 42
return sys._getframe()
def test_Print(self):
with patch('sys.stdout', new=StringIO()) as fake_stdout:
# Create the action
fmtStr = 'PRINT INJECTION TEST'
action = _Print(fmtStr)
# Perform the action
frame = self._get_test_frame()
expected_out = fmtStr
action(frame)
self.assertEqual(fake_stdout.getvalue().strip(), expected_out)
def test_Print_formatting(self):
with patch('sys.stdout', new=StringIO()) as fake_stdout:
# Create the action
fmtStr = '{message} {magic_number}'
action = _Print(fmtStr)
# Perform the action
frame = self._get_test_frame()
expected_out = '{message} {magic_number}'.format(**frame.f_locals)
action(frame)
self.assertEqual(fake_stdout.getvalue().strip(), expected_out)
def test_Print_to_file(self):
fake_open = mock_open(mock=MagicMock())
with patch('sleuth.inject.open', fake_open, create=True):
# Create the action
fmtStr = '{message} {magic_number}'
action = _Print(fmtStr, file='junk.txt')
# Perform the action
frame = self._get_test_frame()
expected_out = '{message} {magic_number}'.format(**frame.f_locals)
action(frame)
fake_file = fake_open.return_value.__enter__.return_value
fake_file.write.assert_any_call(expected_out)
def _fake_open(*args, **kwargs):
raise Exception()
return StringIO()
if __name__ == '__main__':
unittest.main()
|
Add tests for print injection.import sys
import unittest
from io import StringIO
from unittest.mock import patch
try:
from unittest.mock import MagicMock, mock_open
except ImportError:
from mock import MagicMock, mock_open
from sleuth.inject import _Break, _Call, _Inject, _Log, _Print
class TestInjectionActions(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def _get_test_frame(self):
"""
Return this function's execution frame object for testing purposes.
This function sets the following local variables:
message = 'Hello Sleuth!'
magic_number = 42
"""
message = 'Hello Sleuth!'
magic_number = 42
return sys._getframe()
def test_Print(self):
with patch('sys.stdout', new=StringIO()) as fake_stdout:
# Create the action
fmtStr = 'PRINT INJECTION TEST'
action = _Print(fmtStr)
# Perform the action
frame = self._get_test_frame()
expected_out = fmtStr
action(frame)
self.assertEqual(fake_stdout.getvalue().strip(), expected_out)
def test_Print_formatting(self):
with patch('sys.stdout', new=StringIO()) as fake_stdout:
# Create the action
fmtStr = '{message} {magic_number}'
action = _Print(fmtStr)
# Perform the action
frame = self._get_test_frame()
expected_out = '{message} {magic_number}'.format(**frame.f_locals)
action(frame)
self.assertEqual(fake_stdout.getvalue().strip(), expected_out)
def test_Print_to_file(self):
fake_open = mock_open(mock=MagicMock())
with patch('sleuth.inject.open', fake_open, create=True):
# Create the action
fmtStr = '{message} {magic_number}'
action = _Print(fmtStr, file='junk.txt')
# Perform the action
frame = self._get_test_frame()
expected_out = '{message} {magic_number}'.format(**frame.f_locals)
action(frame)
fake_file = fake_open.return_value.__enter__.return_value
fake_file.write.assert_any_call(expected_out)
def _fake_open(*args, **kwargs):
raise Exception()
return StringIO()
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add tests for print injection.<commit_after>import sys
import unittest
from io import StringIO
from unittest.mock import patch
try:
from unittest.mock import MagicMock, mock_open
except ImportError:
from mock import MagicMock, mock_open
from sleuth.inject import _Break, _Call, _Inject, _Log, _Print
class TestInjectionActions(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def _get_test_frame(self):
"""
Return this function's execution frame object for testing purposes.
This function sets the following local variables:
message = 'Hello Sleuth!'
magic_number = 42
"""
message = 'Hello Sleuth!'
magic_number = 42
return sys._getframe()
def test_Print(self):
with patch('sys.stdout', new=StringIO()) as fake_stdout:
# Create the action
fmtStr = 'PRINT INJECTION TEST'
action = _Print(fmtStr)
# Perform the action
frame = self._get_test_frame()
expected_out = fmtStr
action(frame)
self.assertEqual(fake_stdout.getvalue().strip(), expected_out)
def test_Print_formatting(self):
with patch('sys.stdout', new=StringIO()) as fake_stdout:
# Create the action
fmtStr = '{message} {magic_number}'
action = _Print(fmtStr)
# Perform the action
frame = self._get_test_frame()
expected_out = '{message} {magic_number}'.format(**frame.f_locals)
action(frame)
self.assertEqual(fake_stdout.getvalue().strip(), expected_out)
def test_Print_to_file(self):
fake_open = mock_open(mock=MagicMock())
with patch('sleuth.inject.open', fake_open, create=True):
# Create the action
fmtStr = '{message} {magic_number}'
action = _Print(fmtStr, file='junk.txt')
# Perform the action
frame = self._get_test_frame()
expected_out = '{message} {magic_number}'.format(**frame.f_locals)
action(frame)
fake_file = fake_open.return_value.__enter__.return_value
fake_file.write.assert_any_call(expected_out)
def _fake_open(*args, **kwargs):
raise Exception()
return StringIO()
if __name__ == '__main__':
unittest.main()
|
|
a3043b147b2281867ad55a4816fafc40bcf329fd
|
pyheufybot/utils/webutils.py
|
pyheufybot/utils/webutils.py
|
import time
from urllib import urlencode
from urllib2 import build_opener, Request, urlopen, URLError
from urlparse import urlparse
class URLResponse(object):
def __init__(self, body, domain):
self.body = body
self.domain = domain
def fetchURL(url, extraHeaders=None):
headers = [( "User-agent", "Mozilla/5.0" )]
if extraHeaders:
for header in extraHeaders:
            headers.append(header)  # append each header tuple, not the whole collection
try:
opener = build_opener()
opener.addheaders = headers
response = opener.open(url)
urlResponse = URLResponse(response.read(), urlparse(response.geturl()).hostname)
return urlResponse
except URLError as e:
today = time.strftime("[%H:%M:%S]")
reason = None
if hasattr(e, "reason"):
reason = "We failed to reach the server, reason: {}".format(e.reason)
elif hasattr(e, "code"):
reason = "The server couldn't fulfill the request, code: {}".format(e.code)
print "{} *** ERROR: Fetch from \"{} \" failed: {}".format(today, url, reason)
def postURL(url, values, extraHeaders=None):
headers = { "User-agent" : "Mozilla/5.0" }
if extraHeaders:
for header in extraHeaders:
headers[header] = extraHeaders[header]
data = urlencode(values)
try:
request = Request(url, data, headers)
response = urlopen(request)
urlResponse = URLResponse(response.read(), urlparse(response.geturl()).hostname)
return urlResponse
except URLError as e:
today = time.strftime("[%H:%M:%S]")
reason = None
if hasattr(e, "reason"):
reason = "We failed to reach the server, reason: {}".format(e.reason)
elif hasattr(e, "code"):
reason = "The server couldn't fulfill the request, code: {}".format(e.code)
print "{} *** ERROR: Post to \"{} \" failed: {}".format(today, url, reason)
|
Add utilities for working with URLs
|
Add utilities for working with URLs
|
Python
|
mit
|
Heufneutje/PyHeufyBot,Heufneutje/PyHeufyBot
|
Add utilities for working with URLs
|
import time
from urllib import urlencode
from urllib2 import build_opener, Request, urlopen, URLError
from urlparse import urlparse
class URLResponse(object):
def __init__(self, body, domain):
self.body = body
self.domain = domain
def fetchURL(url, extraHeaders=None):
headers = [( "User-agent", "Mozilla/5.0" )]
if extraHeaders:
for header in extraHeaders:
            headers.append(header)  # append each header tuple, not the whole collection
try:
opener = build_opener()
opener.addheaders = headers
response = opener.open(url)
urlResponse = URLResponse(response.read(), urlparse(response.geturl()).hostname)
return urlResponse
except URLError as e:
today = time.strftime("[%H:%M:%S]")
reason = None
if hasattr(e, "reason"):
reason = "We failed to reach the server, reason: {}".format(e.reason)
elif hasattr(e, "code"):
reason = "The server couldn't fulfill the request, code: {}".format(e.code)
print "{} *** ERROR: Fetch from \"{} \" failed: {}".format(today, url, reason)
def postURL(url, values, extraHeaders=None):
headers = { "User-agent" : "Mozilla/5.0" }
if extraHeaders:
for header in extraHeaders:
headers[header] = extraHeaders[header]
data = urlencode(values)
try:
request = Request(url, data, headers)
response = urlopen(request)
urlResponse = URLResponse(response.read(), urlparse(response.geturl()).hostname)
return urlResponse
except URLError as e:
today = time.strftime("[%H:%M:%S]")
reason = None
if hasattr(e, "reason"):
reason = "We failed to reach the server, reason: {}".format(e.reason)
elif hasattr(e, "code"):
reason = "The server couldn't fulfill the request, code: {}".format(e.code)
print "{} *** ERROR: Post to \"{} \" failed: {}".format(today, url, reason)
|
<commit_before><commit_msg>Add utilities for working with URLs<commit_after>
|
import time
from urllib import urlencode
from urllib2 import build_opener, Request, urlopen, URLError
from urlparse import urlparse
class URLResponse(object):
def __init__(self, body, domain):
self.body = body
self.domain = domain
def fetchURL(url, extraHeaders=None):
headers = [( "User-agent", "Mozilla/5.0" )]
if extraHeaders:
for header in extraHeaders:
            headers.append(header)  # append each header tuple, not the whole collection
try:
opener = build_opener()
opener.addheaders = headers
response = opener.open(url)
urlResponse = URLResponse(response.read(), urlparse(response.geturl()).hostname)
return urlResponse
except URLError as e:
today = time.strftime("[%H:%M:%S]")
reason = None
if hasattr(e, "reason"):
reason = "We failed to reach the server, reason: {}".format(e.reason)
elif hasattr(e, "code"):
reason = "The server couldn't fulfill the request, code: {}".format(e.code)
print "{} *** ERROR: Fetch from \"{} \" failed: {}".format(today, url, reason)
def postURL(url, values, extraHeaders=None):
headers = { "User-agent" : "Mozilla/5.0" }
if extraHeaders:
for header in extraHeaders:
headers[header] = extraHeaders[header]
data = urlencode(values)
try:
request = Request(url, data, headers)
response = urlopen(request)
urlResponse = URLResponse(response.read(), urlparse(response.geturl()).hostname)
return urlResponse
except URLError as e:
today = time.strftime("[%H:%M:%S]")
reason = None
if hasattr(e, "reason"):
reason = "We failed to reach the server, reason: {}".format(e.reason)
elif hasattr(e, "code"):
reason = "The server couldn't fulfill the request, code: {}".format(e.code)
print "{} *** ERROR: Post to \"{} \" failed: {}".format(today, url, reason)
|
Add utilities for working with URLsimport time
from urllib import urlencode
from urllib2 import build_opener, Request, urlopen, URLError
from urlparse import urlparse
class URLResponse(object):
def __init__(self, body, domain):
self.body = body
self.domain = domain
def fetchURL(url, extraHeaders=None):
headers = [( "User-agent", "Mozilla/5.0" )]
if extraHeaders:
for header in extraHeaders:
            headers.append(header)  # append each header tuple, not the whole collection
try:
opener = build_opener()
opener.addheaders = headers
response = opener.open(url)
urlResponse = URLResponse(response.read(), urlparse(response.geturl()).hostname)
return urlResponse
except URLError as e:
today = time.strftime("[%H:%M:%S]")
reason = None
if hasattr(e, "reason"):
reason = "We failed to reach the server, reason: {}".format(e.reason)
elif hasattr(e, "code"):
reason = "The server couldn't fulfill the request, code: {}".format(e.code)
print "{} *** ERROR: Fetch from \"{} \" failed: {}".format(today, url, reason)
def postURL(url, values, extraHeaders=None):
headers = { "User-agent" : "Mozilla/5.0" }
if extraHeaders:
for header in extraHeaders:
headers[header] = extraHeaders[header]
data = urlencode(values)
try:
request = Request(url, data, headers)
response = urlopen(request)
urlResponse = URLResponse(response.read(), urlparse(response.geturl()).hostname)
return urlResponse
except URLError as e:
today = time.strftime("[%H:%M:%S]")
reason = None
if hasattr(e, "reason"):
reason = "We failed to reach the server, reason: {}".format(e.reason)
elif hasattr(e, "code"):
reason = "The server couldn't fulfill the request, code: {}".format(e.code)
print "{} *** ERROR: Post to \"{} \" failed: {}".format(today, url, reason)
|
<commit_before><commit_msg>Add utilities for working with URLs<commit_after>import time
from urllib import urlencode
from urllib2 import build_opener, Request, urlopen, URLError
from urlparse import urlparse
class URLResponse(object):
def __init__(self, body, domain):
self.body = body
self.domain = domain
def fetchURL(url, extraHeaders=None):
headers = [( "User-agent", "Mozilla/5.0" )]
if extraHeaders:
for header in extraHeaders:
            headers.append(header)  # append each header tuple, not the whole collection
try:
opener = build_opener()
opener.addheaders = headers
response = opener.open(url)
urlResponse = URLResponse(response.read(), urlparse(response.geturl()).hostname)
return urlResponse
except URLError as e:
today = time.strftime("[%H:%M:%S]")
reason = None
if hasattr(e, "reason"):
reason = "We failed to reach the server, reason: {}".format(e.reason)
elif hasattr(e, "code"):
reason = "The server couldn't fulfill the request, code: {}".format(e.code)
print "{} *** ERROR: Fetch from \"{} \" failed: {}".format(today, url, reason)
def postURL(url, values, extraHeaders=None):
headers = { "User-agent" : "Mozilla/5.0" }
if extraHeaders:
for header in extraHeaders:
headers[header] = extraHeaders[header]
data = urlencode(values)
try:
request = Request(url, data, headers)
response = urlopen(request)
urlResponse = URLResponse(response.read(), urlparse(response.geturl()).hostname)
return urlResponse
except URLError as e:
today = time.strftime("[%H:%M:%S]")
reason = None
if hasattr(e, "reason"):
reason = "We failed to reach the server, reason: {}".format(e.reason)
elif hasattr(e, "code"):
reason = "The server couldn't fulfill the request, code: {}".format(e.code)
print "{} *** ERROR: Post to \"{} \" failed: {}".format(today, url, reason)
|
|
7583dfe2d6fcc45003950c4c5af28b6eb407cc40
|
django/website/contacts/migrations/0004_auto_20160421_1645.py
|
django/website/contacts/migrations/0004_auto_20160421_1645.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def add_user_profiles(apps, schema_editor):
User = apps.get_model('contacts', 'User')
UserPreferences = apps.get_model('contacts', 'UserPreferences')
for user in User.objects.all():
UserPreferences.objects.create(user=user)
class Migration(migrations.Migration):
dependencies = [
('contacts', '0003_auto_20160420_1628'),
]
operations = [
migrations.RunPython(add_user_profiles)
]
|
Add migration to create user profiles
|
Add migration to create user profiles
|
Python
|
agpl-3.0
|
aptivate/alfie,aptivate/kashana,aptivate/alfie,daniell/kashana,daniell/kashana,daniell/kashana,aptivate/kashana,daniell/kashana,aptivate/alfie,aptivate/kashana,aptivate/alfie,aptivate/kashana
|
Add migration to create user profiles
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def add_user_profiles(apps, schema_editor):
User = apps.get_model('contacts', 'User')
UserPreferences = apps.get_model('contacts', 'UserPreferences')
for user in User.objects.all():
UserPreferences.objects.create(user=user)
class Migration(migrations.Migration):
dependencies = [
('contacts', '0003_auto_20160420_1628'),
]
operations = [
migrations.RunPython(add_user_profiles)
]
|
<commit_before><commit_msg>Add migration to create user profiles<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def add_user_profiles(apps, schema_editor):
User = apps.get_model('contacts', 'User')
UserPreferences = apps.get_model('contacts', 'UserPreferences')
for user in User.objects.all():
UserPreferences.objects.create(user=user)
class Migration(migrations.Migration):
dependencies = [
('contacts', '0003_auto_20160420_1628'),
]
operations = [
migrations.RunPython(add_user_profiles)
]
|
Add migration to create user profiles# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def add_user_profiles(apps, schema_editor):
User = apps.get_model('contacts', 'User')
UserPreferences = apps.get_model('contacts', 'UserPreferences')
for user in User.objects.all():
UserPreferences.objects.create(user=user)
class Migration(migrations.Migration):
dependencies = [
('contacts', '0003_auto_20160420_1628'),
]
operations = [
migrations.RunPython(add_user_profiles)
]
|
<commit_before><commit_msg>Add migration to create user profiles<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def add_user_profiles(apps, schema_editor):
User = apps.get_model('contacts', 'User')
UserPreferences = apps.get_model('contacts', 'UserPreferences')
for user in User.objects.all():
UserPreferences.objects.create(user=user)
class Migration(migrations.Migration):
dependencies = [
('contacts', '0003_auto_20160420_1628'),
]
operations = [
migrations.RunPython(add_user_profiles)
]
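
RunPython without a reverse function makes this data migration irreversible. A hedged sketch of a reverse step, if one were wanted (not part of the committed migration; it assumes deleting all UserPreferences rows is an acceptable undo):

def remove_user_profiles(apps, schema_editor):
    # Undo the data migration by deleting the auto-created preferences.
    UserPreferences = apps.get_model('contacts', 'UserPreferences')
    UserPreferences.objects.all().delete()

# and in Migration.operations:
#     migrations.RunPython(add_user_profiles, remove_user_profiles)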
|
|
863678a76664879971d50d41d52fba5c35f67913
|
RemoveDirectories.py
|
RemoveDirectories.py
|
# -*- coding:utf-8 -*-
""" 這個程式能夠刪除所有在同目錄下的指定資料夾名稱(含子資料夾) """
import os
import shutil
FOLDER_NAME = 'PaxHeaders.20420'
def convert_all_files(path):
for dirPath, dirNames, fileNames in os.walk(path):
for dirName in dirNames:
if dirName == FOLDER_NAME:
shutil.rmtree(os.path.join(dirPath, dirName))
print dirPath, '/', dirName, 'has been removed'
pathMessage = "目前路徑是:「" + os.getcwd() + "」\r\n"
print pathMessage
convert_all_files(os.getcwd())
|
Copy from my project remove-all-sub-directory-with-specific-name.
|
Copy from my project remove-all-sub-directory-with-specific-name.
|
Python
|
mit
|
YiFanChen99/file-walker-for-windows
|
Copy from my project remove-all-sub-directory-with-specific-name.
|
# -*- coding:utf-8 -*-
""" 這個程式能夠刪除所有在同目錄下的指定資料夾名稱(含子資料夾) """
import os
import shutil
FOLDER_NAME = 'PaxHeaders.20420'
def convert_all_files(path):
for dirPath, dirNames, fileNames in os.walk(path):
for dirName in dirNames:
if dirName == FOLDER_NAME:
shutil.rmtree(os.path.join(dirPath, dirName))
print dirPath, '/', dirName, 'has been removed'
pathMessage = "目前路徑是:「" + os.getcwd() + "」\r\n"
print pathMessage
convert_all_files(os.getcwd())
|
<commit_before><commit_msg>Copy from my project remove-all-sub-directory-with-specific-name.<commit_after>
|
# -*- coding:utf-8 -*-
""" 這個程式能夠刪除所有在同目錄下的指定資料夾名稱(含子資料夾) """
import os
import shutil
FOLDER_NAME = 'PaxHeaders.20420'
def convert_all_files(path):
for dirPath, dirNames, fileNames in os.walk(path):
for dirName in dirNames:
if dirName == FOLDER_NAME:
shutil.rmtree(os.path.join(dirPath, dirName))
print dirPath, '/', dirName, 'has been removed'
pathMessage = "目前路徑是:「" + os.getcwd() + "」\r\n"
print pathMessage
convert_all_files(os.getcwd())
|
Copy from my project remove-all-sub-directory-with-specific-name.# -*- coding:utf-8 -*-
""" 這個程式能夠刪除所有在同目錄下的指定資料夾名稱(含子資料夾) """
import os
import shutil
FOLDER_NAME = 'PaxHeaders.20420'
def convert_all_files(path):
for dirPath, dirNames, fileNames in os.walk(path):
for dirName in dirNames:
if dirName == FOLDER_NAME:
shutil.rmtree(os.path.join(dirPath, dirName))
print dirPath, '/', dirName, 'has been removed'
pathMessage = "目前路徑是:「" + os.getcwd() + "」\r\n"
print pathMessage
convert_all_files(os.getcwd())
|
<commit_before><commit_msg>Copy from my project remove-all-sub-directory-with-specific-name.<commit_after># -*- coding:utf-8 -*-
""" 這個程式能夠刪除所有在同目錄下的指定資料夾名稱(含子資料夾) """
import os
import shutil
FOLDER_NAME = 'PaxHeaders.20420'
def convert_all_files(path):
for dirPath, dirNames, fileNames in os.walk(path):
for dirName in dirNames:
if dirName == FOLDER_NAME:
shutil.rmtree(os.path.join(dirPath, dirName))
print dirPath, '/', dirName, 'has been removed'
pathMessage = "目前路徑是:「" + os.getcwd() + "」\r\n"
print pathMessage
convert_all_files(os.getcwd())
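
Because shutil.rmtree is destructive, a dry-run pass is a cheap safety net. A hedged sketch of the same walk that only reports what would be removed (a hypothetical helper, not in the original script):

import os

FOLDER_NAME = 'PaxHeaders.20420'

def list_matching_dirs(path):
    # Same os.walk traversal, but collect matches instead of deleting them.
    matches = []
    for dirPath, dirNames, fileNames in os.walk(path):
        for dirName in dirNames:
            if dirName == FOLDER_NAME:
                matches.append(os.path.join(dirPath, dirName))
    return matches

for match in list_matching_dirs(os.getcwd()):
    print match  # Python 2 print, matching the original script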
|
|
b906d75b5cf010173de4c4f0e68daa2c581b17ad
|
gitcommititerator.py
|
gitcommititerator.py
|
import subprocess
class GitCommitIterator(object):
"""A class for iterating Git commits up their ancestry chain."""
previous = None
current = None
def __init__(self, start):
if not rev_is_valid(start):
raise Exception('No commit exists with ID %s' % start)
self.current = start
def __iter__(self):
return self
def next(self):
"""Takes one step up the ancestry chain."""
parents = get_parents(self.current)
# We're not doing a good job of dealing with merge commits,
# so right now we just choose the first parent. We'll definitely
# need to do something about this in the future.
if len(parents) == 0:
self.current = None
raise StopIteration
else:
self.current = parents[0]
return self.current
def get_parents(rev):
"""Returns a list of the parents of the commit with ID rev."""
git_command = ('git log -n 1 --pretty=%%P %s' % rev)
stdout = execute(git_command)
return [x for x in stdout.split(' ') if x]
def rev_is_valid(rev):
git_command = ('git rev-list -n 1 %s' % rev)
stdout = execute(git_command)
return stdout == rev
def execute(command):
"""Executes the specified command and returns the return code
and stdout output.
"""
process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
return process.communicate()[0].strip()
|
Add a GitCommitIterator thing for walking the ancestry chain.
|
Add a GitCommitIterator thing for walking the ancestry chain.
|
Python
|
mit
|
mikeconley/rb-repo
|
Add a GitCommitIterator thing for walking the ancestry chain.
|
import subprocess
class GitCommitIterator(object):
"""A class for iterating Git commits up their ancestry chain."""
previous = None
current = None
def __init__(self, start):
if not rev_is_valid(start):
raise Exception('No commit exists with ID %s' % start)
self.current = start
def __iter__(self):
return self
def next(self):
"""Takes one step up the ancestry chain."""
parents = get_parents(self.current)
# We're not doing a good job of dealing with merge commits,
# so right now we just choose the first parent. We'll definitely
# need to do something about this in the future.
if len(parents) == 0:
self.current = None
raise StopIteration
else:
self.current = parents[0]
return self.current
def get_parents(rev):
"""Returns a list of the parents of the commit with ID rev."""
git_command = ('git log -n 1 --pretty=%%P %s' % rev)
stdout = execute(git_command)
return [x for x in stdout.split(' ') if x]
def rev_is_valid(rev):
git_command = ('git rev-list -n 1 %s' % rev)
stdout = execute(git_command)
return stdout == rev
def execute(command):
"""Executes the specified command and returns the return code
and stdout output.
"""
process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
return process.communicate()[0].strip()
|
<commit_before><commit_msg>Add a GitCommitIterator thing for walking the ancestry chain.<commit_after>
|
import subprocess
class GitCommitIterator(object):
"""A class for iterating Git commits up their ancestry chain."""
previous = None
current = None
def __init__(self, start):
if not rev_is_valid(start):
raise Exception('No commit exists with ID %s' % start)
self.current = start
def __iter__(self):
return self
def next(self):
"""Takes one step up the ancestry chain."""
parents = get_parents(self.current)
# We're not doing a good job of dealing with merge commits,
# so right now we just choose the first parent. We'll definitely
# need to do something about this in the future.
if len(parents) == 0:
self.current = None
raise StopIteration
else:
self.current = parents[0]
return self.current
def get_parents(rev):
"""Returns a list of the parents of the commit with ID rev."""
git_command = ('git log -n 1 --pretty=%%P %s' % rev)
stdout = execute(git_command)
return [x for x in stdout.split(' ') if x]
def rev_is_valid(rev):
git_command = ('git rev-list -n 1 %s' % rev)
stdout = execute(git_command)
return stdout == rev
def execute(command):
"""Executes the specified command and returns the return code
and stdout output.
"""
process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
return process.communicate()[0].strip()
|
Add a GitCommitIterator thing for walking the ancestry chain.import subprocess
class GitCommitIterator(object):
"""A class for iterating Git commits up their ancestry chain."""
previous = None
current = None
def __init__(self, start):
if not rev_is_valid(start):
raise Exception('No commit exists with ID %s' % start)
self.current = start
def __iter__(self):
return self
def next(self):
"""Takes one step up the ancestry chain."""
parents = get_parents(self.current)
# We're not doing a good job of dealing with merge commits,
# so right now we just choose the first parent. We'll definitely
# need to do something about this in the future.
if len(parents) == 0:
self.current = None
raise StopIteration
else:
self.current = parents[0]
return self.current
def get_parents(rev):
"""Returns a list of the parents of the commit with ID rev."""
git_command = ('git log -n 1 --pretty=%%P %s' % rev)
stdout = execute(git_command)
return [x for x in stdout.split(' ') if x]
def rev_is_valid(rev):
git_command = ('git rev-list -n 1 %s' % rev)
stdout = execute(git_command)
return stdout == rev
def execute(command):
"""Executes the specified command and returns the return code
and stdout output.
"""
process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
return process.communicate()[0].strip()
|
<commit_before><commit_msg>Add a GitCommitIterator thing for walking the ancestry chain.<commit_after>import subprocess
class GitCommitIterator(object):
"""A class for iterating Git commits up their ancestry chain."""
previous = None
current = None
def __init__(self, start):
if not rev_is_valid(start):
raise Exception('No commit exists with ID %s' % start)
self.current = start
def __iter__(self):
return self
def next(self):
"""Takes one step up the ancestry chain."""
parents = get_parents(self.current)
# We're not doing a good job of dealing with merge commits,
# so right now we just choose the first parent. We'll definitely
# need to do something about this in the future.
if len(parents) == 0:
self.current = None
raise StopIteration
else:
self.current = parents[0]
return self.current
def get_parents(rev):
"""Returns a list of the parents of the commit with ID rev."""
git_command = ('git log -n 1 --pretty=%%P %s' % rev)
stdout = execute(git_command)
return [x for x in stdout.split(' ') if x]
def rev_is_valid(rev):
git_command = ('git rev-list -n 1 %s' % rev)
stdout = execute(git_command)
return stdout == rev
def execute(command):
"""Executes the specified command and returns the return code
and stdout output.
"""
process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
return process.communicate()[0].strip()
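
A usage sketch (hedged: rev_is_valid() compares git rev-list output to the input string, so the iterator must be seeded with a full commit id, not a symbolic name like HEAD; assumes the script's directory is importable):

from gitcommititerator import GitCommitIterator, execute

# Resolve HEAD to a full commit id first, since rev_is_valid() expects one.
head = execute('git rev-parse HEAD')
for rev in GitCommitIterator(head):
    print rev  # walks first-parent ancestry up to the root commit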
|
|
f053735150ea7930eb5da42ac4a04e3505177e08
|
test_queue.py
|
test_queue.py
|
from queue import QueueItem
from queue import Queue
import pytest
def test_enqueue():
queue = Queue()
queue.enqueue("Bacon")
assert queue.size() == 1
def test_enqueue_multi():
queue = Queue()
queue.enqueue("Bacon")
queue.enqueue("Steak")
queue.enqueue("Beer")
assert queue.size() == 3
def test_dequeue():
pass
def test_dequeue_multi():
pass
def test_size():
pass
|
Add test structure and tests for enqueue method
|
Add test structure and tests for enqueue method
|
Python
|
mit
|
jwarren116/data-structures
|
Add test structure and tests for enqueue method
|
from queue import QueueItem
from queue import Queue
import pytest
def test_enqueue():
queue = Queue()
queue.enqueue("Bacon")
assert queue.size() == 1
def test_enqueue_multi():
queue = Queue()
queue.enqueue("Bacon")
queue.enqueue("Steak")
queue.enqueue("Beer")
assert queue.size() == 3
def test_dequeue():
pass
def test_dequeue_multi():
pass
def test_size():
pass
|
<commit_before><commit_msg>Add test structure and tests for enqueue method<commit_after>
|
from queue import QueueItem
from queue import Queue
import pytest
def test_enqueue():
queue = Queue()
queue.enqueue("Bacon")
assert queue.size() == 1
def test_enqueue_multi():
queue = Queue()
queue.enqueue("Bacon")
queue.enqueue("Steak")
queue.enqueue("Beer")
assert queue.size() == 3
def test_dequeue():
pass
def test_dequeue_multi():
pass
def test_size():
pass
|
Add test structure and tests for enqueue methodfrom queue import QueueItem
from queue import Queue
import pytest
def test_enqueue():
queue = Queue()
queue.enqueue("Bacon")
assert queue.size() == 1
def test_enqueue_multi():
queue = Queue()
queue.enqueue("Bacon")
queue.enqueue("Steak")
queue.enqueue("Beer")
assert queue.size() == 3
def test_dequeue():
pass
def test_dequeue_multi():
pass
def test_size():
pass
|
<commit_before><commit_msg>Add test structure and tests for enqueue method<commit_after>from queue import QueueItem
from queue import Queue
import pytest
def test_enqueue():
queue = Queue()
queue.enqueue("Bacon")
assert queue.size() == 1
def test_enqueue_multi():
queue = Queue()
queue.enqueue("Bacon")
queue.enqueue("Steak")
queue.enqueue("Beer")
assert queue.size() == 3
def test_dequeue():
pass
def test_dequeue_multi():
pass
def test_size():
pass
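
The queue module under test is not shown in this record; a hedged sketch of a linked-list implementation that the imports and passing tests imply (hypothetical, written to satisfy enqueue/dequeue/size):

class QueueItem(object):
    def __init__(self, value):
        self.value = value
        self.behind = None  # next item toward the back of the queue

class Queue(object):
    def __init__(self):
        self.head = None  # dequeue end
        self.tail = None  # enqueue end
        self._size = 0

    def enqueue(self, value):
        item = QueueItem(value)
        if self.tail:
            self.tail.behind = item
        else:
            self.head = item
        self.tail = item
        self._size += 1

    def dequeue(self):
        if self.head is None:
            raise IndexError('dequeue from empty queue')
        value = self.head.value
        self.head = self.head.behind
        if self.head is None:
            self.tail = None
        self._size -= 1
        return value

    def size(self):
        return self._size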
|
|
e4ebbbb010f43612da13691b003f392f56b88fec
|
bitbots_body_behaviour/scripts/cheering_behaviour.py
|
bitbots_body_behaviour/scripts/cheering_behaviour.py
|
#!/usr/bin/env python3
import actionlib
import rospy
from humanoid_league_msgs.msg import PlayAnimationGoal, PlayAnimationAction, GameState
class CheeringBehaviour:
def __init__(self):
        rospy.init_node('cheering_behaviour')
self.opponent_goals = 0
self.animation_running = False
rospy.Subscriber('/gamestate', GameState, self.gamestate_callback, queue_size=10)
self.animation_client = actionlib.SimpleActionClient('animation', PlayAnimationAction)
rospy.spin()
def gamestate_callback(self, msg: GameState):
if msg.rivalScore > self.opponent_goals and not self.animation_running:
goal = PlayAnimationGoal()
goal.animation = 'cheering' # TODO
goal.hcm = False
self.animation_client.send_goal(goal, done_cb=self.animation_done)
            self.opponent_goals = msg.rivalScore  # remember the score so we cheer only once per goal
            self.animation_running = True
def animation_done(self, arg1, arg2):
self.animation_running = False
if __name__ == '__main__':
CheeringBehaviour()
|
Add simple behaviour to cheer when the opponent scores a goal
|
Add simple behaviour to cheer when the opponent scores a goal
|
Python
|
bsd-3-clause
|
bit-bots/bitbots_behaviour
|
Add simple behaviour to cheer when the opponent scores a goal
|
#!/usr/bin/env python3
import actionlib
import rospy
from humanoid_league_msgs.msg import PlayAnimationGoal, PlayAnimationAction, GameState
class CheeringBehaviour:
def __init__(self):
        rospy.init_node('cheering_behaviour')
self.opponent_goals = 0
self.animation_running = False
rospy.Subscriber('/gamestate', GameState, self.gamestate_callback, queue_size=10)
self.animation_client = actionlib.SimpleActionClient('animation', PlayAnimationAction)
rospy.spin()
def gamestate_callback(self, msg: GameState):
if msg.rivalScore > self.opponent_goals and not self.animation_running:
goal = PlayAnimationGoal()
goal.animation = 'cheering' # TODO
goal.hcm = False
self.animation_client.send_goal(goal, done_cb=self.animation_done)
            self.opponent_goals = msg.rivalScore  # remember the score so we cheer only once per goal
            self.animation_running = True
def animation_done(self, arg1, arg2):
self.animation_running = False
if __name__ == '__main__':
CheeringBehaviour()
|
<commit_before><commit_msg>Add simple behaviour to cheer when the opponent scores a goal<commit_after>
|
#!/usr/bin/env python3
import actionlib
import rospy
from humanoid_league_msgs.msg import PlayAnimationGoal, PlayAnimationAction, GameState
class CheeringBehaviour:
def __init__(self):
        rospy.init_node('cheering_behaviour')
self.opponent_goals = 0
self.animation_running = False
rospy.Subscriber('/gamestate', GameState, self.gamestate_callback, queue_size=10)
self.animation_client = actionlib.SimpleActionClient('animation', PlayAnimationAction)
rospy.spin()
def gamestate_callback(self, msg: GameState):
if msg.rivalScore > self.opponent_goals and not self.animation_running:
goal = PlayAnimationGoal()
goal.animation = 'cheering' # TODO
goal.hcm = False
self.animation_client.send_goal(goal, done_cb=self.animation_done)
            self.opponent_goals = msg.rivalScore  # remember the score so we cheer only once per goal
            self.animation_running = True
def animation_done(self, arg1, arg2):
self.animation_running = False
if __name__ == '__main__':
CheeringBehaviour()
|
Add simple behaviour to cheer when the opponent scores a goal#!/usr/bin/env python3
import actionlib
import rospy
from humanoid_league_msgs.msg import PlayAnimationGoal, PlayAnimationAction, GameState
class CheeringBehaviour:
def __init__(self):
        rospy.init_node('cheering_behaviour')
self.opponent_goals = 0
self.animation_running = False
rospy.Subscriber('/gamestate', GameState, self.gamestate_callback, queue_size=10)
self.animation_client = actionlib.SimpleActionClient('animation', PlayAnimationAction)
rospy.spin()
def gamestate_callback(self, msg: GameState):
if msg.rivalScore > self.opponent_goals and not self.animation_running:
goal = PlayAnimationGoal()
goal.animation = 'cheering' # TODO
goal.hcm = False
self.animation_client.send_goal(goal, done_cb=self.animation_done)
            self.opponent_goals = msg.rivalScore  # remember the score so we cheer only once per goal
            self.animation_running = True
def animation_done(self, arg1, arg2):
self.animation_running = False
if __name__ == '__main__':
CheeringBehaviour()
|
<commit_before><commit_msg>Add simple behaviour to cheer when the opponent scores a goal<commit_after>#!/usr/bin/env python3
import actionlib
import rospy
from humanoid_league_msgs.msg import PlayAnimationGoal, PlayAnimationAction, GameState
class CheeringBehaviour:
def __init__(self):
        rospy.init_node('cheering_behaviour')
self.opponent_goals = 0
self.animation_running = False
rospy.Subscriber('/gamestate', GameState, self.gamestate_callback, queue_size=10)
self.animation_client = actionlib.SimpleActionClient('animation', PlayAnimationAction)
rospy.spin()
def gamestate_callback(self, msg: GameState):
if msg.rivalScore > self.opponent_goals and not self.animation_running:
goal = PlayAnimationGoal()
goal.animation = 'cheering' # TODO
goal.hcm = False
self.animation_client.send_goal(goal, done_cb=self.animation_done)
            self.opponent_goals = msg.rivalScore  # remember the score so we cheer only once per goal
            self.animation_running = True
def animation_done(self, arg1, arg2):
self.animation_running = False
if __name__ == '__main__':
CheeringBehaviour()
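
The callback can be exercised without a live ROS graph by skipping __init__ and mocking the action client. A hedged sketch (assumes only that the ROS message packages import cleanly and that GameState messages carry rivalScore, as used above):

from unittest.mock import MagicMock

# Build the object without __init__, which needs a running ROS master.
node = CheeringBehaviour.__new__(CheeringBehaviour)
node.opponent_goals = 0
node.animation_running = False
node.animation_client = MagicMock()

node.gamestate_callback(MagicMock(rivalScore=1))
assert node.animation_client.send_goal.called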
|
|
ee0b57b0c254546ed123a824b907ee0ef734f3b1
|
aphla_location.py
|
aphla_location.py
|
# Require the aphla package
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
# Import caget and caput
from cothread.catools import caget, caput
# Load the machine
ap.machines.load('SRI21')
print ap.__file__
|
Print the location of the APHLA library
|
Print the location of the APHLA library
|
Python
|
apache-2.0
|
razvanvasile/Work-Mini-Projects,razvanvasile/Work-Mini-Projects,razvanvasile/Work-Mini-Projects
|
Print the location of the APHLA library
|
# Require the aphla package
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
# Import caget and caput
from cothread.catools import caget, caput
# Load the machine
ap.machines.load('SRI21')
print ap.__file__
|
<commit_before><commit_msg>Print the location of the APHLA library<commit_after>
|
# Require the aphla package
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
# Import caget and caput
from cothread.catools import caget, caput
# Load the machine
ap.machines.load('SRI21')
print ap.__file__
|
Print the location of the APHLA library# Require the aphla package
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
# Import caget and caput
from cothread.catools import caget, caput
# Load the machine
ap.machines.load('SRI21')
print ap.__file__
|
<commit_before><commit_msg>Print the location of the APHLA library<commit_after># Load the machine
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
# Import caget and caput
from cothread.catools import caget, caput
# Load the machine
ap.machines.load('SRI21')
print ap.__file__
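
Once the lattice is loaded, elements can be queried through the aphla API; a hedged one-liner sketch (assumes getElements is available, as in published aphla examples):

bpms = ap.getElements('BPM')  # list the BPM elements of the loaded SRI21 lattice
print len(bpms)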
|
|
b30d2d222397122659104ef0c1f8e8a2810caa05
|
examples/filter_ensemble_average.py
|
examples/filter_ensemble_average.py
|
import numpy as np
import matplotlib.pyplot as plt
import atomic
from ensemble_average import time_dependent_power
if __name__ == '__main__':
times = np.logspace(-7, 0, 50)
temperature = np.logspace(0, 3, 50)
density = 1e19
from atomic.pec import TransitionPool
ad = atomic.element('argon')
tp = TransitionPool.from_adf15('adas_data/pec/*ar*.dat')
ad = tp.filter_energy(2e3, 20e3, 'eV').create_atomic_data(ad)
rt = atomic.RateEquations(ad)
y = rt.solve(times, temperature, density)
taus = np.array([ 1e14, 1e15, 1e16, 1e17, 1e18])/density
plt.figure(1); plt.clf()
from filter_construction import plot_coeffs
plot_coeffs(ad, temperature, 5)
plt.ylim(1e-35, 1e-30)
plt.draw()
plt.figure(2); plt.clf()
time_dependent_power(y, taus)
plt.draw()
plt.figure(3); plt.clf()
time_dependent_power(y, taus, ensemble_average=True)
plt.draw()
plt.show()
|
Add example: filtered ensemble average.
|
Add example: filtered ensemble average.
|
Python
|
mit
|
ezekial4/atomic_neu,ezekial4/atomic_neu
|
Add example: filtered ensemble average.
|
import numpy as np
import matplotlib.pyplot as plt
import atomic
from ensemble_average import time_dependent_power
if __name__ == '__main__':
times = np.logspace(-7, 0, 50)
temperature = np.logspace(0, 3, 50)
density = 1e19
from atomic.pec import TransitionPool
ad = atomic.element('argon')
tp = TransitionPool.from_adf15('adas_data/pec/*ar*.dat')
ad = tp.filter_energy(2e3, 20e3, 'eV').create_atomic_data(ad)
rt = atomic.RateEquations(ad)
y = rt.solve(times, temperature, density)
taus = np.array([ 1e14, 1e15, 1e16, 1e17, 1e18])/density
plt.figure(1); plt.clf()
from filter_construction import plot_coeffs
plot_coeffs(ad, temperature, 5)
plt.ylim(1e-35, 1e-30)
plt.draw()
plt.figure(2); plt.clf()
time_dependent_power(y, taus)
plt.draw()
plt.figure(3); plt.clf()
time_dependent_power(y, taus, ensemble_average=True)
plt.draw()
plt.show()
|
<commit_before><commit_msg>Add example: filtered ensemble average.<commit_after>
|
import numpy as np
import matplotlib.pyplot as plt
import atomic
from ensemble_average import time_dependent_power
if __name__ == '__main__':
times = np.logspace(-7, 0, 50)
temperature = np.logspace(0, 3, 50)
density = 1e19
from atomic.pec import TransitionPool
ad = atomic.element('argon')
tp = TransitionPool.from_adf15('adas_data/pec/*ar*.dat')
ad = tp.filter_energy(2e3, 20e3, 'eV').create_atomic_data(ad)
rt = atomic.RateEquations(ad)
y = rt.solve(times, temperature, density)
taus = np.array([ 1e14, 1e15, 1e16, 1e17, 1e18])/density
plt.figure(1); plt.clf()
from filter_construction import plot_coeffs
plot_coeffs(ad, temperature, 5)
plt.ylim(1e-35, 1e-30)
plt.draw()
plt.figure(2); plt.clf()
time_dependent_power(y, taus)
plt.draw()
plt.figure(3); plt.clf()
time_dependent_power(y, taus, ensemble_average=True)
plt.draw()
plt.show()
|
Add example: filtered ensemble average.import numpy as np
import matplotlib.pyplot as plt
import atomic
from ensemble_average import time_dependent_power
if __name__ == '__main__':
times = np.logspace(-7, 0, 50)
temperature = np.logspace(0, 3, 50)
density = 1e19
from atomic.pec import TransitionPool
ad = atomic.element('argon')
tp = TransitionPool.from_adf15('adas_data/pec/*ar*.dat')
ad = tp.filter_energy(2e3, 20e3, 'eV').create_atomic_data(ad)
rt = atomic.RateEquations(ad)
y = rt.solve(times, temperature, density)
taus = np.array([ 1e14, 1e15, 1e16, 1e17, 1e18])/density
plt.figure(1); plt.clf()
from filter_construction import plot_coeffs
plot_coeffs(ad, temperature, 5)
plt.ylim(1e-35, 1e-30)
plt.draw()
plt.figure(2); plt.clf()
time_dependent_power(y, taus)
plt.draw()
plt.figure(3); plt.clf()
time_dependent_power(y, taus, ensemble_average=True)
plt.draw()
plt.show()
|
<commit_before><commit_msg>Add example: filtered ensemble average.<commit_after>import numpy as np
import matplotlib.pyplot as plt
import atomic
from ensemble_average import time_dependent_power
if __name__ == '__main__':
times = np.logspace(-7, 0, 50)
temperature = np.logspace(0, 3, 50)
density = 1e19
from atomic.pec import TransitionPool
ad = atomic.element('argon')
tp = TransitionPool.from_adf15('adas_data/pec/*ar*.dat')
ad = tp.filter_energy(2e3, 20e3, 'eV').create_atomic_data(ad)
rt = atomic.RateEquations(ad)
y = rt.solve(times, temperature, density)
taus = np.array([ 1e14, 1e15, 1e16, 1e17, 1e18])/density
plt.figure(1); plt.clf()
from filter_construction import plot_coeffs
plot_coeffs(ad, temperature, 5)
plt.ylim(1e-35, 1e-30)
plt.draw()
plt.figure(2); plt.clf()
time_dependent_power(y, taus)
plt.draw()
plt.figure(3); plt.clf()
time_dependent_power(y, taus, ensemble_average=True)
plt.draw()
plt.show()
|
|
ba84856e3d318825865abecb76aa77290d107589
|
numba/tests/test_gdb_dwarf.py
|
numba/tests/test_gdb_dwarf.py
|
"""Tests for gdb interacting with the DWARF numba generates"""
from numba.tests.support import TestCase, linux_only
from numba.tests.gdb_support import needs_gdb, skip_unless_pexpect
@linux_only
@needs_gdb
@skip_unless_pexpect
class TestGDBDwarf(TestCase):
# This runs the tests in numba.tests.gdb, each submodule must contain one
# test class called "Test" and it must contain one test called "test".
    # Variation is provided by the module name. The reason this convention exists
# is because gdb tests tend to be line number sensitive (breakpoints etc
# care about this) and doing this prevents constant churn and permits the
# reuse of the existing subprocess_test_runner harness.
_NUMBA_OPT_0_ENV = {'NUMBA_OPT': '0'}
def _subprocess_test_runner(self, test_mod):
themod = f'numba.tests.gdb.{test_mod}'
self.subprocess_test_runner(test_module=themod,
test_class='Test',
test_name='test',
envvars=self._NUMBA_OPT_0_ENV)
def test_basic(self):
self._subprocess_test_runner('test_basic')
def test_array(self):
self._subprocess_test_runner('test_array_arg')
def test_conditional_breakpoint(self):
self._subprocess_test_runner('test_conditional_breakpoint')
|
Add test driver for gdb
|
Add test driver for gdb
|
Python
|
bsd-2-clause
|
numba/numba,seibert/numba,IntelLabs/numba,seibert/numba,IntelLabs/numba,cpcloud/numba,numba/numba,numba/numba,cpcloud/numba,IntelLabs/numba,cpcloud/numba,cpcloud/numba,numba/numba,IntelLabs/numba,IntelLabs/numba,cpcloud/numba,numba/numba,seibert/numba,seibert/numba,seibert/numba
|
Add test driver for gdb
|
"""Tests for gdb interacting with the DWARF numba generates"""
from numba.tests.support import TestCase, linux_only
from numba.tests.gdb_support import needs_gdb, skip_unless_pexpect
@linux_only
@needs_gdb
@skip_unless_pexpect
class TestGDBDwarf(TestCase):
# This runs the tests in numba.tests.gdb, each submodule must contain one
# test class called "Test" and it must contain one test called "test".
    # Variation is provided by the module name. The reason this convention exists
# is because gdb tests tend to be line number sensitive (breakpoints etc
# care about this) and doing this prevents constant churn and permits the
# reuse of the existing subprocess_test_runner harness.
_NUMBA_OPT_0_ENV = {'NUMBA_OPT': '0'}
def _subprocess_test_runner(self, test_mod):
themod = f'numba.tests.gdb.{test_mod}'
self.subprocess_test_runner(test_module=themod,
test_class='Test',
test_name='test',
envvars=self._NUMBA_OPT_0_ENV)
def test_basic(self):
self._subprocess_test_runner('test_basic')
def test_array(self):
self._subprocess_test_runner('test_array_arg')
def test_conditional_breakpoint(self):
self._subprocess_test_runner('test_conditional_breakpoint')
|
<commit_before><commit_msg>Add test driver for gdb<commit_after>
|
"""Tests for gdb interacting with the DWARF numba generates"""
from numba.tests.support import TestCase, linux_only
from numba.tests.gdb_support import needs_gdb, skip_unless_pexpect
@linux_only
@needs_gdb
@skip_unless_pexpect
class TestGDBDwarf(TestCase):
# This runs the tests in numba.tests.gdb, each submodule must contain one
# test class called "Test" and it must contain one test called "test".
    # Variation is provided by the module name. The reason this convention exists
# is because gdb tests tend to be line number sensitive (breakpoints etc
# care about this) and doing this prevents constant churn and permits the
# reuse of the existing subprocess_test_runner harness.
_NUMBA_OPT_0_ENV = {'NUMBA_OPT': '0'}
def _subprocess_test_runner(self, test_mod):
themod = f'numba.tests.gdb.{test_mod}'
self.subprocess_test_runner(test_module=themod,
test_class='Test',
test_name='test',
envvars=self._NUMBA_OPT_0_ENV)
def test_basic(self):
self._subprocess_test_runner('test_basic')
def test_array(self):
self._subprocess_test_runner('test_array_arg')
def test_conditional_breakpoint(self):
self._subprocess_test_runner('test_conditional_breakpoint')
|
Add test driver for gdb"""Tests for gdb interacting with the DWARF numba generates"""
from numba.tests.support import TestCase, linux_only
from numba.tests.gdb_support import needs_gdb, skip_unless_pexpect
@linux_only
@needs_gdb
@skip_unless_pexpect
class TestGDBDwarf(TestCase):
# This runs the tests in numba.tests.gdb, each submodule must contain one
# test class called "Test" and it must contain one test called "test".
# Variation is provided by the module name. The reason this convention exists
# is because gdb tests tend to be line number sensitive (breakpoints etc
# care about this) and doing this prevents constant churn and permits the
# reuse of the existing subprocess_test_runner harness.
_NUMBA_OPT_0_ENV = {'NUMBA_OPT': '0'}
def _subprocess_test_runner(self, test_mod):
themod = f'numba.tests.gdb.{test_mod}'
self.subprocess_test_runner(test_module=themod,
test_class='Test',
test_name='test',
envvars=self._NUMBA_OPT_0_ENV)
def test_basic(self):
self._subprocess_test_runner('test_basic')
def test_array(self):
self._subprocess_test_runner('test_array_arg')
def test_conditional_breakpoint(self):
self._subprocess_test_runner('test_conditional_breakpoint')
|
<commit_before><commit_msg>Add test driver for gdb<commit_after>"""Tests for gdb interacting with the DWARF numba generates"""
from numba.tests.support import TestCase, linux_only
from numba.tests.gdb_support import needs_gdb, skip_unless_pexpect
@linux_only
@needs_gdb
@skip_unless_pexpect
class TestGDBDwarf(TestCase):
# This runs the tests in numba.tests.gdb, each submodule must contain one
# test class called "Test" and it must contain one test called "test".
# Variation is provided by the module name. The reason this convention exists
# is because gdb tests tend to be line number sensitive (breakpoints etc
# care about this) and doing this prevents constant churn and permits the
# reuse of the existing subprocess_test_runner harness.
_NUMBA_OPT_0_ENV = {'NUMBA_OPT': '0'}
def _subprocess_test_runner(self, test_mod):
themod = f'numba.tests.gdb.{test_mod}'
self.subprocess_test_runner(test_module=themod,
test_class='Test',
test_name='test',
envvars=self._NUMBA_OPT_0_ENV)
def test_basic(self):
self._subprocess_test_runner('test_basic')
def test_array(self):
self._subprocess_test_runner('test_array_arg')
def test_conditional_breakpoint(self):
self._subprocess_test_runner('test_conditional_breakpoint')
|
|
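A rough sketch of what a subprocess-per-module harness like the one above boils down to — illustrative only, not numba's actual `subprocess_test_runner`:
import os
import subprocess
import sys
def run_module_test(module, env_overrides):
    # Run one unittest module in a fresh interpreter so the test file's
    # line numbers (and any gdb breakpoints keyed to them) stay stable.
    env = dict(os.environ, **env_overrides)
    return subprocess.call([sys.executable, '-m', 'unittest', module], env=env)
# e.g. run_module_test('numba.tests.gdb.test_basic', {'NUMBA_OPT': '0'})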
11bbff0c22be7489d28e561952ab7206288fc39e
|
TwoSum.py
|
TwoSum.py
|
# _*_ coding:utf-8 _*_
"""
Given an array of integers, return indices of the two numbers such that they add up to a specific target.
You may assume that each input would have exactly one solution, and you may not use the same element twice.
Example:
Given nums = [2, 7, 11, 15], target = 9,
Because nums[0] + nums[1] = 2 + 7 = 9,
return [0, 1].
"""
class Solution():
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
dict = {}
for i in range(len(nums)):
if nums[i] in dict:
return [dict[nums[i]], i]
else:
dict[target - nums[i]] = i
if __name__ == "__main__":
sol = Solution()
print sol.twoSum([2,7,11,15], 9)
|
Add two number sum solution
|
Add two number sum solution
|
Python
|
mit
|
cometdlut/leetcode_cometdlut
|
Add two number sum solution
|
# _*_ coding:utf-8 _*_
"""
Given an array of integers, return indices of the two numbers such that they add up to a specific target.
You may assume that each input would have exactly one solution, and you may not use the same element twice.
Example:
Given nums = [2, 7, 11, 15], target = 9,
Because nums[0] + nums[1] = 2 + 7 = 9,
return [0, 1].
"""
class Solution():
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
dict = {}
for i in range(len(nums)):
if nums[i] in dict:
return [dict[nums[i]], i]
else:
dict[target - nums[i]] = i
if __name__ == "__main__":
sol = Solution()
print sol.twoSum([2,7,11,15], 9)
|
<commit_before><commit_msg>Add two number sum solution<commit_after>
|
# _*_ coding:utf-8 _*_
"""
Given an array of integers, return indices of the two numbers such that they add up to a specific target.
You may assume that each input would have exactly one solution, and you may not use the same element twice.
Example:
Given nums = [2, 7, 11, 15], target = 9,
Because nums[0] + nums[1] = 2 + 7 = 9,
return [0, 1].
"""
class Solution():
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
dict = {}
for i in range(len(nums)):
if nums[i] in dict:
return [dict[nums[i]], i]
else:
dict[target - nums[i]] = i
if __name__ == "__main__":
sol = Solution()
print sol.twoSum([2,7,11,15], 9)
|
Add two number sum solution# _*_ coding:utf-8 _*_
"""
Given an array of integers, return indices of the two numbers such that they add up to a specific target.
You may assume that each input would have exactly one solution, and you may not use the same element twice.
Example:
Given nums = [2, 7, 11, 15], target = 9,
Because nums[0] + nums[1] = 2 + 7 = 9,
return [0, 1].
"""
class Solution():
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
dict = {}
for i in range(len(nums)):
if nums[i] in dict:
return [dict[nums[i]], i]
else:
dict[target - nums[i]] = i
if __name__ == "__main__":
sol = Solution()
print sol.twoSum([2,7,11,15], 9)
|
<commit_before><commit_msg>Add two number sum solution<commit_after># _*_ coding:utf-8 _*_
"""
Given an array of integers, return indices of the two numbers such that they add up to a specific target.
You may assume that each input would have exactly one solution, and you may not use the same element twice.
Example:
Given nums = [2, 7, 11, 15], target = 9,
Because nums[0] + nums[1] = 2 + 7 = 9,
return [0, 1].
"""
class Solution():
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
dict = {}
for i in range(len(nums)):
if nums[i] in dict:
return [dict[nums[i]], i]
else:
dict[target - nums[i]] = i
if __name__ == "__main__":
sol = Solution()
print sol.twoSum([2,7,11,15], 9)
|
|
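For reference, the record's complement-lookup idea restated in Python 3 (hypothetical function name; same trick of keying the dict by `target - nums[i]` so each later element is matched in O(1)):
def two_sum(nums, target):
    seen = {}
    for i, n in enumerate(nums):
        if n in seen:           # n completes an element seen earlier
            return [seen[n], i]
        seen[target - n] = i    # remember what would complete n
    return []
assert two_sum([2, 7, 11, 15], 9) == [0, 1]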
be5a91d72812c794775e52854af34c7a0bdd51da
|
i8c/tests/test_reserved_provider.py
|
i8c/tests/test_reserved_provider.py
|
from i8c.tests import TestCase
from i8c.exceptions import NameAnnotatorError
SOURCE = """\
define %s::test_reserved_provider returns ptr
extern ptr __some_symbol
"""
class TestReservedProvider(TestCase):
"""Check that reserved provider names are rejected."""
def test_reserved_provider(self):
"""Check that reserved provider names are rejected."""
for provider in ("test", "libpthread", "i8test",
"i8core", "i8", "hello"):
source = SOURCE % provider
if provider.startswith("i8"):
self.assertRaises(NameAnnotatorError, self.compile, source)
else:
tree, output = self.compile(source)
self.assertEqual([], output.operations)
|
Check reserved provider names are rejected
|
Check reserved provider names are rejected
|
Python
|
lgpl-2.1
|
gbenson/i8c
|
Check reserved provider names are rejected
|
from i8c.tests import TestCase
from i8c.exceptions import NameAnnotatorError
SOURCE = """\
define %s::test_reserved_provider returns ptr
extern ptr __some_symbol
"""
class TestReservedProvider(TestCase):
"""Check that reserved provider names are rejected."""
def test_reserved_provider(self):
"""Check that reserved provider names are rejected."""
for provider in ("test", "libpthread", "i8test",
"i8core", "i8", "hello"):
source = SOURCE % provider
if provider.startswith("i8"):
self.assertRaises(NameAnnotatorError, self.compile, source)
else:
tree, output = self.compile(source)
self.assertEqual([], output.operations)
|
<commit_before><commit_msg>Check reserved provider names are rejected<commit_after>
|
from i8c.tests import TestCase
from i8c.exceptions import NameAnnotatorError
SOURCE = """\
define %s::test_reserved_provider returns ptr
extern ptr __some_symbol
"""
class TestReservedProvider(TestCase):
"""Check that reserved provider names are rejected."""
def test_reserved_provider(self):
"""Check that reserved provider names are rejected."""
for provider in ("test", "libpthread", "i8test",
"i8core", "i8", "hello"):
source = SOURCE % provider
if provider.startswith("i8"):
self.assertRaises(NameAnnotatorError, self.compile, source)
else:
tree, output = self.compile(source)
self.assertEqual([], output.operations)
|
Check reserved provider names are rejectedfrom i8c.tests import TestCase
from i8c.exceptions import NameAnnotatorError
SOURCE = """\
define %s::test_reserved_provider returns ptr
extern ptr __some_symbol
"""
class TestReservedProvider(TestCase):
"""Check that reserved provider names are rejected."""
def test_reserved_provider(self):
"""Check that reserved provider names are rejected."""
for provider in ("test", "libpthread", "i8test",
"i8core", "i8", "hello"):
source = SOURCE % provider
if provider.startswith("i8"):
self.assertRaises(NameAnnotatorError, self.compile, source)
else:
tree, output = self.compile(source)
self.assertEqual([], output.operations)
|
<commit_before><commit_msg>Check reserved provider names are rejected<commit_after>from i8c.tests import TestCase
from i8c.exceptions import NameAnnotatorError
SOURCE = """\
define %s::test_reserved_provider returns ptr
extern ptr __some_symbol
"""
class TestReservedProvider(TestCase):
"""Check that reserved provider names are rejected."""
def test_reserved_provider(self):
"""Check that reserved provider names are rejected."""
for provider in ("test", "libpthread", "i8test",
"i8core", "i8", "hello"):
source = SOURCE % provider
if provider.startswith("i8"):
self.assertRaises(NameAnnotatorError, self.compile, source)
else:
tree, output = self.compile(source)
self.assertEqual([], output.operations)
|
|
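The rule the test above encodes reduces to a prefix check; a minimal sketch (the real compiler raises NameAnnotatorError rather than returning a bool):
def is_reserved_provider(name):
    # Providers in the "i8" namespace are reserved for the toolchain.
    return name.startswith('i8')
assert is_reserved_provider('i8test') and is_reserved_provider('i8core')
assert not is_reserved_provider('libpthread')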
29d086bc9d31da9c702d9bc27e1ed821d45367ef
|
tests/test_parsing.py
|
tests/test_parsing.py
|
from __future__ import print_function
from nose2.tools import such
from mkerefuse.refuse import RefusePickup
def setup_parser(html_path):
"""
Reads test HTML & instantiates a new `RefusePickup`
:param html_path: Path to HTML file with a test response
:type html_path: str
:return: RefusePickup instance
:rtype: mkerefuse.RefusePickup
"""
with open(html_path, 'r') as infile:
return RefusePickup.from_html(infile.read())
with such.A('successfully fetched response') as it:
with it.having('garbage day'):
@it.has_setup
def setup():
it.parser = setup_parser('tests/data/garbageday.html')
@it.should('have the correct garbage route')
def test(case):
case.assertEqual(
it.parser.route_garbage,
'NA1-2A')
@it.should('have the correct recycle route')
def test(case):
case.assertEqual(
it.parser.route_recycle,
'NR1-2-3')
@it.should('have the correct next garbage pickup')
def test(case):
case.assertEqual(
it.parser.next_pickup_garbage,
'TUESDAY DECEMBER 27, 2016')
@it.should('have the correct next recycle pickup range')
def test(case):
case.assertEqual(
it.parser.next_pickup_recycle_after,
'TUESDAY JANUARY 3, 2017')
case.assertEqual(
it.parser.next_pickup_recycle_before,
'WEDNESDAY JANUARY 4, 2017')
it.createTests(globals())
|
Add Test: on garbage day, known recycle
|
Add Test: on garbage day, known recycle
|
Python
|
unlicense
|
tomislacker/python-mke-trash-pickup,tomislacker/python-mke-trash-pickup
|
Add Test: on garbage day, known recycle
|
from __future__ import print_function
from nose2.tools import such
from mkerefuse.refuse import RefusePickup
def setup_parser(html_path):
"""
Reads test HTML & instantiates a new `RefusePickup`
:param html_path: Path to HTML file with a test response
:type html_path: str
:return: RefusePickup instance
:rtype: mkerefuse.RefusePickup
"""
with open(html_path, 'r') as infile:
return RefusePickup.from_html(infile.read())
with such.A('successfully fetched response') as it:
with it.having('garbage day'):
@it.has_setup
def setup():
it.parser = setup_parser('tests/data/garbageday.html')
@it.should('have the correct garbage route')
def test(case):
case.assertEqual(
it.parser.route_garbage,
'NA1-2A')
@it.should('have the correct recycle route')
def test(case):
case.assertEqual(
it.parser.route_recycle,
'NR1-2-3')
@it.should('have the correct next garbage pickup')
def test(case):
case.assertEqual(
it.parser.next_pickup_garbage,
'TUESDAY DECEMBER 27, 2016')
@it.should('have the correct next recycle pickup range')
def test(case):
case.assertEqual(
it.parser.next_pickup_recycle_after,
'TUESDAY JANUARY 3, 2017')
case.assertEqual(
it.parser.next_pickup_recycle_before,
'WEDNESDAY JANUARY 4, 2017')
it.createTests(globals())
|
<commit_before><commit_msg>Add Test: on garbage day, known recycle<commit_after>
|
from __future__ import print_function
from nose2.tools import such
from mkerefuse.refuse import RefusePickup
def setup_parser(html_path):
"""
Reads test HTML & instantiates a new `RefusePickup`
:param html_path: Path to HTML file with a test response
:type html_path: str
:return: RefusePickup instance
:rtype: mkerefuse.RefusePickup
"""
with open(html_path, 'r') as infile:
return RefusePickup.from_html(infile.read())
with such.A('successfully fetched response') as it:
with it.having('garbage day'):
@it.has_setup
def setup():
it.parser = setup_parser('tests/data/garbageday.html')
@it.should('have the correct garbage route')
def test(case):
case.assertEqual(
it.parser.route_garbage,
'NA1-2A')
@it.should('have the correct recycle route')
def test(case):
case.assertEqual(
it.parser.route_recycle,
'NR1-2-3')
@it.should('have the correct next garbage pickup')
def test(case):
case.assertEqual(
it.parser.next_pickup_garbage,
'TUESDAY DECEMBER 27, 2016')
@it.should('have the correct next recycle pickup range')
def test(case):
case.assertEqual(
it.parser.next_pickup_recycle_after,
'TUESDAY JANUARY 3, 2017')
case.assertEqual(
it.parser.next_pickup_recycle_before,
'WEDNESDAY JANUARY 4, 2017')
it.createTests(globals())
|
Add Test: on garbage day, known recyclefrom __future__ import print_function
from nose2.tools import such
from mkerefuse.refuse import RefusePickup
def setup_parser(html_path):
"""
Reads test HTML & instantiates a new `RefusePickup`
:param html_path: Path to HTML file with a test response
:type html_path: str
:return: RefusePickup instance
:rtype: mkerefuse.RefusePickup
"""
with open(html_path, 'r') as infile:
return RefusePickup.from_html(infile.read())
with such.A('successfully fetched response') as it:
with it.having('garbage day'):
@it.has_setup
def setup():
it.parser = setup_parser('tests/data/garbageday.html')
@it.should('have the correct garbage route')
def test(case):
case.assertEqual(
it.parser.route_garbage,
'NA1-2A')
@it.should('have the correct recycle route')
def test(case):
case.assertEqual(
it.parser.route_recycle,
'NR1-2-3')
@it.should('have the correct next garbage pickup')
def test(case):
case.assertEqual(
it.parser.next_pickup_garbage,
'TUESDAY DECEMBER 27, 2016')
@it.should('have the correct next recycle pickup range')
def test(case):
case.assertEqual(
it.parser.next_pickup_recycle_after,
'TUESDAY JANUARY 3, 2017')
case.assertEqual(
it.parser.next_pickup_recycle_before,
'WEDNESDAY JANUARY 4, 2017')
it.createTests(globals())
|
<commit_before><commit_msg>Add Test: on garbage day, known recycle<commit_after>from __future__ import print_function
from nose2.tools import such
from mkerefuse.refuse import RefusePickup
def setup_parser(html_path):
"""
Reads test HTML & instantiates a new `RefusePickup`
:param html_path: Path to HTML file with a test response
:type html_path: str
:return: RefusePickup instance
:rtype: mkerefuse.RefusePickup
"""
with open(html_path, 'r') as infile:
return RefusePickup.from_html(infile.read())
with such.A('successfully fetched response') as it:
with it.having('garbage day'):
@it.has_setup
def setup():
it.parser = setup_parser('tests/data/garbageday.html')
@it.should('have the correct garbage route')
def test(case):
case.assertEqual(
it.parser.route_garbage,
'NA1-2A')
@it.should('have the correct recycle route')
def test(case):
case.assertEqual(
it.parser.route_recycle,
'NR1-2-3')
@it.should('have the correct next garbage pickup')
def test(case):
case.assertEqual(
it.parser.next_pickup_garbage,
'TUESDAY DECEMBER 27, 2016')
@it.should('have the correct next recycle pickup range')
def test(case):
case.assertEqual(
it.parser.next_pickup_recycle_after,
'TUESDAY JANUARY 3, 2017')
case.assertEqual(
it.parser.next_pickup_recycle_before,
'WEDNESDAY JANUARY 4, 2017')
it.createTests(globals())
|
|
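The `such` DSL above compiles down to ordinary unittest cases; one of them written out by hand would look roughly like this (assumes the same `setup_parser` helper and fixture file):
import unittest
class TestGarbageDay(unittest.TestCase):
    def test_garbage_route(self):
        parser = setup_parser('tests/data/garbageday.html')
        self.assertEqual(parser.route_garbage, 'NA1-2A')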
1abeec5c22f22065e377a1567d3677e56fbc1b2c
|
tests/version_test.py
|
tests/version_test.py
|
import collections
import numbers
import os
from sqlalchemy import __version__
from sqlalchemy_imageattach.version import VERSION, VERSION_INFO
def test_version_info():
assert isinstance(VERSION_INFO, collections.Sequence)
assert len(VERSION_INFO) == 3
assert isinstance(VERSION_INFO[0], numbers.Integral)
assert isinstance(VERSION_INFO[1], numbers.Integral)
assert isinstance(VERSION_INFO[2], numbers.Integral)
def test_sqlalchemy_version():
assert list(map(int, __version__.split('.')[:2])) == list(VERSION_INFO[:2])
assert __version__.split('.')[:2] == VERSION.split('.')[:2]
def test_version():
assert isinstance(VERSION, str)
assert list(map(int, VERSION.split('.'))) == list(VERSION_INFO)
def test_print():
with os.popen('python -m sqlalchemy_imageattach.version') as pipe:
printed_version = pipe.read().strip()
assert printed_version == VERSION
|
import collections
import numbers
import os
from sqlalchemy import __version__
from sqlalchemy_imageattach.version import VERSION, VERSION_INFO
def test_version_info():
assert isinstance(VERSION_INFO, collections.Sequence)
assert len(VERSION_INFO) == 3
assert isinstance(VERSION_INFO[0], numbers.Integral)
assert isinstance(VERSION_INFO[1], numbers.Integral)
assert isinstance(VERSION_INFO[2], numbers.Integral)
def test_sqlalchemy_version():
sqla_version_info = list(map(int, __version__.split('.')[:2]))
assert sqla_version_info >= list(VERSION_INFO[:2])
assert __version__.split('.')[:2] >= VERSION.split('.')[:2]
def test_version():
assert isinstance(VERSION, str)
assert list(map(int, VERSION.split('.'))) == list(VERSION_INFO)
def test_print():
with os.popen('python -m sqlalchemy_imageattach.version') as pipe:
printed_version = pipe.read().strip()
assert printed_version == VERSION
|
Support any later versions of SQLAlchemy
|
Support any later versions of SQLAlchemy
|
Python
|
mit
|
youknowone/sqlalchemy-imageattach,dahlia/sqlalchemy-imageattach
|
import collections
import numbers
import os
from sqlalchemy import __version__
from sqlalchemy_imageattach.version import VERSION, VERSION_INFO
def test_version_info():
assert isinstance(VERSION_INFO, collections.Sequence)
assert len(VERSION_INFO) == 3
assert isinstance(VERSION_INFO[0], numbers.Integral)
assert isinstance(VERSION_INFO[1], numbers.Integral)
assert isinstance(VERSION_INFO[2], numbers.Integral)
def test_sqlalchemy_version():
assert list(map(int, __version__.split('.')[:2])) == list(VERSION_INFO[:2])
assert __version__.split('.')[:2] == VERSION.split('.')[:2]
def test_version():
assert isinstance(VERSION, str)
assert list(map(int, VERSION.split('.'))) == list(VERSION_INFO)
def test_print():
with os.popen('python -m sqlalchemy_imageattach.version') as pipe:
printed_version = pipe.read().strip()
assert printed_version == VERSION
Support any later versions of SQLAlchemy
|
import collections
import numbers
import os
from sqlalchemy import __version__
from sqlalchemy_imageattach.version import VERSION, VERSION_INFO
def test_version_info():
assert isinstance(VERSION_INFO, collections.Sequence)
assert len(VERSION_INFO) == 3
assert isinstance(VERSION_INFO[0], numbers.Integral)
assert isinstance(VERSION_INFO[1], numbers.Integral)
assert isinstance(VERSION_INFO[2], numbers.Integral)
def test_sqlalchemy_version():
sqla_version_info = list(map(int, __version__.split('.')[:2]))
assert sqla_version_info >= list(VERSION_INFO[:2])
assert __version__.split('.')[:2] >= VERSION.split('.')[:2]
def test_version():
assert isinstance(VERSION, str)
assert list(map(int, VERSION.split('.'))) == list(VERSION_INFO)
def test_print():
with os.popen('python -m sqlalchemy_imageattach.version') as pipe:
printed_version = pipe.read().strip()
assert printed_version == VERSION
|
<commit_before>import collections
import numbers
import os
from sqlalchemy import __version__
from sqlalchemy_imageattach.version import VERSION, VERSION_INFO
def test_version_info():
assert isinstance(VERSION_INFO, collections.Sequence)
assert len(VERSION_INFO) == 3
assert isinstance(VERSION_INFO[0], numbers.Integral)
assert isinstance(VERSION_INFO[1], numbers.Integral)
assert isinstance(VERSION_INFO[2], numbers.Integral)
def test_sqlalchemy_version():
assert list(map(int, __version__.split('.')[:2])) == list(VERSION_INFO[:2])
assert __version__.split('.')[:2] == VERSION.split('.')[:2]
def test_version():
assert isinstance(VERSION, str)
assert list(map(int, VERSION.split('.'))) == list(VERSION_INFO)
def test_print():
with os.popen('python -m sqlalchemy_imageattach.version') as pipe:
printed_version = pipe.read().strip()
assert printed_version == VERSION
<commit_msg>Support any later versions of SQLAlchemy<commit_after>
|
import collections
import numbers
import os
from sqlalchemy import __version__
from sqlalchemy_imageattach.version import VERSION, VERSION_INFO
def test_version_info():
assert isinstance(VERSION_INFO, collections.Sequence)
assert len(VERSION_INFO) == 3
assert isinstance(VERSION_INFO[0], numbers.Integral)
assert isinstance(VERSION_INFO[1], numbers.Integral)
assert isinstance(VERSION_INFO[2], numbers.Integral)
def test_sqlalchemy_version():
sqla_version_info = list(map(int, __version__.split('.')[:2]))
assert sqla_version_info >= list(VERSION_INFO[:2])
assert __version__.split('.')[:2] >= VERSION.split('.')[:2]
def test_version():
assert isinstance(VERSION, str)
assert list(map(int, VERSION.split('.'))) == list(VERSION_INFO)
def test_print():
with os.popen('python -m sqlalchemy_imageattach.version') as pipe:
printed_version = pipe.read().strip()
assert printed_version == VERSION
|
import collections
import numbers
import os
from sqlalchemy import __version__
from sqlalchemy_imageattach.version import VERSION, VERSION_INFO
def test_version_info():
assert isinstance(VERSION_INFO, collections.Sequence)
assert len(VERSION_INFO) == 3
assert isinstance(VERSION_INFO[0], numbers.Integral)
assert isinstance(VERSION_INFO[1], numbers.Integral)
assert isinstance(VERSION_INFO[2], numbers.Integral)
def test_sqlalchemy_version():
assert list(map(int, __version__.split('.')[:2])) == list(VERSION_INFO[:2])
assert __version__.split('.')[:2] == VERSION.split('.')[:2]
def test_version():
assert isinstance(VERSION, str)
assert list(map(int, VERSION.split('.'))) == list(VERSION_INFO)
def test_print():
with os.popen('python -m sqlalchemy_imageattach.version') as pipe:
printed_version = pipe.read().strip()
assert printed_version == VERSION
Support any later versions of SQLAlchemyimport collections
import numbers
import os
from sqlalchemy import __version__
from sqlalchemy_imageattach.version import VERSION, VERSION_INFO
def test_version_info():
assert isinstance(VERSION_INFO, collections.Sequence)
assert len(VERSION_INFO) == 3
assert isinstance(VERSION_INFO[0], numbers.Integral)
assert isinstance(VERSION_INFO[1], numbers.Integral)
assert isinstance(VERSION_INFO[2], numbers.Integral)
def test_sqlalchemy_version():
sqla_version_info = list(map(int, __version__.split('.')[:2]))
assert sqla_version_info >= list(VERSION_INFO[:2])
assert __version__.split('.')[:2] >= VERSION.split('.')[:2]
def test_version():
assert isinstance(VERSION, str)
assert list(map(int, VERSION.split('.'))) == list(VERSION_INFO)
def test_print():
with os.popen('python -m sqlalchemy_imageattach.version') as pipe:
printed_version = pipe.read().strip()
assert printed_version == VERSION
|
<commit_before>import collections
import numbers
import os
from sqlalchemy import __version__
from sqlalchemy_imageattach.version import VERSION, VERSION_INFO
def test_version_info():
assert isinstance(VERSION_INFO, collections.Sequence)
assert len(VERSION_INFO) == 3
assert isinstance(VERSION_INFO[0], numbers.Integral)
assert isinstance(VERSION_INFO[1], numbers.Integral)
assert isinstance(VERSION_INFO[2], numbers.Integral)
def test_sqlalchemy_version():
assert list(map(int, __version__.split('.')[:2])) == list(VERSION_INFO[:2])
assert __version__.split('.')[:2] == VERSION.split('.')[:2]
def test_version():
assert isinstance(VERSION, str)
assert list(map(int, VERSION.split('.'))) == list(VERSION_INFO)
def test_print():
with os.popen('python -m sqlalchemy_imageattach.version') as pipe:
printed_version = pipe.read().strip()
assert printed_version == VERSION
<commit_msg>Support any later versions of SQLAlchemy<commit_after>import collections
import numbers
import os
from sqlalchemy import __version__
from sqlalchemy_imageattach.version import VERSION, VERSION_INFO
def test_version_info():
assert isinstance(VERSION_INFO, collections.Sequence)
assert len(VERSION_INFO) == 3
assert isinstance(VERSION_INFO[0], numbers.Integral)
assert isinstance(VERSION_INFO[1], numbers.Integral)
assert isinstance(VERSION_INFO[2], numbers.Integral)
def test_sqlalchemy_version():
sqla_version_info = list(map(int, __version__.split('.')[:2]))
assert sqla_version_info >= list(VERSION_INFO[:2])
assert __version__.split('.')[:2] >= VERSION.split('.')[:2]
def test_version():
assert isinstance(VERSION, str)
assert list(map(int, VERSION.split('.'))) == list(VERSION_INFO)
def test_print():
with os.popen('python -m sqlalchemy_imageattach.version') as pipe:
printed_version = pipe.read().strip()
assert printed_version == VERSION
|
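The switch from `==` to `>=` only behaves numerically because each part is cast to int first; raw string parts compare lexicographically, so the record's second assertion (which still compares string lists) inherits this pitfall past version 9:
assert ['1', '10'] < ['1', '9']   # lexicographic: '1' sorts before '9'
assert [int(p) for p in '1.10'.split('.')] > [int(p) for p in '1.9'.split('.')]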
118edbb78d2d56ccc41cb3c65346d4f3d59f5647
|
py/insert-delete-getrandom-o1.py
|
py/insert-delete-getrandom-o1.py
|
import random
class RandomizedSet(object):
def __init__(self):
"""
Initialize your data structure here.
"""
self.pos = dict()
self.lst = []
def insert(self, val):
"""
Inserts a value to the set. Returns true if the set did not already contain the specified element.
:type val: int
:rtype: bool
"""
if val not in self.pos:
self.pos[val] = len(self.lst)
self.lst.append(val)
return True
return False
def remove(self, val):
"""
Removes a value from the set. Returns true if the set contained the specified element.
:type val: int
:rtype: bool
"""
if val in self.pos:
idx = self.pos[val]
self.pos[self.lst[-1]] = idx
self.lst[idx] = self.lst[-1]
self.lst.pop()
self.pos.pop(val)
return True
return False
def getRandom(self):
"""
Get a random element from the set.
:rtype: int
"""
return random.choice(self.lst)
# Your RandomizedSet object will be instantiated and called as such:
# obj = RandomizedSet()
# param_1 = obj.insert(val)
# param_2 = obj.remove(val)
# param_3 = obj.getRandom()
|
Add py solution for 380. Insert Delete GetRandom O(1)
|
Add py solution for 380. Insert Delete GetRandom O(1)
380. Insert Delete GetRandom O(1): https://leetcode.com/problems/insert-delete-getrandom-o1/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 380. Insert Delete GetRandom O(1)
380. Insert Delete GetRandom O(1): https://leetcode.com/problems/insert-delete-getrandom-o1/
|
import random
class RandomizedSet(object):
def __init__(self):
"""
Initialize your data structure here.
"""
self.pos = dict()
self.lst = []
def insert(self, val):
"""
Inserts a value to the set. Returns true if the set did not already contain the specified element.
:type val: int
:rtype: bool
"""
if val not in self.pos:
self.pos[val] = len(self.lst)
self.lst.append(val)
return True
return False
def remove(self, val):
"""
Removes a value from the set. Returns true if the set contained the specified element.
:type val: int
:rtype: bool
"""
if val in self.pos:
idx = self.pos[val]
self.pos[self.lst[-1]] = idx
self.lst[idx] = self.lst[-1]
self.lst.pop()
self.pos.pop(val)
return True
return False
def getRandom(self):
"""
Get a random element from the set.
:rtype: int
"""
return random.choice(self.lst)
# Your RandomizedSet object will be instantiated and called as such:
# obj = RandomizedSet()
# param_1 = obj.insert(val)
# param_2 = obj.remove(val)
# param_3 = obj.getRandom()
|
<commit_before><commit_msg>Add py solution for 380. Insert Delete GetRandom O(1)
380. Insert Delete GetRandom O(1): https://leetcode.com/problems/insert-delete-getrandom-o1/<commit_after>
|
import random
class RandomizedSet(object):
def __init__(self):
"""
Initialize your data structure here.
"""
self.pos = dict()
self.lst = []
def insert(self, val):
"""
Inserts a value to the set. Returns true if the set did not already contain the specified element.
:type val: int
:rtype: bool
"""
if val not in self.pos:
self.pos[val] = len(self.lst)
self.lst.append(val)
return True
return False
def remove(self, val):
"""
Removes a value from the set. Returns true if the set contained the specified element.
:type val: int
:rtype: bool
"""
if val in self.pos:
idx = self.pos[val]
self.pos[self.lst[-1]] = idx
self.lst[idx] = self.lst[-1]
self.lst.pop()
self.pos.pop(val)
return True
return False
def getRandom(self):
"""
Get a random element from the set.
:rtype: int
"""
return random.choice(self.lst)
# Your RandomizedSet object will be instantiated and called as such:
# obj = RandomizedSet()
# param_1 = obj.insert(val)
# param_2 = obj.remove(val)
# param_3 = obj.getRandom()
|
Add py solution for 380. Insert Delete GetRandom O(1)
380. Insert Delete GetRandom O(1): https://leetcode.com/problems/insert-delete-getrandom-o1/import random
class RandomizedSet(object):
def __init__(self):
"""
Initialize your data structure here.
"""
self.pos = dict()
self.lst = []
def insert(self, val):
"""
Inserts a value to the set. Returns true if the set did not already contain the specified element.
:type val: int
:rtype: bool
"""
if val not in self.pos:
self.pos[val] = len(self.lst)
self.lst.append(val)
return True
return False
def remove(self, val):
"""
Removes a value from the set. Returns true if the set contained the specified element.
:type val: int
:rtype: bool
"""
if val in self.pos:
idx = self.pos[val]
self.pos[self.lst[-1]] = idx
self.lst[idx] = self.lst[-1]
self.lst.pop()
self.pos.pop(val)
return True
return False
def getRandom(self):
"""
Get a random element from the set.
:rtype: int
"""
return random.choice(self.lst)
# Your RandomizedSet object will be instantiated and called as such:
# obj = RandomizedSet()
# param_1 = obj.insert(val)
# param_2 = obj.remove(val)
# param_3 = obj.getRandom()
|
<commit_before><commit_msg>Add py solution for 380. Insert Delete GetRandom O(1)
380. Insert Delete GetRandom O(1): https://leetcode.com/problems/insert-delete-getrandom-o1/<commit_after>import random
class RandomizedSet(object):
def __init__(self):
"""
Initialize your data structure here.
"""
self.pos = dict()
self.lst = []
def insert(self, val):
"""
Inserts a value to the set. Returns true if the set did not already contain the specified element.
:type val: int
:rtype: bool
"""
if val not in self.pos:
self.pos[val] = len(self.lst)
self.lst.append(val)
return True
return False
def remove(self, val):
"""
Removes a value from the set. Returns true if the set contained the specified element.
:type val: int
:rtype: bool
"""
if val in self.pos:
idx = self.pos[val]
self.pos[self.lst[-1]] = idx
self.lst[idx] = self.lst[-1]
self.lst.pop()
self.pos.pop(val)
return True
return False
def getRandom(self):
"""
Get a random element from the set.
:rtype: int
"""
return random.choice(self.lst)
# Your RandomizedSet object will be instantiated and called as such:
# obj = RandomizedSet()
# param_1 = obj.insert(val)
# param_2 = obj.remove(val)
# param_3 = obj.getRandom()
|
|
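A quick exercise of the swap-and-pop trick, assuming the class above (with `import random`) is in scope:
rs = RandomizedSet()
assert rs.insert(1) is True
assert rs.insert(1) is False   # duplicates are rejected
assert rs.insert(2) is True
assert rs.remove(1) is True    # last element is swapped into slot 0, then popped
assert rs.getRandom() == 2     # one element left, so the choice is deterministic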
7e40ef982aa7a75cde4171d14a95def80f99eb35
|
py/maximum-average-subarray-i.py
|
py/maximum-average-subarray-i.py
|
class Solution(object):
def findMaxAverage(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: float
"""
s = sum(nums[:k])
m = s
for i in xrange(k, len(nums)):
s += nums[i] - nums[i - k]
m = max(m, s)
return float(m) / k
|
Add py solution for 643. Maximum Average Subarray I
|
Add py solution for 643. Maximum Average Subarray I
643. Maximum Average Subarray I: https://leetcode.com/problems/maximum-average-subarray-i/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 643. Maximum Average Subarray I
643. Maximum Average Subarray I: https://leetcode.com/problems/maximum-average-subarray-i/
|
class Solution(object):
def findMaxAverage(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: float
"""
s = sum(nums[:k])
m = s
for i in xrange(k, len(nums)):
s += nums[i] - nums[i - k]
m = max(m, s)
return float(m) / k
|
<commit_before><commit_msg>Add py solution for 643. Maximum Average Subarray I
643. Maximum Average Subarray I: https://leetcode.com/problems/maximum-average-subarray-i/<commit_after>
|
class Solution(object):
def findMaxAverage(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: float
"""
s = sum(nums[:k])
m = s
for i in xrange(k, len(nums)):
s += nums[i] - nums[i - k]
m = max(m, s)
return float(m) / k
|
Add py solution for 643. Maximum Average Subarray I
643. Maximum Average Subarray I: https://leetcode.com/problems/maximum-average-subarray-i/class Solution(object):
def findMaxAverage(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: float
"""
s = sum(nums[:k])
m = s
for i in xrange(k, len(nums)):
s += nums[i] - nums[i - k]
m = max(m, s)
return float(m) / k
|
<commit_before><commit_msg>Add py solution for 643. Maximum Average Subarray I
643. Maximum Average Subarray I: https://leetcode.com/problems/maximum-average-subarray-i/<commit_after>class Solution(object):
def findMaxAverage(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: float
"""
s = sum(nums[:k])
m = s
for i in xrange(k, len(nums)):
s += nums[i] - nums[i - k]
m = max(m, s)
return float(m) / k
|
|
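The same O(n) sliding window in Python 3, checked against the standard LeetCode example (hypothetical snake_case name):
def find_max_average(nums, k):
    # Add the element entering the window, drop the one leaving it,
    # and track the maximum window sum seen.
    s = m = sum(nums[:k])
    for i in range(k, len(nums)):
        s += nums[i] - nums[i - k]
        m = max(m, s)
    return m / k
assert find_max_average([1, 12, -5, -6, 50, 3], 4) == 12.75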
b97f97710a63c1d0c501c14e49dd0e26d8fb92d5
|
rabbitmq-connector.py
|
rabbitmq-connector.py
|
import asyncio
import aioamqp
@asyncio.coroutine
def callback(channel, body, envelope, properties):
print(body)
@asyncio.coroutine
def connect():
try:
transport, protocol = yield from aioamqp.connect()
channel = yield from protocol.channel()
except aioamqp.AmqpClosedConnection:
print("closed connections")
return
yield from channel.exchange("mnemosyne", "traces", durable=True)
yield from channel.queue(queue_name="mnemosyne-server", durable=True)
yield from channel.queue_bind(exchange_name="mnemosyne", queue_name="hello", routing_key="#")
print(' [*] Waiting for logs. To exit press CTRL+C')
yield from channel.basic_consume(callback, queue_name="mnemosyne-server", no_ack=True)
# close using the `AMQP` protocol
#yield from protocol.close()
# ensure the socket is closed.
#transport.close()
event_loop = asyncio.get_event_loop()
event_loop.run_until_complete(connect())
event_loop.run_forever()
|
Add basic python script for receiving mnemosyne AMQP messages
|
Add basic python script for receiving mnemosyne AMQP messages
|
Python
|
agpl-3.0
|
jgraichen/mnemosyne,jgraichen/mnemosyne,jgraichen/mnemosyne
|
Add basic python script for receiving mnemosyne AMQP messages
|
import asyncio
import aioamqp
@asyncio.coroutine
def callback(channel, body, envelope, properties):
print(body)
@asyncio.coroutine
def connect():
try:
transport, protocol = yield from aioamqp.connect()
channel = yield from protocol.channel()
except aioamqp.AmqpClosedConnection:
print("closed connections")
return
yield from channel.exchange("mnemosyne", "traces", durable=True)
yield from channel.queue(queue_name="mnemosyne-server", durable=True)
yield from channel.queue_bind(exchange_name="mnemosyne", queue_name="hello", routing_key="#")
print(' [*] Waiting for logs. To exit press CTRL+C')
yield from channel.basic_consume(callback, queue_name="mnemosyne-server", no_ack=True)
# close using the `AMQP` protocol
#yield from protocol.close()
# ensure the socket is closed.
#transport.close()
event_loop = asyncio.get_event_loop()
event_loop.run_until_complete(connect())
event_loop.run_forever()
|
<commit_before><commit_msg>Add basic python script for receiving mnemosyne AMQP messages<commit_after>
|
import asyncio
import aioamqp
@asyncio.coroutine
def callback(channel, body, envelope, properties):
print(body)
@asyncio.coroutine
def connect():
try:
transport, protocol = yield from aioamqp.connect()
channel = yield from protocol.channel()
except aioamqp.AmqpClosedConnection:
print("closed connections")
return
yield from channel.exchange("mnemosyne", "traces", durable=True)
yield from channel.queue(queue_name="mnemosyne-server", durable=True)
yield from channel.queue_bind(exchange_name="mnemosyne", queue_name="hello", routing_key="#")
print(' [*] Waiting for logs. To exit press CTRL+C')
yield from channel.basic_consume(callback, queue_name="mnemosyne-server", no_ack=True)
# close using the `AMQP` protocol
#yield from protocol.close()
# ensure the socket is closed.
#transport.close()
event_loop = asyncio.get_event_loop()
event_loop.run_until_complete(connect())
event_loop.run_forever()
|
Add basic python script for receiving mnemosyne AMQP messagesimport asyncio
import aioamqp
@asyncio.coroutine
def callback(channel, body, envelope, properties):
print(body)
@asyncio.coroutine
def connect():
try:
transport, protocol = yield from aioamqp.connect()
channel = yield from protocol.channel()
except aioamqp.AmqpClosedConnection:
print("closed connections")
return
yield from channel.exchange("mnemosyne", "traces", durable=True)
yield from channel.queue(queue_name="mnemosyne-server", durable=True)
yield from channel.queue_bind(exchange_name="mnemosyne", queue_name="hello", routing_key="#")
print(' [*] Waiting for logs. To exit press CTRL+C')
yield from channel.basic_consume(callback, queue_name="mnemosyne-server", no_ack=True)
# close using the `AMQP` protocol
#yield from protocol.close()
# ensure the socket is closed.
#transport.close()
event_loop = asyncio.get_event_loop()
event_loop.run_until_complete(connect())
event_loop.run_forever()
|
<commit_before><commit_msg>Add basic python script for receiving mnemosyne AMQP messages<commit_after>import asyncio
import aioamqp
@asyncio.coroutine
def callback(channel, body, envelope, properties):
print(body)
@asyncio.coroutine
def connect():
try:
transport, protocol = yield from aioamqp.connect()
channel = yield from protocol.channel()
except aioamqp.AmqpClosedConnection:
print("closed connections")
return
yield from channel.exchange("mnemosyne", "traces", durable=True)
yield from channel.queue(queue_name="mnemosyne-server", durable=True)
yield from channel.queue_bind(exchange_name="mnemosyne", queue_name="hello", routing_key="#")
print(' [*] Waiting for logs. To exit press CTRL+C')
yield from channel.basic_consume(callback, queue_name="mnemosyne-server", no_ack=True)
# close using the `AMQP` protocol
#yield from protocol.close()
# ensure the socket is closed.
#transport.close()
event_loop = asyncio.get_event_loop()
event_loop.run_until_complete(connect())
event_loop.run_forever()
|
|
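The `@asyncio.coroutine`/`yield from` spelling above predates Python 3.5; the same aioamqp calls in modern async/await form (consume path only, as a sketch):
import asyncio
import aioamqp
async def callback(channel, body, envelope, properties):
    print(body)
async def connect():
    # Identical aioamqp calls to the record, in async/await syntax.
    transport, protocol = await aioamqp.connect()
    channel = await protocol.channel()
    await channel.basic_consume(callback, queue_name="mnemosyne-server", no_ack=True)
asyncio.get_event_loop().run_until_complete(connect())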
720f537591fe4b79873fef904f504c401d244790
|
tests/test_ctests.py
|
tests/test_ctests.py
|
import os
"""Simple nosetest compatible test wrapper that runs the plain-C tests found in brushlib."""
# TODO: get more fine grained test setup
# * Make the C test lib be able to report the registered testcases
# * Make the C test lib be able to run a single specified test
# * Use this to generate and execute one test per case
tests_dir = os.path.dirname(os.path.abspath(__file__))
lib_dir = os.path.abspath(os.path.join(tests_dir, '../..'))
def is_ctest(fn):
return fn.startswith('test-') and not os.path.splitext(fn)[1]
def test_brushlib():
c_tests = [os.path.abspath(os.path.join(tests_dir, fn)) for fn in os.listdir(tests_dir) if is_ctest(fn)]
for executable in c_tests:
yield run_ctest, executable
def run_ctest(executable):
import subprocess
environ = {}
environ.update(os.environ)
environ.update({'LD_LIBRARY_PATH': lib_dir})
retval = subprocess.call(executable, env=environ, cwd=tests_dir)
assert (retval == 0)
|
Add a nose compatible wrapper
|
brushlib/tests: Add a nose compatible wrapper
Allows using the "nosetests" test runner to
discover and run the brushlib C tests.
|
Python
|
isc
|
achadwick/libmypaint,b3sigma/libmypaint,achadwick/libmypaint,achadwick/libmypaint,b3sigma/libmypaint,achadwick/libmypaint,b3sigma/libmypaint
|
brushlib/tests: Add a nose compatible wrapper
Allows using the "nosetests" test runner to
discover and run the brushlib C tests.
|
import os
"""Simple nosetest compatible test wrapper that runs the plain-C tests found in brushlib."""
# TODO: get more fine grained test setup
# * Make the C test lib be able to report the registered testcases
# * Make the C test lib be able to run a single specified test
# * Use this to generate and execute one test per case
tests_dir = os.path.dirname(os.path.abspath(__file__))
lib_dir = os.path.abspath(os.path.join(tests_dir, '../..'))
def is_ctest(fn):
return fn.startswith('test-') and not os.path.splitext(fn)[1]
def test_brushlib():
c_tests = [os.path.abspath(os.path.join(tests_dir, fn)) for fn in os.listdir(tests_dir) if is_ctest(fn)]
for executable in c_tests:
yield run_ctest, executable
def run_ctest(executable):
import subprocess
environ = {}
environ.update(os.environ)
environ.update({'LD_LIBRARY_PATH': lib_dir})
retval = subprocess.call(executable, env=environ, cwd=tests_dir)
assert (retval == 0)
|
<commit_before><commit_msg>brushlib/tests: Add a nose compatible wrapper
Allows using the "nosetests" test runner to
discover and run the brushlib C tests.<commit_after>
|
import os
"""Simple nosetest compatible test wrapper that runs the plain-C tests found in brushlib."""
# TODO: get more fine grained test setup
# * Make the C test lib be able to report the registered testcases
# * Make the C test lib be able to run a single specified test
# * Use this to generate and execute one test per case
tests_dir = os.path.dirname(os.path.abspath(__file__))
lib_dir = os.path.abspath(os.path.join(tests_dir, '../..'))
def is_ctest(fn):
return fn.startswith('test-') and not os.path.splitext(fn)[1]
def test_brushlib():
c_tests = [os.path.abspath(os.path.join(tests_dir, fn)) for fn in os.listdir(tests_dir) if is_ctest(fn)]
for executable in c_tests:
yield run_ctest, executable
def run_ctest(executable):
import subprocess
environ = {}
environ.update(os.environ)
environ.update({'LD_LIBRARY_PATH': lib_dir})
retval = subprocess.call(executable, env=environ, cwd=tests_dir)
assert (retval == 0)
|
brushlib/tests: Add a nose compatible wrapper
Allows using the "nosetests" test runner to
discover and run the brushlib C tests.
import os
"""Simple nosetest compatible test wrapper that runs the plain-C tests found in brushlib."""
# TODO: get more fine grained test setup
# * Make the C test lib be able to report the registered testcases
# * Make the C test lib be able to run a single specified test
# * Use this to generate and execute one test per case
tests_dir = os.path.dirname(os.path.abspath(__file__))
lib_dir = os.path.abspath(os.path.join(tests_dir, '../..'))
def is_ctest(fn):
return fn.startswith('test-') and not os.path.splitext(fn)[1]
def test_brushlib():
c_tests = [os.path.abspath(os.path.join(tests_dir, fn)) for fn in os.listdir(tests_dir) if is_ctest(fn)]
for executable in c_tests:
yield run_ctest, executable
def run_ctest(executable):
import subprocess
environ = {}
environ.update(os.environ)
environ.update({'LD_LIBRARY_PATH': lib_dir})
retval = subprocess.call(executable, env=environ, cwd=tests_dir)
assert (retval == 0)
|
<commit_before><commit_msg>brushlib/tests: Add a nose compatible wrapper
Allows using the "nosetests" test runner to
discover and run the brushlib C tests.<commit_after>
import os
"""Simple nosetest compatible test wrapper that runs the plain-C tests found in brushlib."""
# TODO: get more fine grained test setup
# * Make the C test lib be able to report the registered testcases
# * Make the C test lib be able to run a single specified test
# * Use this to generate and execute one test per case
tests_dir = os.path.dirname(os.path.abspath(__file__))
lib_dir = os.path.abspath(os.path.join(tests_dir, '../..'))
def is_ctest(fn):
return fn.startswith('test-') and not os.path.splitext(fn)[1]
def test_brushlib():
c_tests = [os.path.abspath(os.path.join(tests_dir, fn)) for fn in os.listdir(tests_dir) if is_ctest(fn)]
for executable in c_tests:
yield run_ctest, executable
def run_ctest(executable):
import subprocess
environ = {}
environ.update(os.environ)
environ.update({'LD_LIBRARY_PATH': lib_dir})
retval = subprocess.call(executable, env=environ, cwd=tests_dir)
assert (retval == 0)
|
|
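The generator-test pattern (`yield run_ctest, executable`) is nose-specific; under pytest the same fan-out is usually written with parametrize (assuming the same `c_tests` list and `run_ctest` helper are importable):
import pytest
@pytest.mark.parametrize('executable', c_tests)
def test_ctest(executable):
    run_ctest(executable)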
0d6c31fc6095d40b580568faf8aa787575a55183
|
newswall/providers/fb_graph_feed.py
|
newswall/providers/fb_graph_feed.py
|
"""
Facebook Graph Feed API Provider
================================
This provider needs `offline_access` permission.
See here how to get an access token with all permissions:
http://liquid9.tv/blog/2011/may/12/obtaining-permanent-facebook-oauth-access-token/
Required configuration keys::
{
"provider": "newswall.providers.fb_graph_feed",
"object": "FEINHEIT",
"from_id": "239846135569",
"access_token": "..."
}
"""
from datetime import datetime
import facebook
from newswall.providers.base import ProviderBase
class Provider(ProviderBase):
def update(self):
graph = facebook.GraphAPI(self.config['access_token'])
response = graph.get_object(u'%s/feed/' % self.config['object'])
from_id = self.config['from_id']
for entry in response['data']:
if entry['from']['id'] != from_id:
continue
if 'to' in entry: # messages
continue
if 'actions' not in entry:
continue
self.create_story(entry['actions'][0]['link'], # comment or like
title=entry.get('name') or entry.get('message') or entry.get('story', u''),
body=entry.get('message', u''),
image_url=entry.get('picture', u''),
timestamp=datetime.strptime(entry['created_time'], '%Y-%m-%dT%H:%M:%S+0000'),
)
|
Add a simple provider for wall posts via the Facebook Graph API
|
Add a simple provider for wall posts via the Facebook Graph API
|
Python
|
bsd-3-clause
|
matthiask/django-newswall,michaelkuty/django-newswall,michaelkuty/django-newswall,HerraLampila/django-newswall,registerguard/django-newswall,registerguard/django-newswall,matthiask/django-newswall,HerraLampila/django-newswall
|
Add a simple provider for wall posts via the Facebook Graph API
|
"""
Facebook Graph Feed API Provider
================================
This provider needs `offline_access` permission.
See here how to get an access token with all permissions:
http://liquid9.tv/blog/2011/may/12/obtaining-permanent-facebook-oauth-access-token/
Required configuration keys::
{
"provider": "newswall.providers.fb_graph_feed",
"object": "FEINHEIT",
"from_id": "239846135569",
"access_token": "..."
}
"""
from datetime import datetime
import facebook
from newswall.providers.base import ProviderBase
class Provider(ProviderBase):
def update(self):
graph = facebook.GraphAPI(self.config['access_token'])
response = graph.get_object(u'%s/feed/' % self.config['object'])
from_id = self.config['from_id']
for entry in response['data']:
if entry['from']['id'] != from_id:
continue
if 'to' in entry: # messages
continue
if 'actions' not in entry:
continue
self.create_story(entry['actions'][0]['link'], # comment or like
title=entry.get('name') or entry.get('message') or entry.get('story', u''),
body=entry.get('message', u''),
image_url=entry.get('picture', u''),
timestamp=datetime.strptime(entry['created_time'], '%Y-%m-%dT%H:%M:%S+0000'),
)
|
<commit_before><commit_msg>Add a simple provider for wall posts via the Facebook Graph API<commit_after>
|
"""
Facebook Graph Feed API Provider
================================
This provider needs `offline_access` permission.
See here how to get an access token with all permissions:
http://liquid9.tv/blog/2011/may/12/obtaining-permanent-facebook-oauth-access-token/
Required configuration keys::
{
"provider": "newswall.providers.fb_graph_feed",
"object": "FEINHEIT",
"from_id": "239846135569",
"access_token": "..."
}
"""
from datetime import datetime
import facebook
from newswall.providers.base import ProviderBase
class Provider(ProviderBase):
def update(self):
graph = facebook.GraphAPI(self.config['access_token'])
response = graph.get_object(u'%s/feed/' % self.config['object'])
from_id = self.config['from_id']
for entry in response['data']:
if entry['from']['id'] != from_id:
continue
if 'to' in entry: # messages
continue
if 'actions' not in entry:
continue
self.create_story(entry['actions'][0]['link'], # comment or like
title=entry.get('name') or entry.get('message') or entry.get('story', u''),
body=entry.get('message', u''),
image_url=entry.get('picture', u''),
timestamp=datetime.strptime(entry['created_time'], '%Y-%m-%dT%H:%M:%S+0000'),
)
|
Add a simple provider for wall posts via the Facebook Graph API"""
Facebook Graph Feed API Provider
================================
This provider needs `offline_access` permission.
See here how to get an access token with all permissions:
http://liquid9.tv/blog/2011/may/12/obtaining-permanent-facebook-oauth-access-token/
Required configuration keys::
{
"provider": "newswall.providers.fb_graph_feed",
"object": "FEINHEIT",
"from_id": "239846135569",
"access_token": "..."
}
"""
from datetime import datetime
import facebook
from newswall.providers.base import ProviderBase
class Provider(ProviderBase):
def update(self):
graph = facebook.GraphAPI(self.config['access_token'])
response = graph.get_object(u'%s/feed/' % self.config['object'])
from_id = self.config['from_id']
for entry in response['data']:
if entry['from']['id'] != from_id:
continue
if 'to' in entry: # messages
continue
if 'actions' not in entry:
continue
self.create_story(entry['actions'][0]['link'], # comment or like
title=entry.get('name') or entry.get('message') or entry.get('story', u''),
body=entry.get('message', u''),
image_url=entry.get('picture', u''),
timestamp=datetime.strptime(entry['created_time'], '%Y-%m-%dT%H:%M:%S+0000'),
)
|
<commit_before><commit_msg>Add a simple provider for wall posts via the Facebook Graph API<commit_after>"""
Facebook Graph Feed API Provider
================================
This provider needs `offline_access` permission.
See here how to get an access token with all permissions:
http://liquid9.tv/blog/2011/may/12/obtaining-permanent-facebook-oauth-access-token/
Required configuration keys::
{
"provider": "newswall.providers.fb_graph_feed",
"object": "FEINHEIT",
"from_id": "239846135569",
"access_token": "..."
}
"""
from datetime import datetime
import facebook
from newswall.providers.base import ProviderBase
class Provider(ProviderBase):
def update(self):
graph = facebook.GraphAPI(self.config['access_token'])
response = graph.get_object(u'%s/feed/' % self.config['object'])
from_id = self.config['from_id']
for entry in response['data']:
if entry['from']['id'] != from_id:
continue
if 'to' in entry: # messages
continue
if 'actions' not in entry:
continue
self.create_story(entry['actions'][0]['link'], # comment or like
title=entry.get('name') or entry.get('message') or entry.get('story', u''),
body=entry.get('message', u''),
image_url=entry.get('picture', u''),
timestamp=datetime.strptime(entry['created_time'], '%Y-%m-%dT%H:%M:%S+0000'),
)
|
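The provider's timestamp parsing treats the trailing `+0000` as literal text in the format string, which is fine for the Graph API's fixed UTC stamps; a standalone check:
from datetime import datetime
ts = datetime.strptime('2012-05-01T12:30:00+0000', '%Y-%m-%dT%H:%M:%S+0000')
assert (ts.year, ts.hour) == (2012, 12)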